Index: Tools/Scripts/webkitpy/layout_tests/generate_results_dashboard.py |
diff --git a/Tools/Scripts/webkitpy/layout_tests/generate_results_dashboard.py b/Tools/Scripts/webkitpy/layout_tests/generate_results_dashboard.py |
index 5210596a454b59cd5d9f5af8e7905003c98f5a80..81908b2210f7205dafffdfd6f2a08c4356864152 100644 |
--- a/Tools/Scripts/webkitpy/layout_tests/generate_results_dashboard.py |
+++ b/Tools/Scripts/webkitpy/layout_tests/generate_results_dashboard.py |
@@ -27,6 +27,7 @@ |
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
import json |
+import logging |
class ProcessJsonData(object): |
@@ -116,8 +117,13 @@ class GenerateDashBoard(object): |
if self._filesystem.isdir(self._filesystem.join(self._results_directory_path, dir)): |
results_directories.append(self._filesystem.join(self._results_directory_path, dir)) |
results_directories.sort(reverse=True, key=lambda x: self._filesystem.mtime(x)) |
- with open(self._filesystem.join(results_directories[0], 'failing_results.json'), "r") as file: |
- input_json_string = file.readline() |
+ current_failing_results_json_file = self._filesystem.join(results_directories[0], 'failing_results.json') |
+ try: |
+ file = open(current_failing_results_json_file, "r") |
+ except IOError: |
+ logging.debug("Could not generate archived results dashboard failing_results.json does not exist") |
+ return |
Dirk Pranke
2014/09/05 21:07:31
I don't think we should catch this error. It should be allowed to propagate, since a missing failing_results.json indicates a real problem.
patro
2014/09/06 06:50:55
Done.
|
+ input_json_string = file.readline() |
input_json_string = input_json_string[12:-2] # Remove preceding string ADD_RESULTS( and ); at the end |
self._current_result_json_dict['tests'] = json.loads(input_json_string)['tests'] |
results_directories = results_directories[1:] |
@@ -127,21 +133,26 @@ class GenerateDashBoard(object): |
# Load the remaining stale layout test results Json's to create the dashboard |
for json_file in results_directories: |
- with open(self._filesystem.join(json_file, 'failing_results.json'), "r") as file: |
+ failing_json_file_path = self._filesystem.join(json_file, 'failing_results.json') |
+ full_json_file_path = self._filesystem.join(json_file, 'full_results.json') |
+ with open(failing_json_file_path, "r") as file: |
json_string = file.readline() |
json_string = json_string[12:-2] # Remove preceding string ADD_RESULTS( and ); at the end |
self._old_failing_results_list.append(json.loads(json_string)) |
- with open(self._filesystem.join(json_file, 'full_results.json'), "r") as full_file: |
+ with open(full_json_file_path, "r") as full_file: |
json_string_full_result = full_file.readline() |
self._old_full_results_list.append(json.loads(json_string_full_result)) |
self._copy_dashboard_html() |
def generate(self): |
self._initialize() |
- process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, self._old_full_results_list) |
- self._final_result = process_json_data.generate_archived_result() |
- final_json = json.dumps(self._final_result) |
- final_json = 'ADD_RESULTS(' + final_json + ');' |
- with open(self._filesystem.join(self._results_directory, 'archived_results.json'), "w") as file: |
- file.write(final_json) |
+ |
+    # There must be at least one archived result to be processed |
+ if self._current_result_json_dict: |
+ process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, self._old_full_results_list) |
+ self._final_result = process_json_data.generate_archived_result() |
+ final_json = json.dumps(self._final_result) |
+ final_json = 'ADD_RESULTS(' + final_json + ');' |
+ with open(self._filesystem.join(self._results_directory, 'archived_results.json'), "w") as file: |
+ file.write(final_json) |