OLD | NEW |
1 # Copyright (C) 2014 Google Inc. All rights reserved. | 1 # Copyright (C) 2014 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 87 matching lines...) |
98 self._old_full_results_list = [] | 98 self._old_full_results_list = [] |
99 self._final_result = [] | 99 self._final_result = [] |
100 | 100 |
101 def _add_individual_result_links(self, results_directories): | 101 def _add_individual_result_links(self, results_directories): |
102 archived_results_file_list = [(file + '/results.html') for file in results_directories] | 102 archived_results_file_list = [(file + '/results.html') for file in results_directories] |
103 archived_results_file_list.insert(0, 'results.html') | 103 archived_results_file_list.insert(0, 'results.html') |
104 self._current_result_json_dict['result_links'] = archived_results_file_list | 104 self._current_result_json_dict['result_links'] = archived_results_file_list |
105 | 105 |
106 def _copy_dashboard_html(self): | 106 def _copy_dashboard_html(self): |
107 dashboard_file = self._filesystem.join(self._results_directory, 'dashboard.html') | 107 dashboard_file = self._filesystem.join(self._results_directory, 'dashboard.html') |
108 dashboard_html_file_path = self._filesystem.join(self._port.layout_tests_dir(), 'fast/harness/archived-results-dashboard.html') | 108 dashboard_html_file_path = self._filesystem.join( |
 | 109 self._port.layout_tests_dir(), 'fast/harness/archived-results-dashboard.html') |
109 if not self._filesystem.exists(dashboard_file): | 110 if not self._filesystem.exists(dashboard_file): |
110 if self._filesystem.exists(dashboard_html_file_path): | 111 if self._filesystem.exists(dashboard_html_file_path): |
111 self._filesystem.copyfile(dashboard_html_file_path, dashboard_file) | 112 self._filesystem.copyfile(dashboard_html_file_path, dashboard_file) |
112 | 113 |
113 def _initialize(self): | 114 def _initialize(self): |
114 file_list = self._filesystem.listdir(self._results_directory_path) | 115 file_list = self._filesystem.listdir(self._results_directory_path) |
115 results_directories = [] | 116 results_directories = [] |
116 for dir in file_list: | 117 for dir in file_list: |
117 full_dir_path = self._filesystem.join(self._results_directory_path, dir) | 118 full_dir_path = self._filesystem.join(self._results_directory_path, dir) |
118 if self._filesystem.isdir(full_dir_path): | 119 if self._filesystem.isdir(full_dir_path): |
(...skipping 18 matching lines...) |
137 self._old_failing_results_list.append(json.loads(json_string)) | 138 self._old_failing_results_list.append(json.loads(json_string)) |
138 json_string_full_result = self._filesystem.read_text_file(full_json_file_path) | 139 json_string_full_result = self._filesystem.read_text_file(full_json_file_path) |
139 self._old_full_results_list.append(json.loads(json_string_full_result)) | 140 self._old_full_results_list.append(json.loads(json_string_full_result)) |
140 self._copy_dashboard_html() | 141 self._copy_dashboard_html() |
141 | 142 |
142 def generate(self): | 143 def generate(self): |
143 self._initialize() | 144 self._initialize() |
144 | 145 |
145 # There must be at least one archived result to be processed | 146 # There must be at least one archived result to be processed |
146 if self._current_result_json_dict: | 147 if self._current_result_json_dict: |
147 process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, self._old_full_results_list) | 148 process_json_data = ProcessJsonData(self._current_result_json_dict, |
 | 149 self._old_failing_results_list, self._old_full_results_list) |
148 self._final_result = process_json_data.generate_archived_result() | 150 self._final_result = process_json_data.generate_archived_result() |
149 final_json = json.dumps(self._final_result) | 151 final_json = json.dumps(self._final_result) |
150 final_json = 'ADD_RESULTS(' + final_json + ');' | 152 final_json = 'ADD_RESULTS(' + final_json + ');' |
151 archived_results_file_path = self._filesystem.join(self._results_directory, 'archived_results.json') | 153 archived_results_file_path = self._filesystem.join(self._results_directory, 'archived_results.json') |
152 self._filesystem.write_text_file(archived_results_file_path, final_json) | 154 self._filesystem.write_text_file(archived_results_file_path, final_json) |
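
Note: a minimal sketch (not part of this patch) of the JSONP-style payload that generate() writes to <results_directory>/archived_results.json, assuming ProcessJsonData.generate_archived_result() returns a list of merged result dicts; the variable names and the archive directory name below are hypothetical, chosen only for illustration.

# Illustrative sketch, not code from the patch.
import json

example_result = [
    {
        # 'result_links' is built by _add_individual_result_links():
        # current 'results.html' first, then one entry per archived directory.
        'result_links': ['results.html', '2014-01-01-1/results.html'],
        # ...other fields merged from the old failing/full results JSON...
    },
]

# generate() wraps the serialized list in an ADD_RESULTS(...) call so the
# dashboard page can load it as a script.
payload = 'ADD_RESULTS(' + json.dumps(example_result) + ');'

# A consumer such as archived-results-dashboard.html could recover the JSON
# by stripping the wrapper before parsing.
recovered = json.loads(payload[len('ADD_RESULTS('):-len(');')])
assert recovered == example_result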