| OLD | NEW |
| 1 # Copyright (C) 2014 Google Inc. All rights reserved. | 1 # Copyright (C) 2014 Google Inc. All rights reserved. |
| 2 # | 2 # |
| 3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
| 4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
| 5 # met: | 5 # met: |
| 6 # | 6 # |
| 7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
| 8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
| 9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
| 10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
| (...skipping 13 matching lines...) |
| 24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 28 | 28 |
| 29 import json | 29 import json |
| 30 import logging | 30 import logging |
| 31 | 31 |
| 32 | 32 |
| 33 class ProcessJsonData(object): | 33 class ProcessJsonData(object): |
| 34 | |
| 35 def __init__(self, current_result_json_dict, old_failing_results_list, old_full_results_list): | 34 def __init__(self, current_result_json_dict, old_failing_results_list, old_full_results_list): |
| 36 self._current_result_json_dict = current_result_json_dict | 35 self._current_result_json_dict = current_result_json_dict |
| 37 self._old_failing_results_list = old_failing_results_list | 36 self._old_failing_results_list = old_failing_results_list |
| 38 self._old_full_results_list = old_full_results_list | 37 self._old_full_results_list = old_full_results_list |
| 39 self._final_result = [] | 38 self._final_result = [] |
| 40 | 39 |
| 41 def _get_test_result(self, test_result_data): | 40 def _get_test_result(self, test_result_data): |
| 42 actual = test_result_data['actual'] | 41 actual = test_result_data['actual'] |
| 43 expected = test_result_data['expected'] | 42 expected = test_result_data['expected'] |
| 44 if actual == 'SKIP': | 43 if actual == 'SKIP': |
| (...skipping 35 matching lines...) |
| 80 self._add_archived_result(json_object, row) | 79 self._add_archived_result(json_object, row) |
| 81 return | 80 return |
| 82 | 81 |
| 83 def generate_archived_result(self): | 82 def generate_archived_result(self): |
| 84 for key in self._current_result_json_dict["tests"]: | 83 for key in self._current_result_json_dict["tests"]: |
| 85 self._process_json_object(self._current_result_json_dict["tests"][key], [key]) | 84 self._process_json_object(self._current_result_json_dict["tests"][key], [key]) |
| 86 return self._current_result_json_dict | 85 return self._current_result_json_dict |
| 87 | 86 |
| 88 | 87 |
| 89 class DashBoardGenerator(object): | 88 class DashBoardGenerator(object): |
| 90 | |
| 91 def __init__(self, port): | 89 def __init__(self, port): |
| 92 self._port = port | 90 self._port = port |
| 93 self._filesystem = port.host.filesystem | 91 self._filesystem = port.host.filesystem |
| 94 self._results_directory = self._port.results_directory() | 92 self._results_directory = self._port.results_directory() |
| 95 self._results_directory_path = self._filesystem.dirname(self._results_directory) | 93 self._results_directory_path = self._filesystem.dirname(self._results_directory) |
| 96 self._current_result_json_dict = {} | 94 self._current_result_json_dict = {} |
| 97 self._old_failing_results_list = [] | 95 self._old_failing_results_list = [] |
| 98 self._old_full_results_list = [] | 96 self._old_full_results_list = [] |
| 99 self._final_result = [] | 97 self._final_result = [] |
| 100 | 98 |
| 101 def _add_individual_result_links(self, results_directories): | 99 def _add_individual_result_links(self, results_directories): |
| 102 archived_results_file_list = [(file + '/results.html') for file in results_directories] | 100 archived_results_file_list = [(file + '/results.html') for file in results_directories] |
| 103 archived_results_file_list.insert(0, 'results.html') | 101 archived_results_file_list.insert(0, 'results.html') |
| 104 self._current_result_json_dict['result_links'] = archived_results_file_list | 102 self._current_result_json_dict['result_links'] = archived_results_file_list |
| 105 | 103 |
| 106 def _copy_dashboard_html(self): | 104 def _copy_dashboard_html(self): |
| 107 dashboard_file = self._filesystem.join(self._results_directory, 'dashboard.html') | 105 dashboard_file = self._filesystem.join(self._results_directory, 'dashboard.html') |
| 108 dashboard_html_file_path = self._filesystem.join(self._port.layout_tests_dir(), 'fast/harness/archived-results-dashboard.html') | 106 dashboard_html_file_path = self._filesystem.join(self._port.layout_tests_dir(), |
| | 107 'fast/harness/archived-results-dashboard.html') |
| 109 if not self._filesystem.exists(dashboard_file): | 108 if not self._filesystem.exists(dashboard_file): |
| 110 if self._filesystem.exists(dashboard_html_file_path): | 109 if self._filesystem.exists(dashboard_html_file_path): |
| 111 self._filesystem.copyfile(dashboard_html_file_path, dashboard_file) | 110 self._filesystem.copyfile(dashboard_html_file_path, dashboard_file) |
| 112 | 111 |
| 113 def _initialize(self): | 112 def _initialize(self): |
| 114 file_list = self._filesystem.listdir(self._results_directory_path) | 113 file_list = self._filesystem.listdir(self._results_directory_path) |
| 115 results_directories = [] | 114 results_directories = [] |
| 116 for dir in file_list: | 115 for dir in file_list: |
| 117 full_dir_path = self._filesystem.join(self._results_directory_path, dir) | 116 full_dir_path = self._filesystem.join(self._results_directory_path, dir) |
| 118 if self._filesystem.isdir(full_dir_path): | 117 if self._filesystem.isdir(full_dir_path): |
| 119 if self._results_directory in full_dir_path: | 118 if self._results_directory in full_dir_path: |
| 120 results_directories.append(full_dir_path) | 119 results_directories.append(full_dir_path) |
| 121 results_directories.sort(reverse=True, key=lambda x: self._filesystem.mtime(x)) | 120 results_directories.sort(reverse=True, key=lambda x: self._filesystem.mtime(x)) |
| 122 current_failing_results_json_file = self._filesystem.join(results_directories[0], 'failing_results.json') | 121 current_failing_results_json_file = self._filesystem.join(results_directories[0], 'failing_results.json') |
| 123 input_json_string = self._filesystem.read_text_file(current_failing_results_json_file) | 122 input_json_string = self._filesystem.read_text_file(current_failing_results_json_file) |
| 124 input_json_string = input_json_string[12:-2] # Remove preceding string ADD_RESULTS( and ); at the end | 123 input_json_string = input_json_string[12:-2] # Remove preceding string ADD_RESULTS( and ); at the end |
| 125 self._current_result_json_dict['tests'] = json.loads(input_json_string)['tests'] | 124 self._current_result_json_dict['tests'] = json.loads(input_json_string)['tests'] |
| 126 results_directories = results_directories[1:] | 125 results_directories = results_directories[1:] |
| 127 | 126 |
| 128 # To add hyperlink to individual results.html | 127 # To add hyperlink to individual results.html |
| 129 self._add_individual_result_links(results_directories) | 128 self._add_individual_result_links(results_directories) |
| 130 | 129 |
| 131 # Load the remaining stale layout test results Json's to create the dashboard | 130 # Load the remaining stale layout test results Json's to create the dashboard |
| 132 for json_file in results_directories: | 131 for json_file in results_directories: |
| 133 failing_json_file_path = self._filesystem.join(json_file, 'failing_results.json') | 132 failing_json_file_path = self._filesystem.join(json_file, 'failing_results.json') |
| 134 full_json_file_path = self._filesystem.join(json_file, 'full_results.json') | 133 full_json_file_path = self._filesystem.join(json_file, 'full_results.json') |
| 135 json_string = self._filesystem.read_text_file(failing_json_file_path) | 134 json_string = self._filesystem.read_text_file(failing_json_file_path) |
| 136 json_string = json_string[12:-2] # Remove preceding string ADD_RESULTS( and ); at the end | 135 json_string = json_string[12:-2] # Remove preceding string ADD_RESULTS( and ); at the end |
| 137 self._old_failing_results_list.append(json.loads(json_string)) | 136 self._old_failing_results_list.append(json.loads(json_string)) |
| 138 json_string_full_result = self._filesystem.read_text_file(full_json_file_path) | 137 json_string_full_result = self._filesystem.read_text_file(full_json_file_path) |
| 139 self._old_full_results_list.append(json.loads(json_string_full_result)) | 138 self._old_full_results_list.append(json.loads(json_string_full_result)) |
| 140 self._copy_dashboard_html() | 139 self._copy_dashboard_html() |
| 141 | 140 |
| 142 def generate(self): | 141 def generate(self): |
| 143 self._initialize() | 142 self._initialize() |
| 144 | 143 |
| 145 # There must be at least one archived result to be processed | 144 # There must be at least one archived result to be processed |
| 146 if self._current_result_json_dict: | 145 if self._current_result_json_dict: |
| 147 process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, self._old_full_results_list) | 146 process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, |
| | 147 self._old_full_results_list) |
| 148 self._final_result = process_json_data.generate_archived_result() | 148 self._final_result = process_json_data.generate_archived_result() |
| 149 final_json = json.dumps(self._final_result) | 149 final_json = json.dumps(self._final_result) |
| 150 final_json = 'ADD_RESULTS(' + final_json + ');' | 150 final_json = 'ADD_RESULTS(' + final_json + ');' |
| 151 archived_results_file_path = self._filesystem.join(self._results_directory, 'archived_results.json') | 151 archived_results_file_path = self._filesystem.join(self._results_directory, 'archived_results.json') |
| 152 self._filesystem.write_text_file(archived_results_file_path, final_json) | 152 self._filesystem.write_text_file(archived_results_file_path, final_json) |
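Note on the `[12:-2]` slices in `_initialize`: `failing_results.json` is stored as a JSONP-style payload of the form `ADD_RESULTS({...});`, so the code strips the 12-character `ADD_RESULTS(` prefix and the trailing `);` before parsing, and `generate()` re-adds the same wrapper when writing `archived_results.json`. A minimal sketch of that assumption (the helper name is illustrative, not part of this patch):

```python
import json

def strip_add_results_wrapper(text):
    # failing_results.json looks like: ADD_RESULTS({"tests": {...}, ...});
    # len('ADD_RESULTS(') == 12 and the trailing ');' is 2 characters,
    # which is exactly what the [12:-2] slice in _initialize() relies on.
    return json.loads(text[12:-2])

# Round-trip the wrapper the same way generate() writes it back out.
payload = 'ADD_RESULTS(' + json.dumps({'tests': {}}) + ');'
assert strip_add_results_wrapper(payload) == {'tests': {}}
```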