Chromium Code Reviews

Unified Diff: Tools/Scripts/webkitpy/layout_tests/generate_results_dashboard.py

Issue 489093002: Enabling archiving of test results by default in run-webkit-tests. (Closed) Base URL: https://chromium.googlesource.com/chromium/blink.git@master
Patch Set: Added unit_tests | Created 6 years, 3 months ago
 # Copyright (C) 2014 Google Inc. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions are
 # met:
 #
 #     * Redistributions of source code must retain the above copyright
 # notice, this list of conditions and the following disclaimer.
 #     * Redistributions in binary form must reproduce the above
 # copyright notice, this list of conditions and the following disclaimer
 # in the documentation and/or other materials provided with the
 # distribution.
 #     * Neither the name of Google Inc. nor the names of its
 # contributors may be used to endorse or promote products derived from
 # this software without specific prior written permission.
 #
 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

 import json
+import logging


 class ProcessJsonData(object):

     def __init__(self, current_result_json_dict, old_failing_results_list, old_full_results_list):
         self._current_result_json_dict = current_result_json_dict
         self._old_failing_results_list = old_failing_results_list
         self._old_full_results_list = old_full_results_list
         self._final_result = []

(...skipping 69 matching lines...)
         if self._filesystem.exists(dashboard_html_file_path):
             self._filesystem.copyfile(dashboard_html_file_path, dashboard_file)

     def _initialize(self):
         file_list = self._filesystem.listdir(self._results_directory_path)
         results_directories = []
         for dir in file_list:
             if self._filesystem.isdir(self._filesystem.join(self._results_directory_path, dir)):
                 results_directories.append(self._filesystem.join(self._results_directory_path, dir))
         results_directories.sort(reverse=True, key=lambda x: self._filesystem.mtime(x))
-        with open(self._filesystem.join(results_directories[0], 'failing_results.json'), "r") as file:
-            input_json_string = file.readline()
+        current_failing_results_json_file = self._filesystem.join(results_directories[0], 'failing_results.json')
+        try:
+            file = open(current_failing_results_json_file, "r")
+        except IOError:
+            logging.debug("Could not generate archived results dashboard failing_results.json does not exist")
+            return

    Dirk Pranke 2014/09/05 21:07:31: I don't think we should catch this error. It shoul…
    patro 2014/09/06 06:50:55: Done.

+        input_json_string = file.readline()
         input_json_string = input_json_string[12:-2]  # Remove preceding string ADD_RESULTS( and ); at the end
         self._current_result_json_dict['tests'] = json.loads(input_json_string)['tests']
         results_directories = results_directories[1:]

         # To add hyperlink to individual results.html
         self._add_individual_result_links(results_directories)

         # Load the remaining stale layout test results JSONs to create the dashboard
         for json_file in results_directories:
-            with open(self._filesystem.join(json_file, 'failing_results.json'), "r") as file:
+            failing_json_file_path = self._filesystem.join(json_file, 'failing_results.json')
+            full_json_file_path = self._filesystem.join(json_file, 'full_results.json')
+            with open(failing_json_file_path, "r") as file:
                 json_string = file.readline()
             json_string = json_string[12:-2]  # Remove preceding string ADD_RESULTS( and ); at the end
             self._old_failing_results_list.append(json.loads(json_string))

-            with open(self._filesystem.join(json_file, 'full_results.json'), "r") as full_file:
+            with open(full_json_file_path, "r") as full_file:
                 json_string_full_result = full_file.readline()
             self._old_full_results_list.append(json.loads(json_string_full_result))
         self._copy_dashboard_html()

     def generate(self):
         self._initialize()
-        process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, self._old_full_results_list)
-        self._final_result = process_json_data.generate_archived_result()
-        final_json = json.dumps(self._final_result)
-        final_json = 'ADD_RESULTS(' + final_json + ');'
-        with open(self._filesystem.join(self._results_directory, 'archived_results.json'), "w") as file:
-            file.write(final_json)
+
+        # There must be at least one archived result to be processed
+        if self._current_result_json_dict:
+            process_json_data = ProcessJsonData(self._current_result_json_dict, self._old_failing_results_list, self._old_full_results_list)
+            self._final_result = process_json_data.generate_archived_result()
+            final_json = json.dumps(self._final_result)
+            final_json = 'ADD_RESULTS(' + final_json + ');'
+            with open(self._filesystem.join(self._results_directory, 'archived_results.json'), "w") as file:
+                file.write(final_json)
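
Dirk Pranke's comment above argues against catching the missing-file error, and the "Done." reply indicates the try/except was reworked in a later patch set that is not shown here. As a rough sketch only, assuming the simplest reading of that feedback, the current results could be read without the except clause so a missing file surfaces as an exception. The standalone read_current_failing_results helper and its parameters are illustrative, not part of the CL:

    import json

    def read_current_failing_results(filesystem, results_directory):
        # Sketch of the reviewer's suggestion: no try/except around the open, so a
        # missing failing_results.json raises IOError and the failure is visible
        # instead of being logged at debug level and silently returning.
        failing_results_path = filesystem.join(results_directory, 'failing_results.json')
        with open(failing_results_path, "r") as results_file:
            input_json_string = results_file.readline()
        # Same unwrapping as in _initialize(): strip 'ADD_RESULTS(' and the trailing ');'.
        return json.loads(input_json_string[12:-2])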
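The [12:-2] slice that appears in both the old and new code relies on failing_results.json being a JSONP-style payload wrapped in ADD_RESULTS( ... );. Since len('ADD_RESULTS(') is 12 and the trailing ');' is two characters, dropping both ends leaves plain JSON. A small illustration with a made-up payload:

    import json

    # Made-up single-line payload in the shape the dashboard code expects.
    wrapped = 'ADD_RESULTS({"tests": {"dummy.html": {"expected": "PASS"}}});'

    assert len('ADD_RESULTS(') == 12
    payload = wrapped[12:-2]       # drop 'ADD_RESULTS(' and the trailing ');'
    results = json.loads(payload)  # ordinary JSON from here on
    print(results['tests'])        # {'dummy.html': {'expected': 'PASS'}}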
