OLD | NEW |
---|---|
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 | 2 |
3 ''' | 3 ''' |
4 Copyright 2012 Google Inc. | 4 Copyright 2012 Google Inc. |
5 | 5 |
6 Use of this source code is governed by a BSD-style license that can be | 6 Use of this source code is governed by a BSD-style license that can be |
7 found in the LICENSE file. | 7 found in the LICENSE file. |
8 ''' | 8 ''' |
9 | 9 |
10 ''' | 10 ''' |
(...skipping 131 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
142 # for example, "file:one/two" resolves to the file ./one/two | 142 # for example, "file:one/two" resolves to the file ./one/two |
143 # (relative to current working dir) | 143 # (relative to current working dir) |
144 def _GetContentsOfUrl(self, url): | 144 def _GetContentsOfUrl(self, url): |
145 file_prefix = 'file:' | 145 file_prefix = 'file:' |
146 if url.startswith(file_prefix): | 146 if url.startswith(file_prefix): |
147 filename = url[len(file_prefix):] | 147 filename = url[len(file_prefix):] |
148 return open(filename, 'r').read() | 148 return open(filename, 'r').read() |
149 else: | 149 else: |
150 return urllib2.urlopen(url).read() | 150 return urllib2.urlopen(url).read() |
151 | 151 |
def _GetActualResults(self, json_url, sections=None):
    """Return a dictionary of actual results from a JSON summary file.

    The returned dictionary maps image filename to checksum entry, e.g.:
      {
        u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
        u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
        u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
      }

    If the JSON actual-result summary file cannot be loaded, the behavior
    depends on self._missing_json_is_fatal:
    - if true: execution halts with a ValueError
    - if false: an error line is printed and an empty dictionary returned

    Args:
      json_url: URL pointing to a JSON actual-result summary file.
      sections: list of section names to include in the results, e.g.
          [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
           gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON];
          if None, include ALL sections.
    """
    try:
        json_contents = self._GetContentsOfUrl(json_url)
    except urllib2.HTTPError:
        message = 'unable to load JSON summary URL %s' % json_url
        if not self._missing_json_is_fatal:
            print('# %s' % message)
            return {}
        raise ValueError(message)

    actual_results = gm_json.LoadFromString(
        json_contents)[gm_json.JSONKEY_ACTUALRESULTS]
    # Empty/None sections means "everything the summary contains".
    combined_results = {}
    for section in (sections or actual_results.keys()):
        section_results = actual_results[section]
        if section_results:
            combined_results.update(section_results)
    return combined_results
193 | |
def _GetFilesToRebaseline(self, json_url, add_new):
    """Return a sorted list of image FILES that require rebaselining.

    Note that this returns a list of FILES, like this:
      ['imageblur_565.png', 'xfermodes_pdf.png']
    rather than a list of TESTS, like this:
      ['imageblur', 'xfermodes']

    Args:
      json_url: URL pointing to a JSON actual-result summary file.
      add_new: if True, return files listed in either of these sections:
                 - JSONKEY_ACTUALRESULTS_FAILED
                 - JSONKEY_ACTUALRESULTS_NOCOMPARISON
               if False, return only files listed in:
                 - JSONKEY_ACTUALRESULTS_FAILED
    """
    if self._dry_run:
        print('')
        print('#')
    print('# Getting files to rebaseline from JSON summary URL %s ...'
          % json_url)
    sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
    if add_new:
        sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
    results_to_rebaseline = self._GetActualResults(json_url=json_url,
                                                   sections=sections)
    # sorted() yields a new sorted list on both Python 2 and Python 3;
    # the keys()-then-.sort() pattern breaks on Python 3 dict views.
    files_to_rebaseline = sorted(results_to_rebaseline)
    print('# ... found files_to_rebaseline %s' % files_to_rebaseline)
    if self._dry_run:
        print('#')
    return files_to_rebaseline
205 | 226 |
206 # Rebaseline a single file. | 227 # Rebaseline a single file. |
207 def _RebaselineOneFile(self, expectations_subdir, builder_name, | 228 def _RebaselineOneFile(self, expectations_subdir, builder_name, |
208 infilename, outfilename): | 229 infilename, outfilename): |
209 if self._dry_run: | 230 if self._dry_run: |
210 print '' | 231 print '' |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
270 outfilename=outfilename) | 291 outfilename=outfilename) |
271 | 292 |
def RebaselineAll(self):
    """Rebaseline all platforms/tests/types specified in the constructor.

    For each requested platform subdir: if specific tests were given,
    rebaseline those; otherwise pull the list of failing files from the
    JSON actual-result summary and rebaseline each file.

    Raises:
      Exception: if any requested subdir is not in SUBDIR_MAPPING.
    """
    for subdir in self._subdirs:
        # Membership test directly against the dict; building .keys()
        # first was redundant.
        if subdir not in SUBDIR_MAPPING:
            raise Exception(('unrecognized platform subdir "%s"; ' +
                             'should be one of %s') % (
                                 subdir, SUBDIR_MAPPING.keys()))
        builder_name = SUBDIR_MAPPING[subdir]
        json_url = '/'.join([self._json_base_url,
                             subdir, builder_name, subdir,
                             self._json_filename])

        if self._tests:
            # Rebaseline only the explicitly requested tests.
            for test in self._tests:
                self._RebaselineOneTest(expectations_subdir=subdir,
                                        builder_name=builder_name,
                                        test=test)
        else:
            # Get the raw list of files that need rebaselining from JSON.
            filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                   add_new=self._add_new)
            for filename in filenames:
                outfilename = os.path.join(subdir, filename)
                self._RebaselineOneFile(expectations_subdir=subdir,
                                        builder_name=builder_name,
                                        infilename=filename,
                                        outfilename=outfilename)
297 | 319 |
298 # main... | 320 # main... |
(...skipping 30 matching lines...) Expand all Loading... | |
329 '"--tests aaclip bigmatrix"; if unspecified, then all ' + | 351 '"--tests aaclip bigmatrix"; if unspecified, then all ' + |
330 'failing tests (according to the actual-results.json ' + | 352 'failing tests (according to the actual-results.json ' + |
331 'file) will be rebaselined.') | 353 'file) will be rebaselined.') |
332 args = parser.parse_args() | 354 args = parser.parse_args() |
333 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs, | 355 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs, |
334 subdirs=args.subdirs, dry_run=args.dry_run, | 356 subdirs=args.subdirs, dry_run=args.dry_run, |
335 json_base_url=args.json_base_url, | 357 json_base_url=args.json_base_url, |
336 json_filename=args.json_filename, | 358 json_filename=args.json_filename, |
337 add_new=args.add_new) | 359 add_new=args.add_new) |
338 rebaseliner.RebaselineAll() | 360 rebaseliner.RebaselineAll() |
OLD | NEW |