Index: tools/rebaseline.py
===================================================================
--- tools/rebaseline.py (revision 9527)
+++ tools/rebaseline.py (working copy)
@@ -149,6 +149,48 @@
     else:
       return urllib2.urlopen(url).read()
+  # Returns a dictionary of actual results from actual-results.json file.
+  #
+  # The dictionary returned has this format:
+  # {
+  #   u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
+  #   u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
+  #   u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
+  # }
+  #
+  # If the JSON actual result summary file cannot be loaded, the behavior
+  # depends on self._missing_json_is_fatal:
+  # - if true: execution will halt with an exception
+  # - if false: we will log an error message but return an empty dictionary
+  #
+  # params:
+  #  json_url: URL pointing to a JSON actual result summary file
+  #  sections: a list of section names to include in the results, e.g.
+  #            [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
+  #             gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON] ;
+  #            if None, then include ALL sections.
+  def _GetActualResults(self, json_url, sections=None):
+    try:
+      json_contents = self._GetContentsOfUrl(json_url)
+    except urllib2.HTTPError:
+      message = 'unable to load JSON summary URL %s' % json_url
+      if self._missing_json_is_fatal:
+        raise ValueError(message)
+      else:
+        print '# %s' % message
+        return {}
+
+    json_dict = gm_json.LoadFromString(json_contents)
+    results_to_return = {}
+    actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
+    if not sections:
+      sections = actual_results.keys()
+    for section in sections:
+      section_results = actual_results[section]
+      if section_results:
+        results_to_return.update(section_results)
+    return results_to_return
+
   # Returns a list of files that require rebaselining.
   #
   # Note that this returns a list of FILES, like this:
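
For context on what the new helper returns, here is a minimal standalone sketch of the same merge, outside the patch. It assumes the plain json module in place of gm_json.LoadFromString, and uses the literal strings 'actual-results', 'failed' and 'no-comparison' as stand-ins for the gm_json.JSONKEY_* constants; the checksum values are the sample entries from the comment above.

import json

# Stand-ins for the gm_json constants (illustrative only).
JSONKEY_ACTUALRESULTS = 'actual-results'
JSONKEY_ACTUALRESULTS_FAILED = 'failed'
JSONKEY_ACTUALRESULTS_NOCOMPARISON = 'no-comparison'

SUMMARY = '''
{
  "actual-results": {
    "failed": {
      "imageblur_565.png": ["bitmap-64bitMD5", 3359963596899141322],
      "imageblur_8888.png": ["bitmap-64bitMD5", 4217923806027861152]
    },
    "no-comparison": {
      "shadertext3_8888.png": ["bitmap-64bitMD5", 3713708307125704716]
    },
    "succeeded": null
  }
}
'''

def get_actual_results(json_contents, sections=None):
  # Same shape as _GetActualResults(): merge the requested sections into one
  # flat {image filename: [hash type, hash value]} dictionary.
  actual_results = json.loads(json_contents)[JSONKEY_ACTUALRESULTS]
  if not sections:
    sections = actual_results.keys()
  results_to_return = {}
  for section in sections:
    section_results = actual_results[section]
    if section_results:  # skip sections that are empty or null
      results_to_return.update(section_results)
  return results_to_return

print(get_actual_results(
    SUMMARY,
    sections=[JSONKEY_ACTUALRESULTS_FAILED, JSONKEY_ACTUALRESULTS_NOCOMPARISON]))
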
@@ -156,12 +198,6 @@
   # rather than a list of TESTS, like this:
   #  ['imageblur', 'xfermodes']
   #
-  # If the JSON actual result summary file cannot be loaded, the behavior
-  # depends on self._missing_json_is_fatal:
-  # - if true: execution will halt with an exception
-  # - if false: we will log an error message but return an empty list so we
-  #   go on to the next platform
-  #
   # params:
   #  json_url: URL pointing to a JSON actual result summary file
   #  add_new: if True, then return files listed in any of these sections:
@@ -176,28 +212,13 @@
     print '#'
     print ('# Getting files to rebaseline from JSON summary URL %s ...'
            % json_url)
-    try:
-      json_contents = self._GetContentsOfUrl(json_url)
-    except urllib2.HTTPError:
-      message = 'unable to load JSON summary URL %s' % json_url
-      if self._missing_json_is_fatal:
-        raise ValueError(message)
-      else:
-        print '# %s' % message
-        return []
-
-    json_dict = gm_json.LoadFromString(json_contents)
-    actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
     sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
     if add_new:
       sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
-
-    files_to_rebaseline = []
-    for section in sections:
-      section_results = actual_results[section]
-      if section_results:
-        files_to_rebaseline.extend(section_results.keys())
-
+    results_to_rebaseline = self._GetActualResults(json_url=json_url,
+                                                   sections=sections)
+    files_to_rebaseline = results_to_rebaseline.keys()
+    files_to_rebaseline.sort()
     print '# ... found files_to_rebaseline %s' % files_to_rebaseline
     if self._dry_run:
       print '#'
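
The net effect of the replacement above: _GetFilesToRebaseline() now takes the keys of the merged dictionary returned by _GetActualResults() and sorts them, so the reported list no longer depends on dict iteration order. A small sketch of that step, using hypothetical checksum values and the Python 2 idiom from the patch (sorted(results_to_rebaseline) is the equivalent spelling, and also works on Python 3):

# Hypothetical merged results, shaped like the dict _GetActualResults() returns.
results_to_rebaseline = {
  'xfermodes_8888.png': ['bitmap-64bitMD5', 1111111111111111111],
  'imageblur_565.png': ['bitmap-64bitMD5', 3359963596899141322],
}

files_to_rebaseline = results_to_rebaseline.keys()  # Python 2: a plain list
files_to_rebaseline.sort()                          # deterministic ordering

assert files_to_rebaseline == sorted(results_to_rebaseline)
print(files_to_rebaseline)  # ['imageblur_565.png', 'xfermodes_8888.png']
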
@@ -277,15 +298,16 @@
                         'should be one of %s') % (
                             subdir, SUBDIR_MAPPING.keys()))
       builder_name = SUBDIR_MAPPING[subdir]
+      json_url = '/'.join([self._json_base_url,
+                           subdir, builder_name, subdir,
+                           self._json_filename])
+
       if self._tests:
         for test in self._tests:
           self._RebaselineOneTest(expectations_subdir=subdir,
                                   builder_name=builder_name,
                                   test=test)
       else:  # get the raw list of files that need rebaselining from JSON
-        json_url = '/'.join([self._json_base_url,
-                             subdir, builder_name, subdir,
-                             self._json_filename])
         filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                add_new=self._add_new)
         for filename in filenames:
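
In the last hunk, json_url is now assembled once per subdir, before the if/else, rather than inside the else branch. A sketch of the resulting URL shape, with made-up values; the base URL, subdir, and builder name below are placeholders rather than the tool's real defaults, and only the '/'.join call and its argument order come from the patch:

# Placeholder values, for illustration only.
json_base_url = 'http://example.com/actualGMs'        # hypothetical base URL
json_filename = 'actual-results.json'                 # summary filename named in the comments above
subdir = 'base-android-nexus-10'                      # hypothetical expectations subdir
builder_name = 'Test-Android-Nexus10-Arm7-Debug'      # hypothetical builder name

json_url = '/'.join([json_base_url, subdir, builder_name, subdir, json_filename])
print(json_url)
# http://example.com/actualGMs/base-android-nexus-10/Test-Android-Nexus10-Arm7-Debug/base-android-nexus-10/actual-results.json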