Chromium Code Reviews

Unified Diff: tools/rebaseline.py

Issue 16311011: rebaseline.py: try to download images from Google Storage before skia-autogen (Closed) Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: little_fixes Created 7 years, 6 months ago
 #!/usr/bin/python

 '''
 Copyright 2012 Google Inc.

 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 '''

 '''
 Rebaselines the given GM tests, on all bots and all configurations.
 Must be run from the gm-expected directory. If run from a git or SVN
 checkout, the files will be added to the staging area for commit.
 '''

 # System-level imports
 import argparse
 import os
+import re
 import subprocess
 import sys
 import urllib2

 # Imports from within Skia
 #
 # We need to add the 'gm' directory, so that we can import gm_json.py within
 # that directory. That script allows us to parse the actual-results.json file
 # written out by the GM tool.
 # Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
(...skipping 69 matching lines...)
         if not subdirs:
             self._subdirs = sorted(SUBDIR_MAPPING.keys())
             self._missing_json_is_fatal = False
         else:
             self._subdirs = subdirs
             self._missing_json_is_fatal = True
         self._json_base_url = json_base_url
         self._json_filename = json_filename
         self._dry_run = dry_run
         self._add_new = add_new
+        self._googlestorage_gm_actuals_root = (
+            'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
+        self._testname_pattern = re.compile(r'(\S+)_(\S+)\.png')
         self._is_svn_checkout = (
             os.path.exists('.svn') or
             os.path.exists(os.path.join(os.pardir, '.svn')))
         self._is_git_checkout = (
             os.path.exists('.git') or
             os.path.exists(os.path.join(os.pardir, '.git')))

     # If dry_run is False, execute subprocess.call(cmd).
     # If dry_run is True, print the command we would have otherwise run.
     # Raises a CommandFailedException if the command fails.
     def _Call(self, cmd):
         if self._dry_run:
             print '%s' % ' '.join(cmd)
             return
         if subprocess.call(cmd) != 0:
             raise CommandFailedException('error running command: ' +
                                          ' '.join(cmd))

+    # Download a single actual result from Google Storage, returning True if
+    # it succeeded.
+    def _DownloadFromGoogleStorage(self, infilename, outfilename, all_results):
+        match = self._testname_pattern.match(infilename)
+        if not match:
+            print '# unable to find test_name for infilename %s' % infilename
+            return False
+        test_name = match.group(1)
+        try:
+            hash_type, hash_value = all_results[infilename]
+        except KeyError:
+            print ('# unable to find filename %s in all_results dict' %
+                   infilename)
+            return False
+        url = '%s/%s/%s/%s.png' % (self._googlestorage_gm_actuals_root,
+                                   hash_type, test_name, hash_value)
+        try:
+            self._DownloadFile(source_url=url, dest_filename=outfilename)
+            return True
+        except CommandFailedException:
+            print '# Couldn\'t fetch gs_url %s' % url
+            return False
+
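To make the new URL scheme concrete: for infilename 'imageblur_565.png' whose all_results entry is [u'bitmap-64bitMD5', 3359963596899141322] (the sample values documented in _GetActualResults below), _testname_pattern yields test_name 'imageblur', so the method fetches:

    http://chromium-skia-gm.commondatastorage.googleapis.com/gm/bitmap-64bitMD5/imageblur/3359963596899141322.png
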
+    # Download a single actual result from skia-autogen, returning True if it
+    # succeeded.
+    def _DownloadFromAutogen(self, infilename, outfilename,
+                             expectations_subdir, builder_name):
+        url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
+               expectations_subdir + '/' + builder_name + '/' +
+               expectations_subdir + '/' + infilename)
+        try:
+            self._DownloadFile(source_url=url, dest_filename=outfilename)
+            return True
+        except CommandFailedException:
+            print '# Couldn\'t fetch autogen_url %s' % url
+            return False
+
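By contrast, the skia-autogen fallback addresses results by bot rather than by content hash; for the same file under expectations_subdir 'base-shuttle-win7-intel-angle' the URL takes the form:

    http://skia-autogen.googlecode.com/svn/gm-actual/base-shuttle-win7-intel-angle/<builder_name>/base-shuttle-win7-intel-angle/imageblur_565.png

where <builder_name> comes from SUBDIR_MAPPING.
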
     # Download a single file, raising a CommandFailedException if it fails.
     def _DownloadFile(self, source_url, dest_filename):
         # Download into a temporary file and then rename it afterwards,
         # so that we don't corrupt the existing file if it fails midway through.
         temp_filename = os.path.join(os.path.dirname(dest_filename),
                                      '.temp-' + os.path.basename(dest_filename))

         # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on
         # Unix) with a Python HTTP library (which should work cross-platform)
         self._Call(['curl', '--fail', '--silent', source_url,
                     '--output', temp_filename])
         self._Call(['mv', temp_filename, dest_filename])

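The TODO above needs nothing beyond the standard library this script already imports; a minimal cross-platform sketch (an illustration of one way to do it, not part of this patch):

    def _DownloadFile(self, source_url, dest_filename):
        # Download into a temporary file and then rename it afterwards,
        # so that we don't corrupt the existing file if it fails midway.
        temp_filename = os.path.join(os.path.dirname(dest_filename),
                                     '.temp-' + os.path.basename(dest_filename))
        try:
            contents = urllib2.urlopen(source_url).read()
        except urllib2.URLError:
            # URLError covers HTTPError too, mirroring curl's --fail behavior.
            raise CommandFailedException('error downloading ' + source_url)
        with open(temp_filename, 'wb') as outfile:
            outfile.write(contents)
        # os.rename is atomic on POSIX; on Windows it fails if the destination
        # already exists, so a real version may need to remove it first.
        os.rename(temp_filename, dest_filename)
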
     # Returns the full contents of a URL, as a single string.
     #
     # Unlike standard URL handling, we allow relative "file:" URLs;
     # for example, "file:one/two" resolves to the file ./one/two
     # (relative to the current working dir)
     def _GetContentsOfUrl(self, url):
         file_prefix = 'file:'
         if url.startswith(file_prefix):
             filename = url[len(file_prefix):]
             return open(filename, 'r').read()
         else:
             return urllib2.urlopen(url).read()

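The relative "file:" form is what allows pointing the JSON base URL (flag name inferred from args.json_base_url below) at local test data instead of a live bot; an illustrative call, with a hypothetical path:

    json_contents = self._GetContentsOfUrl('file:tools/tests/rebaseline/actual-results.json')
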
+    # Returns a dictionary of actual results from the actual-results.json file.
+    #
+    # The dictionary returned has this format:
+    # {
+    #   u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
+    #   u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
+    #   u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
+    # }
+    #
+    # If the JSON actual result summary file cannot be loaded, the behavior
+    # depends on self._missing_json_is_fatal:
+    # - if true: execution will halt with an exception
+    # - if false: we will log an error message but return an empty dictionary
+    #
+    # params:
+    #  json_url: URL pointing to a JSON actual result summary file
+    #  sections: a list of section names to include in the results, e.g.
+    #            [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
+    #             gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON];
+    #            if None, then include ALL sections.
+    def _GetActualResults(self, json_url, sections=None):
+        try:
+            json_contents = self._GetContentsOfUrl(json_url)
+        except (urllib2.HTTPError, IOError):
+            message = 'unable to load JSON summary URL %s' % json_url
+            if self._missing_json_is_fatal:
+                raise ValueError(message)
+            else:
+                print '# %s' % message
+                return {}
+
+        json_dict = gm_json.LoadFromString(json_contents)
+        results_to_return = {}
+        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
+        if not sections:
+            sections = actual_results.keys()
+        for section in sections:
+            section_results = actual_results[section]
+            if section_results:
+                results_to_return.update(section_results)
+        return results_to_return
+
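Pieced together from the traversal above, the summary the method expects is a dict of sections keyed by gm_json constants, each mapping an image filename to its [hash_type, hash_value] pair; an illustrative (not authoritative) skeleton:

    json_dict = {
        gm_json.JSONKEY_ACTUALRESULTS: {
            gm_json.JSONKEY_ACTUALRESULTS_FAILED: {
                u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
            },
            # A section may be present but empty; the
            # 'if section_results:' guard above skips those.
            gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON: None,
        },
    }
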
     # Returns a list of files that require rebaselining.
     #
     # Note that this returns a list of FILES, like this:
     #  ['imageblur_565.png', 'xfermodes_pdf.png']
     # rather than a list of TESTS, like this:
     #  ['imageblur', 'xfermodes']
     #
-    # If the JSON actual result summary file cannot be loaded, the behavior
-    # depends on self._missing_json_is_fatal:
-    # - if true: execution will halt with an exception
-    # - if false: we will log an error message but return an empty list so we
-    #   go on to the next platform
-    #
     # params:
     #  json_url: URL pointing to a JSON actual result summary file
     #  add_new: if True, then return files listed in any of these sections:
     #           - JSONKEY_ACTUALRESULTS_FAILED
     #           - JSONKEY_ACTUALRESULTS_NOCOMPARISON
     #           if False, then return files listed in these sections:
     #           - JSONKEY_ACTUALRESULTS_FAILED
     #
     def _GetFilesToRebaseline(self, json_url, add_new):
         if self._dry_run:
             print ''
             print '#'
         print ('# Getting files to rebaseline from JSON summary URL %s ...'
                % json_url)
-        try:
-            json_contents = self._GetContentsOfUrl(json_url)
-        except urllib2.HTTPError:
-            message = 'unable to load JSON summary URL %s' % json_url
-            if self._missing_json_is_fatal:
-                raise ValueError(message)
-            else:
-                print '# %s' % message
-                return []
-
-        json_dict = gm_json.LoadFromString(json_contents)
-        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
         sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
         if add_new:
             sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
-
-        files_to_rebaseline = []
-        for section in sections:
-            section_results = actual_results[section]
-            if section_results:
-                files_to_rebaseline.extend(section_results.keys())
-
+        results_to_rebaseline = self._GetActualResults(json_url=json_url,
+                                                       sections=sections)
+        files_to_rebaseline = results_to_rebaseline.keys()
+        files_to_rebaseline.sort()
         print '# ... found files_to_rebaseline %s' % files_to_rebaseline
         if self._dry_run:
             print '#'
         return files_to_rebaseline

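One nicety of the rewrite: sorting files_to_rebaseline makes the printed list deterministic, which presumably keeps the checked-in expected output under tools/tests/rebaseline/output/ stable across runs.
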
     # Rebaseline a single file.
     def _RebaselineOneFile(self, expectations_subdir, builder_name,
-                           infilename, outfilename):
+                           infilename, outfilename, all_results):
         if self._dry_run:
             print ''
         print '# ' + infilename
-        url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
-               expectations_subdir + '/' + builder_name + '/' +
-               expectations_subdir + '/' + infilename)

-        # Try to download this file, but if that fails, keep going...
+        # First try to download this result image from Google Storage.
+        # If that fails, try skia-autogen.
+        # If that fails too, just go on to the next file.
         #
         # This is not treated as a fatal failure because not all
         # platforms generate all configs (e.g., Android does not
         # generate PDF).
         #
-        # We could tweak the list of configs within this tool to
-        # reflect which combinations the bots actually generate, and
-        # then fail if any of those expected combinations are
-        # missing... but then this tool would become useless every
-        # time someone tweaked the configs on the bots without
-        # updating this script.
-        try:
-            self._DownloadFile(source_url=url, dest_filename=outfilename)
-        except CommandFailedException:
-            print '# Couldn\'t fetch ' + url
-            return
+        # TODO(epoger): Once we are downloading only files that the
+        # actual-results.json file told us to, this should become a
+        # fatal error. (If the actual-results.json file told us that
+        # the test failed with XXX results, we should be able to download
+        # those results every time.)
+        if not self._DownloadFromGoogleStorage(infilename=infilename,
+                                               outfilename=outfilename,
+                                               all_results=all_results):
+            if not self._DownloadFromAutogen(infilename=infilename,
+                                             outfilename=outfilename,
+                                             expectations_subdir=expectations_subdir,
+                                             builder_name=builder_name):
+                print '# Couldn\'t fetch infilename ' + infilename
+                return

         # Add this file to version control (if appropriate).
         if self._add_new:
             if self._is_svn_checkout:
                 cmd = ['svn', 'add', '--quiet', outfilename]
                 self._Call(cmd)
                 cmd = ['svn', 'propset', '--quiet', 'svn:mime-type',
                        'image/png', outfilename]
                 self._Call(cmd)
             elif self._is_git_checkout:
                 cmd = ['git', 'add', outfilename]
                 self._Call(cmd)

     # Rebaseline the given configs for a single test.
     #
     # params:
     #  expectations_subdir
     #  builder_name
     #  test: a single test to rebaseline
+    #  all_results: a dictionary of all actual results
-    def _RebaselineOneTest(self, expectations_subdir, builder_name, test):
+    def _RebaselineOneTest(self, expectations_subdir, builder_name, test,
+                           all_results):
         if self._configs:
             configs = self._configs
         else:
             if expectations_subdir == 'base-shuttle-win7-intel-angle':
                 configs = ['angle', 'anglemsaa16']
             else:
                 configs = ['565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
                            'msaa4']
         if self._dry_run:
             print ''
             print '# ' + expectations_subdir + ':'
         for config in configs:
             infilename = test + '_' + config + '.png'
             outfilename = os.path.join(expectations_subdir, infilename)
             self._RebaselineOneFile(expectations_subdir=expectations_subdir,
                                     builder_name=builder_name,
                                     infilename=infilename,
-                                    outfilename=outfilename)
+                                    outfilename=outfilename,
+                                    all_results=all_results)

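Concretely, rebaselining test 'imageblur' on a non-ANGLE platform expands to seven _RebaselineOneFile calls, one per default config:

    imageblur_565.png    imageblur_8888.png   imageblur_gpu.png   imageblur_pdf.png
    imageblur_mesa.png   imageblur_msaa16.png  imageblur_msaa4.png

each looked up in all_results and written into the platform's expectations_subdir.
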
     # Rebaseline all platforms/tests/types we specified in the constructor.
     def RebaselineAll(self):
         for subdir in self._subdirs:
             if not subdir in SUBDIR_MAPPING.keys():
                 raise Exception(('unrecognized platform subdir "%s"; ' +
                                  'should be one of %s') % (
                                      subdir, SUBDIR_MAPPING.keys()))
             builder_name = SUBDIR_MAPPING[subdir]
+            json_url = '/'.join([self._json_base_url,
+                                 subdir, builder_name, subdir,
+                                 self._json_filename])
+            all_results = self._GetActualResults(json_url=json_url)
+
             if self._tests:
                 for test in self._tests:
                     self._RebaselineOneTest(expectations_subdir=subdir,
                                             builder_name=builder_name,
-                                            test=test)
+                                            test=test, all_results=all_results)
             else:  # get the raw list of files that need rebaselining from JSON
-                json_url = '/'.join([self._json_base_url,
-                                     subdir, builder_name, subdir,
-                                     self._json_filename])
                 filenames = self._GetFilesToRebaseline(json_url=json_url,
                                                        add_new=self._add_new)
                 for filename in filenames:
                     outfilename = os.path.join(subdir, filename)
                     self._RebaselineOneFile(expectations_subdir=subdir,
                                             builder_name=builder_name,
                                             infilename=filename,
-                                            outfilename=outfilename)
+                                            outfilename=outfilename,
+                                            all_results=all_results)

 # main...

 parser = argparse.ArgumentParser()
 parser.add_argument('--add-new', action='store_true',
                     help='in addition to the standard behavior of ' +
                          'updating expectations for failing tests, add ' +
                          'expectations for tests which don\'t have expectations ' +
                          'yet.')
 parser.add_argument('--configs', metavar='CONFIG', nargs='+',
(...skipping 22 matching lines...)
                     '"--tests aaclip bigmatrix"; if unspecified, then all ' +
                     'failing tests (according to the actual-results.json ' +
                     'file) will be rebaselined.')
 args = parser.parse_args()
 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs,
                           subdirs=args.subdirs, dry_run=args.dry_run,
                           json_base_url=args.json_base_url,
                           json_filename=args.json_filename,
                           add_new=args.add_new)
 rebaseliner.RebaselineAll()
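
For reference, a dry-run invocation matching the help text above (the --dry-run spelling is inferred from args.dry_run, so treat it as probable rather than verified):

    # run from the gm-expected directory, per the module docstring
    python path/to/skia/tools/rebaseline.py --dry-run --tests aaclip bigmatrix

Under --dry-run the tool prints the curl/svn/git commands it would have executed, prefixed with '#' comments; the expected output checked in under tools/tests/rebaseline/output/ appears to compare against exactly that text.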