Chromium Code Reviews
Unified Diff: tools/rebaseline.py

Issue 17379004: rebaseline.py: split image-based rebaselining, which will go away soon, into its own script (Closed)
Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: Created 7 years, 6 months ago
 #!/usr/bin/python
 
 '''
 Copyright 2012 Google Inc.
 
 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 '''
 
 '''
(...skipping 59 matching lines...)
 
 class CommandFailedException(Exception):
     pass
 
 class Rebaseliner(object):
 
     # params:
     #  json_base_url: base URL from which to read json_filename
     #  json_filename: filename (under json_base_url) from which to read a
     #                 summary of results; typically "actual-results.json"
-    #  subdirs: which platform subdirectories to rebaseline; if not specified,
-    #           rebaseline all platform subdirectories
     #  tests: list of tests to rebaseline, or None if we should rebaseline
     #         whatever files the JSON results summary file tells us to
     #  configs: which configs to run for each test; this should only be
     #           specified if the list of tests was also specified (otherwise,
     #           the JSON file will give us test names and configs)
     #  dry_run: if True, instead of actually downloading files or adding
     #           files to checkout, display a list of operations that
     #           we would normally perform
     #  add_new: if True, add expectations for tests which don't have any yet
+    #  missing_json_is_fatal: whether to halt execution if we cannot read a
+    #                         JSON actual result summary file
     def __init__(self, json_base_url, json_filename,
-                 subdirs=None, tests=None, configs=None, dry_run=False,
-                 add_new=False):
+                 tests=None, configs=None, dry_run=False,
+                 add_new=False, missing_json_is_fatal=False):
         if configs and not tests:
             raise ValueError('configs should only be specified if tests ' +
                              'were specified also')
         self._tests = tests
         self._configs = configs
-        if not subdirs:
-            self._subdirs = sorted(SUBDIR_MAPPING.keys())
-            self._missing_json_is_fatal = False
-        else:
-            self._subdirs = subdirs
-            self._missing_json_is_fatal = True
         self._json_base_url = json_base_url
         self._json_filename = json_filename
         self._dry_run = dry_run
         self._add_new = add_new
+        self._missing_json_is_fatal = missing_json_is_fatal
         self._googlestorage_gm_actuals_root = (
             'http://chromium-skia-gm.commondatastorage.googleapis.com/gm')
         self._testname_pattern = re.compile('(\S+)_(\S+).png')
         self._is_svn_checkout = (
             os.path.exists('.svn') or
             os.path.exists(os.path.join(os.pardir, '.svn')))
         self._is_git_checkout = (
             os.path.exists('.git') or
             os.path.exists(os.path.join(os.pardir, '.git')))
 
(...skipping 213 matching lines...)
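
Editorial note on the constructor change above: a minimal before/after usage sketch of how callers are affected. The caller-side names (`url`, `subdirs`, `missing_json_is_fatal`) are illustrative placeholders that mirror the main block later in this patch; they are not taken from the elided parts of the file.

    # Before this patch (old column): one Rebaseliner covered every subdir, and it
    # treated a missing JSON summary as fatal only when the caller named specific subdirs.
    rebaseliner = Rebaseliner(json_base_url=url, json_filename='actual-results.json',
                              subdirs=subdirs)
    rebaseliner.RebaselineAll()

    # After this patch (new column): the caller owns the subdir loop and passes the
    # missing-JSON policy in explicitly, constructing one Rebaseliner per subdir.
    for subdir in subdirs:
        rebaseliner = Rebaseliner(json_base_url=url, json_filename='actual-results.json',
                                  missing_json_is_fatal=missing_json_is_fatal)
        rebaseliner.RebaselineSubdir(subdir=subdir)
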
         print '# ' + expectations_subdir + ':'
         for config in configs:
             infilename = test + '_' + config + '.png'
             outfilename = os.path.join(expectations_subdir, infilename);
             self._RebaselineOneFile(expectations_subdir=expectations_subdir,
                                     builder_name=builder_name,
                                     infilename=infilename,
                                     outfilename=outfilename,
                                     all_results=all_results)
 
-    # Rebaseline all platforms/tests/types we specified in the constructor.
-    def RebaselineAll(self):
-        for subdir in self._subdirs:
-            if not subdir in SUBDIR_MAPPING.keys():
-                raise Exception(('unrecognized platform subdir "%s"; ' +
-                                 'should be one of %s') % (
-                                     subdir, SUBDIR_MAPPING.keys()))
-            builder_name = SUBDIR_MAPPING[subdir]
-            json_url = '/'.join([self._json_base_url,
-                                 subdir, builder_name, subdir,
-                                 self._json_filename])
-            all_results = self._GetActualResults(json_url=json_url)
-
-            if self._tests:
-                for test in self._tests:
-                    self._RebaselineOneTest(expectations_subdir=subdir,
-                                            builder_name=builder_name,
-                                            test=test, all_results=all_results)
-            else:  # get the raw list of files that need rebaselining from JSON
-                filenames = self._GetFilesToRebaseline(json_url=json_url,
-                                                       add_new=self._add_new)
-                for filename in filenames:
-                    outfilename = os.path.join(subdir, filename);
-                    self._RebaselineOneFile(expectations_subdir=subdir,
-                                            builder_name=builder_name,
-                                            infilename=filename,
-                                            outfilename=outfilename,
-                                            all_results=all_results)
+    # Rebaseline all tests/types we specified in the constructor.
+    #
+    # params:
+    #  subdir : must be one of SUBDIR_MAPPING.keys()
+    def RebaselineSubdir(self, subdir):
+        if not subdir in SUBDIR_MAPPING.keys():
+            raise Exception(('unrecognized platform subdir "%s"; ' +
+                             'should be one of %s') % (
+                                 subdir, SUBDIR_MAPPING.keys()))
+        builder_name = SUBDIR_MAPPING[subdir]
+        json_url = '/'.join([self._json_base_url,
+                             subdir, builder_name, subdir,
+                             self._json_filename])
+        all_results = self._GetActualResults(json_url=json_url)
+
+        if self._tests:
+            for test in self._tests:
+                self._RebaselineOneTest(expectations_subdir=subdir,
+                                        builder_name=builder_name,
+                                        test=test, all_results=all_results)
+        else:  # get the raw list of files that need rebaselining from JSON
+            filenames = self._GetFilesToRebaseline(json_url=json_url,
+                                                   add_new=self._add_new)
+            for filename in filenames:
+                outfilename = os.path.join(subdir, filename);
+                self._RebaselineOneFile(expectations_subdir=subdir,
+                                        builder_name=builder_name,
+                                        infilename=filename,
+                                        outfilename=outfilename,
+                                        all_results=all_results)
 
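
To make the URL layout in RebaselineSubdir concrete: the pieces are joined as base/subdir/builder/subdir/filename. A minimal sketch with placeholder values; the real subdir names and builder names live in SUBDIR_MAPPING, which is outside the lines shown in this diff.

    json_base_url = 'http://example.com/actuals'  # placeholder, not the real default
    subdir = 'some-platform-subdir'               # placeholder; must be a SUBDIR_MAPPING key
    builder_name = 'Some-Builder-Name'            # placeholder for SUBDIR_MAPPING[subdir]
    json_filename = 'actual-results.json'         # the typical value per the docstring above

    json_url = '/'.join([json_base_url, subdir, builder_name, subdir, json_filename])
    # -> 'http://example.com/actuals/some-platform-subdir/Some-Builder-Name/
    #     some-platform-subdir/actual-results.json'
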
 # main...
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--add-new', action='store_true',
                     help='in addition to the standard behavior of ' +
                     'updating expectations for failing tests, add ' +
                     'expectations for tests which don\'t have expectations ' +
                     'yet.')
 parser.add_argument('--configs', metavar='CONFIG', nargs='+',
(...skipping 16 matching lines...)
 parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                     help='which platform subdirectories to rebaseline; ' +
                     'if unspecified, rebaseline all subdirs, same as ' +
                     '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
 parser.add_argument('--tests', metavar='TEST', nargs='+',
                     help='which tests to rebaseline, e.g. ' +
                     '"--tests aaclip bigmatrix"; if unspecified, then all ' +
                     'failing tests (according to the actual-results.json ' +
                     'file) will be rebaselined.')
 args = parser.parse_args()
-rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs,
-                          subdirs=args.subdirs, dry_run=args.dry_run,
-                          json_base_url=args.json_base_url,
-                          json_filename=args.json_filename,
-                          add_new=args.add_new)
-rebaseliner.RebaselineAll()
+if args.subdirs:
+    subdirs = args.subdirs
+    missing_json_is_fatal = True
+else:
+    subdirs = sorted(SUBDIR_MAPPING.keys())
+    missing_json_is_fatal = False
+for subdir in subdirs:
+    # Soon, we will be instantiating different Rebaseliner objects depending
+    # on whether we are rebaselining an expected-results.json file, or
+    # individual image files. Different gm-expected subdirectories may move
+    # from individual image files to JSON-format expectations at different
+    # times, so we need to make this determination per subdirectory.
+    #
+    # See https://goto.google.com/ChecksumTransitionDetail
+    rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs,

  epoger 2013/06/18 17:27:40: Patchset 1 moves the subdir loop outside of the Re…

+                              dry_run=args.dry_run,
+                              json_base_url=args.json_base_url,
+                              json_filename=args.json_filename,
+                              add_new=args.add_new,
+                              missing_json_is_fatal=missing_json_is_fatal)
+    rebaseliner.RebaselineSubdir(subdir=subdir)
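
For orientation, two example invocations based on the flags visible above. The --dry-run spelling is inferred from args.dry_run (its add_argument call falls in the elided lines), the subdir name is a placeholder rather than a real SUBDIR_MAPPING key, and the test names come from the --tests help text.

    # Rebaseline every platform subdir, taking the failing tests from actual-results.json:
    python tools/rebaseline.py

    # Preview only (no downloads, no checkout changes) for one subdir and two specific tests:
    python tools/rebaseline.py --subdirs some-platform-subdir --tests aaclip bigmatrix --dry-run
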