Chromium Code Reviews

Unified Diff: tools/rebaseline.py

Issue 16306010: rebaseline.py: --tests and --configs are now FILTERS within json results (Closed) Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: add_filtering Created 7 years, 6 months ago
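
For orientation before the diff: a minimal usage sketch, not part of this patch, showing how the changed --tests/--configs semantics are meant to behave. The Rebaseliner constructor arguments and flag names are taken from the code below; the specific test and config values are hypothetical examples.

# Sketch only: mirrors the script's own "main" section at the bottom of the
# diff. 'imageblur', '565' and '8888' are illustrative values, not defaults.
rebaseliner = Rebaseliner(
    json_base_url='http://skia-autogen.googlecode.com/svn/gm-actual',
    json_filename='actual-results.json',
    subdirs=None,              # None means: all subdirs in SUBDIR_MAPPING
    tests=['imageblur'],       # now a FILTER over failures listed in the JSON
    configs=['565', '8888'],   # now a FILTER too; no longer requires --tests
    dry_run=True)              # only print what would be downloaded/added
rebaseliner.RebaselineAll()

# Roughly the same thing from the command line (flag names per this patch):
#   python tools/rebaseline.py --tests imageblur --configs 565 8888 --dry-run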
 #!/usr/bin/python

 '''
 Copyright 2012 Google Inc.

 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 '''

 '''
 Rebaselines the given GM tests, on all bots and all configurations.
 Must be run from the gm-expected directory. If run from a git or SVN
 checkout, the files will be added to the staging area for commit.
 '''

 # System-level imports
 import argparse
 import os
+import re
 import subprocess
 import sys
 import urllib2

 # Imports from within Skia
 #
-# Make sure that they are in the PYTHONPATH, but add them at the *end*
-# so any that are already in the PYTHONPATH will be preferred.
+# We need to add the 'gm' directory, so that we can import gm_json.py within
+# that directory. That script allows us to parse the actual-results.json file
+# written out by the GM tool.
+# Make sure that the 'gm' dir is in the PYTHONPATH, but add ir at the *end*
Stephen White 2013/06/06 20:41:52 Nit: ir -> it?
epoger 2013/06/07 14:15:22 Stephen- this is already fixed in the new home for
+# so any dirs that are already in the PYTHONPATH will be preferred.
+#
+# This assumes that the 'gm' directory has been checked out as a sibling of
+# the 'tools' directory containing this script, which will be the case if
+# 'trunk' was checked out as a single unit.
 GM_DIRECTORY = os.path.realpath(
     os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
 if GM_DIRECTORY not in sys.path:
     sys.path.append(GM_DIRECTORY)
 import gm_json


 # Mapping of gm-expectations subdir (under
 # https://skia.googlecode.com/svn/gm-expected/ )
 # to builder name (see list at http://108.170.217.252:10117/builders )
 (...skipping 27 matching lines...)
     pass

 class Rebaseliner(object):

     # params:
     #  json_base_url: base URL from which to read json_filename
     #  json_filename: filename (under json_base_url) from which to read a
     #                 summary of results; typically "actual-results.json"
     #  subdirs: which platform subdirectories to rebaseline; if not specified,
     #           rebaseline all platform subdirectories
-    #  tests: list of tests to rebaseline, or None if we should rebaseline
-    #         whatever files the JSON results summary file tells us to
-    #  configs: which configs to run for each test; this should only be
-    #           specified if the list of tests was also specified (otherwise,
-    #           the JSON file will give us test names and configs)
+    #  tests: list of tests to rebaseline, as a filter applied to
+    #         the list from the JSON file
+    #  configs: which configs to run for each test, as a filter applied to
+    #           the list from the JSON file
     #  dry_run: if True, instead of actually downloading files or adding
     #           files to checkout, display a list of operations that
     #           we would normally perform
     def __init__(self, json_base_url, json_filename,
                  subdirs=None, tests=None, configs=None, dry_run=False):
-        if configs and not tests:
-            raise ValueError('configs should only be specified if tests ' +
-                             'were specified also')
         self._tests = tests
         self._configs = configs
         if not subdirs:
             self._subdirs = sorted(SUBDIR_MAPPING.keys())
         else:
             self._subdirs = subdirs
         self._json_base_url = json_base_url
         self._json_filename = json_filename
         self._dry_run = dry_run
         self._is_svn_checkout = (
 (...skipping 47 matching lines...)
     # rather than a list of TESTS, like this:
     #  ['imageblur', 'xfermodes']
     #
     # params:
     #  json_url: URL pointing to a JSON actual result summary file
     #
     # TODO(epoger): add a parameter indicating whether "no-comparison"
     # results (those for which we don't have any expectations yet)
     # should be rebaselined. For now, we only return failed expectations.
     def _GetFilesToRebaseline(self, json_url):
+        if self._dry_run:
+            print ''
+            print '#'
         print ('# Getting files to rebaseline from JSON summary URL %s ...'
                % json_url)
         json_contents = self._GetContentsOfUrl(json_url)
         json_dict = gm_json.LoadFromString(json_contents)
         actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]

         files_to_rebaseline = []
         failed_results = actual_results[gm_json.JSONKEY_ACTUALRESULTS_FAILED]
         if failed_results:
             files_to_rebaseline.extend(failed_results.keys())

         print '# ... found files_to_rebaseline %s' % files_to_rebaseline
+        if self._dry_run:
+            print '#'
         return files_to_rebaseline

     # Rebaseline a single file.
     def _RebaselineOneFile(self, expectations_subdir, builder_name,
                            infilename, outfilename):
+        if self._dry_run:
+            print ''
         print '# ' + infilename
         url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
                expectations_subdir + '/' + builder_name + '/' +
                expectations_subdir + '/' + infilename)

-        # Try to download this file, but if that fails, keep going...
+        # Try to download this file.
         #
-        # This not treated as a fatal failure because not all
-        # platforms generate all configs (e.g., Android does not
-        # generate PDF).
-        #
-        # We could tweak the list of configs within this tool to
-        # reflect which combinations the bots actually generate, and
-        # then fail if any of those expected combinations are
-        # missing... but then this tool would become useless every
-        # time someone tweaked the configs on the bots without
-        # updating this script.
-        try:
-            self._DownloadFile(source_url=url, dest_filename=outfilename)
-        except CommandFailedException:
-            print '# Couldn\'t fetch ' + url
-            return
+        # If the download fails, this will raise an exception and halt the
+        # rebaseline process. Since the JSON results summary told us that
+        # this file needed rebaselining, we ought to be able to download it...
+        self._DownloadFile(source_url=url, dest_filename=outfilename)

         # Add this file to version control (if it isn't already).
         if self._is_svn_checkout:
             cmd = [ 'svn', 'add', '--quiet', outfilename ]
             self._Call(cmd)
             cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type', 'image/png',
                     outfilename ];
             self._Call(cmd)
         elif self._is_git_checkout:
             cmd = [ 'git', 'add', outfilename ]
             self._Call(cmd)

-    # Rebaseline the given configs for a single test.
-    #
-    # params:
-    #  expectations_subdir
-    #  builder_name
-    #  test: a single test to rebaseline
-    def _RebaselineOneTest(self, expectations_subdir, builder_name, test):
-        if self._configs:
-            configs = self._configs
-        else:
-            if (expectations_subdir == 'base-shuttle-win7-intel-angle'):
-                configs = [ 'angle', 'anglemsaa16' ]
-            else:
-                configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
-                            'msaa4' ]
-        print '# ' + expectations_subdir + ':'
-        for config in configs:
-            infilename = test + '_' + config + '.png'
-            outfilename = os.path.join(expectations_subdir, infilename);
-            self._RebaselineOneFile(expectations_subdir=expectations_subdir,
-                                    builder_name=builder_name,
-                                    infilename=infilename,
-                                    outfilename=outfilename)
-
     # Rebaseline all platforms/tests/types we specified in the constructor.
     def RebaselineAll(self):
+        filename_pattern = re.compile('(\S+)_(\S+).png')
         for subdir in self._subdirs:
             if not subdir in SUBDIR_MAPPING.keys():
                 raise Exception(('unrecognized platform subdir "%s"; ' +
                                  'should be one of %s') % (
                                      subdir, SUBDIR_MAPPING.keys()))
             builder_name = SUBDIR_MAPPING[subdir]
-            if self._tests:
Stephen White 2013/06/06 20:41:52 Is there a way to force an override of the json fi
epoger 2013/06/07 14:15:22 Pessimism is underrated. I am a bit anxious about
-                for test in self._tests:
-                    self._RebaselineOneTest(expectations_subdir=subdir,
-                                            builder_name=builder_name,
-                                            test=test)
-            else: # get the raw list of files that need rebaselining from JSON
-                json_url = '/'.join([self._json_base_url,
-                                     subdir, builder_name, subdir,
-                                     self._json_filename])
-                filenames = self._GetFilesToRebaseline(json_url=json_url)
-                for filename in filenames:
-                    outfilename = os.path.join(subdir, filename);
-                    self._RebaselineOneFile(expectations_subdir=subdir,
-                                            builder_name=builder_name,
-                                            infilename=filename,
-                                            outfilename=outfilename)
+            json_url = '/'.join([self._json_base_url,
+                                 subdir, builder_name, subdir,
+                                 self._json_filename])
+            filenames = self._GetFilesToRebaseline(json_url=json_url)
+            for filename in filenames:
+                # Apply our filters, if we have any.
+                match = filename_pattern.match(filename)
+                test = match.group(1)
+                config = match.group(2)
+                if self._tests and test not in self._tests:
+                    continue
+                if self._configs and config not in self._configs:
+                    continue
+
+                outfilename = os.path.join(subdir, filename);
+                self._RebaselineOneFile(expectations_subdir=subdir,
+                                        builder_name=builder_name,
+                                        infilename=filename,
+                                        outfilename=outfilename)

 # main...

 parser = argparse.ArgumentParser()
 parser.add_argument('--configs', metavar='CONFIG', nargs='+',
                     help='which configurations to rebaseline, e.g. ' +
-                    '"--configs 565 8888"; if unspecified, run a default ' +
-                    'set of configs. This should ONLY be specified if ' +
-                    '--tests has also been specified.')
-parser.add_argument('--dry_run', action='store_true',
+                    '"--configs 565 8888", as a filter over the configs ' +
+                    'which JSON_FILENAME tells us need rebaselining; ' +
+                    'if unspecified, then rebaseline all the configs that ' +
+                    'JSON_FILENAME tells us need rebaselining.')
+parser.add_argument('--dry-run', action='store_true',
                     help='instead of actually downloading files or adding ' +
                     'files to checkout, display a list of operations that ' +
                     'we would normally perform')
-parser.add_argument('--json_base_url',
+parser.add_argument('--json-base-url',
                     help='base URL from which to read JSON_FILENAME ' +
                     'files; defaults to %(default)s',
                     default='http://skia-autogen.googlecode.com/svn/gm-actual')
-parser.add_argument('--json_filename',
+parser.add_argument('--json-filename',
                     help='filename (under JSON_BASE_URL) to read a summary ' +
                     'of results from; defaults to %(default)s',
                     default='actual-results.json')
 parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                     help='which platform subdirectories to rebaseline; ' +
                     'if unspecified, rebaseline all subdirs, same as ' +
                     '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
 parser.add_argument('--tests', metavar='TEST', nargs='+',
                     help='which tests to rebaseline, e.g. ' +
-                    '"--tests aaclip bigmatrix"; if unspecified, then all ' +
-                    'failing tests (according to the actual-results.json ' +
-                    'file) will be rebaselined.')
+                    '"--tests aaclip bigmatrix", as a filter over the tests ' +
+                    'which JSON_FILENAME tells us need rebaselining; ' +
+                    'if unspecified, then rebaseline all the tests that ' +
+                    'JSON_FILENAME tells us need rebaselining.')
 args = parser.parse_args()
 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs,
                           subdirs=args.subdirs, dry_run=args.dry_run,
                           json_base_url=args.json_base_url,
                           json_filename=args.json_filename)
 rebaseliner.RebaselineAll()
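
For reference, a standalone sketch (with hypothetical filenames) of the filter logic this patch adds to RebaselineAll: failed results from the JSON summary arrive as TEST_CONFIG.png filenames, the regex splits each name at its last underscore, and the --tests / --configs lists are applied as simple membership filters.

import re

# Same pattern as the patch; \S+ is greedy, so the split happens at the last
# underscore and a test name may itself contain underscores.
filename_pattern = re.compile('(\S+)_(\S+).png')

# Hypothetical failing results, as _GetFilesToRebaseline might return them.
filenames = ['imageblur_8888.png', 'imageblur_gpu.png', 'xfermodes_565.png']

tests = ['imageblur']      # e.g. from "--tests imageblur"
configs = ['8888', '565']  # e.g. from "--configs 8888 565"

for filename in filenames:
    match = filename_pattern.match(filename)
    test = match.group(1)
    config = match.group(2)
    if tests and test not in tests:
        continue
    if configs and config not in configs:
        continue
    # Only 'imageblur_8888.png' survives both filters in this example.
    print '# would rebaseline %s' % filename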