Chromium Code Reviews
Unified Diff: tools/rebaseline.py

Issue 15789010: rebaseline.py: --tests and --configs are now FILTERS within json results (Closed)
Base URL: http://skia.googlecode.com/svn/trunk/
Patch Set: implement_add_new_option Created 7 years, 6 months ago
 #!/usr/bin/python

 '''
 Copyright 2012 Google Inc.

 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 '''

 '''
 Rebaselines the given GM tests, on all bots and all configurations.
 Must be run from the gm-expected directory. If run from a git or SVN
 checkout, the files will be added to the staging area for commit.
 '''

 # System-level imports
 import argparse
 import os
+import re
 import subprocess
 import sys
 import urllib2

 # Imports from within Skia
 #
-# Make sure that they are in the PYTHONPATH, but add them at the *end*
-# so any that are already in the PYTHONPATH will be preferred.
+# We need to add the 'gm' directory, so that we can import gm_json.py within
+# that directory. That script allows us to parse the actual-results.json file
+# written out by the GM tool.
+# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
+# so any dirs that are already in the PYTHONPATH will be preferred.
+#
+# This assumes that the 'gm' directory has been checked out as a sibling of
+# the 'tools' directory containing this script, which will be the case if
+# 'trunk' was checked out as a single unit.
 GM_DIRECTORY = os.path.realpath(
     os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
 if GM_DIRECTORY not in sys.path:
     sys.path.append(GM_DIRECTORY)
 import gm_json


 # Mapping of gm-expectations subdir (under
 # https://skia.googlecode.com/svn/gm-expected/ )
 # to builder name (see list at http://108.170.217.252:10117/builders )

(... skipping 27 matching lines ...)
     pass

 class Rebaseliner(object):

     # params:
     # json_base_url: base URL from which to read json_filename
     # json_filename: filename (under json_base_url) from which to read a
     #    summary of results; typically "actual-results.json"
     # subdirs: which platform subdirectories to rebaseline; if not specified,
     #    rebaseline all platform subdirectories
-    # tests: list of tests to rebaseline, or None if we should rebaseline
-    #    whatever files the JSON results summary file tells us to
-    # configs: which configs to run for each test; this should only be
-    #    specified if the list of tests was also specified (otherwise,
-    #    the JSON file will give us test names and configs)
+    # tests: list of tests to rebaseline, as a filter applied to
+    #    the list from the JSON file
+    # configs: which configs to run for each test, as a filter applied to
+    #    the list from the JSON file
+    # add_new: if True, download actual results of tests which we don't
+    #    have baselines for yet, in addition to any failing tests
     # dry_run: if True, instead of actually downloading files or adding
     #    files to checkout, display a list of operations that
     #    we would normally perform
     def __init__(self, json_base_url, json_filename,
-                 subdirs=None, tests=None, configs=None, dry_run=False):
-        if configs and not tests:
-            raise ValueError('configs should only be specified if tests ' +
-                             'were specified also')
+                 subdirs=None, tests=None, configs=None, add_new=False,
+                 dry_run=False):
         self._tests = tests
         self._configs = configs
         if not subdirs:
             self._subdirs = sorted(SUBDIR_MAPPING.keys())
         else:
             self._subdirs = subdirs
         self._json_base_url = json_base_url
         self._json_filename = json_filename
+        self._add_new = add_new
         self._dry_run = dry_run
         self._is_svn_checkout = (
             os.path.exists('.svn') or
             os.path.exists(os.path.join(os.pardir, '.svn')))
         self._is_git_checkout = (
             os.path.exists('.git') or
             os.path.exists(os.path.join(os.pardir, '.git')))

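
For illustration only (not part of the diff): with the new signature, a caller could drive the class roughly like this. The subdir, test, and config names are taken from examples elsewhere in this file; everything else is a placeholder, and dry_run=True keeps the sketch from touching anything.

    rebaseliner = Rebaseliner(
        json_base_url='http://skia-autogen.googlecode.com/svn/gm-actual',
        json_filename='actual-results.json',
        subdirs=['base-shuttle-win7-intel-angle'],
        tests=['imageblur'],         # filter: keep only this test
        configs=['565', '8888'],     # filter: keep only these configs
        add_new=True,                # also fetch results we have no baselines for
        dry_run=True)                # print the operations instead of running them
    rebaseliner.RebaselineAll()
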
     # If dry_run is False, execute subprocess.call(cmd).
     # If dry_run is True, print the command we would have otherwise run.

(... skipping 32 matching lines ...)

         else:
             return urllib2.urlopen(url).read()

     # Returns a list of files that require rebaselining.
     #
     # Note that this returns a list of FILES, like this:
     #   ['imageblur_565.png', 'xfermodes_pdf.png']
     # rather than a list of TESTS, like this:
     #   ['imageblur', 'xfermodes']
     #
+    # If self._add_new is True, then include tests which we don't have
+    # baselines for yet, in addition to any failing tests.
+    #
     # params:
     # json_url: URL pointing to a JSON actual result summary file
-    #
-    # TODO(epoger): add a parameter indicating whether "no-comparison"
-    # results (those for which we don't have any expectations yet)
-    # should be rebaselined. For now, we only return failed expectations.
     def _GetFilesToRebaseline(self, json_url):
+        if self._dry_run:
+            print ''
+            print '#'
         print ('# Getting files to rebaseline from JSON summary URL %s ...'
                % json_url)
         json_contents = self._GetContentsOfUrl(json_url)
         json_dict = gm_json.LoadFromString(json_contents)
         actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]

         files_to_rebaseline = []
         failed_results = actual_results[gm_json.JSONKEY_ACTUALRESULTS_FAILED]
         if failed_results:
             files_to_rebaseline.extend(failed_results.keys())
+        if self._add_new:
    [epoger 2013/06/06 20:19:02]  Patchset 3 adds implementation of the --add-new op
+            new_results = actual_results[gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON]
+            if new_results:
+                files_to_rebaseline.extend(new_results.keys())

         print '# ... found files_to_rebaseline %s' % files_to_rebaseline
+        if self._dry_run:
+            print '#'
         return files_to_rebaseline

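
A sketch, for illustration only, of the selection logic above run against a hand-built dict shaped like the parsed actual-results.json. The literal key strings and hash values are placeholders; the real keys are whatever gm_json.JSONKEY_ACTUALRESULTS_FAILED and gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON evaluate to, and this code only ever looks at the keys of the inner dicts, never the values.

    add_new = True
    actual_results = {
        'failed':        {'imageblur_565.png': ['bitmap-64bitMD5', 111]},  # placeholder value
        'no-comparison': {'newtest_8888.png':  ['bitmap-64bitMD5', 222]},  # placeholder value
    }
    files_to_rebaseline = []
    failed_results = actual_results['failed']
    if failed_results:
        files_to_rebaseline.extend(failed_results.keys())
    if add_new:
        new_results = actual_results['no-comparison']
        if new_results:
            files_to_rebaseline.extend(new_results.keys())
    # files_to_rebaseline is now ['imageblur_565.png', 'newtest_8888.png']
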
     # Rebaseline a single file.
     def _RebaselineOneFile(self, expectations_subdir, builder_name,
                            infilename, outfilename):
+        if self._dry_run:
+            print ''
         print '# ' + infilename
         url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
                expectations_subdir + '/' + builder_name + '/' +
                expectations_subdir + '/' + infilename)

-        # Try to download this file, but if that fails, keep going...
+        # Try to download this file.
         #
-        # This not treated as a fatal failure because not all
-        # platforms generate all configs (e.g., Android does not
-        # generate PDF).
-        #
-        # We could tweak the list of configs within this tool to
-        # reflect which combinations the bots actually generate, and
-        # then fail if any of those expected combinations are
-        # missing... but then this tool would become useless every
-        # time someone tweaked the configs on the bots without
-        # updating this script.
-        try:
-            self._DownloadFile(source_url=url, dest_filename=outfilename)
-        except CommandFailedException:
-            print '# Couldn\'t fetch ' + url
-            return
+        # If the download fails, this will raise an exception and halt the
+        # rebaseline process. Since the JSON results summary told us that
+        # this file needed rebaselining, we ought to be able to download it...
+        self._DownloadFile(source_url=url, dest_filename=outfilename)

         # Add this file to version control (if it isn't already).
         if self._is_svn_checkout:
             cmd = [ 'svn', 'add', '--quiet', outfilename ]
             self._Call(cmd)
             cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type', 'image/png',
                     outfilename ];
             self._Call(cmd)
         elif self._is_git_checkout:
             cmd = [ 'git', 'add', outfilename ]
             self._Call(cmd)

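
For illustration only: the URL assembled above, spelled out with an example subdir mentioned elsewhere in this file and an example filename from the comments; the builder name is still looked up in SUBDIR_MAPPING rather than hard-coded here.

    subdir = 'base-shuttle-win7-intel-angle'   # example subdir from this file
    builder_name = SUBDIR_MAPPING[subdir]      # looked up, not guessed
    infilename = 'imageblur_565.png'           # example filename from the comments
    url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' +
           subdir + '/' + builder_name + '/' + subdir + '/' + infilename)
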
-    # Rebaseline the given configs for a single test.
-    #
-    # params:
-    #  expectations_subdir
-    #  builder_name
-    #  test: a single test to rebaseline
-    def _RebaselineOneTest(self, expectations_subdir, builder_name, test):
-        if self._configs:
-            configs = self._configs
-        else:
-            if (expectations_subdir == 'base-shuttle-win7-intel-angle'):
-                configs = [ 'angle', 'anglemsaa16' ]
-            else:
-                configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16',
-                            'msaa4' ]
-        print '# ' + expectations_subdir + ':'
-        for config in configs:
-            infilename = test + '_' + config + '.png'
-            outfilename = os.path.join(expectations_subdir, infilename);
-            self._RebaselineOneFile(expectations_subdir=expectations_subdir,
-                                    builder_name=builder_name,
-                                    infilename=infilename,
-                                    outfilename=outfilename)
-
     # Rebaseline all platforms/tests/types we specified in the constructor.
     def RebaselineAll(self):
+        filename_pattern = re.compile('(\S+)_(\S+).png')
         for subdir in self._subdirs:
             if not subdir in SUBDIR_MAPPING.keys():
                 raise Exception(('unrecognized platform subdir "%s"; ' +
                                  'should be one of %s') % (
                                      subdir, SUBDIR_MAPPING.keys()))
             builder_name = SUBDIR_MAPPING[subdir]
-            if self._tests:
-                for test in self._tests:
-                    self._RebaselineOneTest(expectations_subdir=subdir,
-                                            builder_name=builder_name,
-                                            test=test)
-            else:  # get the raw list of files that need rebaselining from JSON
-                json_url = '/'.join([self._json_base_url,
-                                     subdir, builder_name, subdir,
-                                     self._json_filename])
-                filenames = self._GetFilesToRebaseline(json_url=json_url)
-                for filename in filenames:
-                    outfilename = os.path.join(subdir, filename);
-                    self._RebaselineOneFile(expectations_subdir=subdir,
-                                            builder_name=builder_name,
-                                            infilename=filename,
-                                            outfilename=outfilename)
+            json_url = '/'.join([self._json_base_url,
+                                 subdir, builder_name, subdir,
+                                 self._json_filename])
+            filenames = self._GetFilesToRebaseline(json_url=json_url)
+            for filename in filenames:
+                # Apply our filters, if we have any.
+                match = filename_pattern.match(filename)
+                test = match.group(1)
+                config = match.group(2)
+                if self._tests and test not in self._tests:
+                    continue
+                if self._configs and config not in self._configs:
+                    continue
+
+                outfilename = os.path.join(subdir, filename);
+                self._RebaselineOneFile(expectations_subdir=subdir,
+                                        builder_name=builder_name,
+                                        infilename=filename,
+                                        outfilename=outfilename)

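
For illustration only: how the new filename_pattern filter splits the filenames that _GetFilesToRebaseline() returns (the example filenames come from the FILES/TESTS comment earlier in this file), and how the --tests and --configs filters then combine.

    filename_pattern = re.compile('(\S+)_(\S+).png')
    for filename in ['imageblur_565.png', 'xfermodes_pdf.png']:
        match = filename_pattern.match(filename)
        print match.group(1), match.group(2)
    # prints:
    #   imageblur 565
    #   xfermodes pdf
    #
    # With --tests imageblur, only imageblur_565.png survives; adding
    # --configs pdf on top of that filters out both files, because a file
    # must pass every filter that was specified.
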
 # main...

 parser = argparse.ArgumentParser()
+parser.add_argument('--add-new', action='store_true',
+                    help='in addition to the standard behavior of ' +
+                    'downloading images whose tests are failing, ' +
+                    'also download images for which we haven\'t checked in ' +
+                    'expectations yet')
 parser.add_argument('--configs', metavar='CONFIG', nargs='+',
                     help='which configurations to rebaseline, e.g. ' +
-                    '"--configs 565 8888"; if unspecified, run a default ' +
-                    'set of configs. This should ONLY be specified if ' +
-                    '--tests has also been specified.')
-parser.add_argument('--dry_run', action='store_true',
+                    '"--configs 565 8888", as a filter over the configs ' +
+                    'which JSON_FILENAME tells us need rebaselining; ' +
+                    'if unspecified, then rebaseline all the configs that ' +
+                    'JSON_FILENAME tells us need rebaselining.')
+parser.add_argument('--dry-run', action='store_true',
                     help='instead of actually downloading files or adding ' +
                     'files to checkout, display a list of operations that ' +
                     'we would normally perform')
-parser.add_argument('--json_base_url',
+parser.add_argument('--json-base-url',
                     help='base URL from which to read JSON_FILENAME ' +
                     'files; defaults to %(default)s',
                     default='http://skia-autogen.googlecode.com/svn/gm-actual')
-parser.add_argument('--json_filename',
+parser.add_argument('--json-filename',
                     help='filename (under JSON_BASE_URL) to read a summary ' +
                     'of results from; defaults to %(default)s',
                     default='actual-results.json')
 parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+',
                     help='which platform subdirectories to rebaseline; ' +
                     'if unspecified, rebaseline all subdirs, same as ' +
                     '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys())))
 parser.add_argument('--tests', metavar='TEST', nargs='+',
                     help='which tests to rebaseline, e.g. ' +
-                    '"--tests aaclip bigmatrix"; if unspecified, then all ' +
-                    'failing tests (according to the actual-results.json ' +
-                    'file) will be rebaselined.')
+                    '"--tests aaclip bigmatrix", as a filter over the tests ' +
+                    'which JSON_FILENAME tells us need rebaselining; ' +
+                    'if unspecified, then rebaseline all the tests that ' +
+                    'JSON_FILENAME tells us need rebaselining.')
 args = parser.parse_args()
 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs,
-                          subdirs=args.subdirs, dry_run=args.dry_run,
+                          subdirs=args.subdirs, add_new=args.add_new,
+                          dry_run=args.dry_run,
                           json_base_url=args.json_base_url,
                           json_filename=args.json_filename)
 rebaseliner.RebaselineAll()
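
For illustration only, a command line that exercises the new filtering and --add-new behavior, run from the gm-expected directory as the docstring requires (adjust the path to the script for your checkout). The test, config, and subdir names are the examples used in the help text and elsewhere in this file, and --dry-run keeps it from modifying anything:

    python path/to/tools/rebaseline.py --tests aaclip bigmatrix \
        --configs 565 8888 --subdirs base-shuttle-win7-intel-angle \
        --add-new --dry-run
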