OLD | NEW |
---|---|
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 | 2 |
3 ''' | 3 ''' |
4 Copyright 2012 Google Inc. | 4 Copyright 2012 Google Inc. |
5 | 5 |
6 Use of this source code is governed by a BSD-style license that can be | 6 Use of this source code is governed by a BSD-style license that can be |
7 found in the LICENSE file. | 7 found in the LICENSE file. |
8 ''' | 8 ''' |
9 | 9 |
10 ''' | 10 ''' |
11 Rebaselines the given GM tests, on all bots and all configurations. | 11 Rebaselines the given GM tests, on all bots and all configurations. |
12 Must be run from the gm-expected directory. If run from a git or SVN | 12 Must be run from the gm-expected directory. If run from a git or SVN |
13 checkout, the files will be added to the staging area for commit. | 13 checkout, the files will be added to the staging area for commit. |
14 ''' | 14 ''' |
15 | 15 |
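For orientation, a hedged sketch of how this script might be invoked once the change lands. The flag names come from the argparse definitions at the bottom of this diff; the script filename, test names, and subdir value are illustrative assumptions only.

```python
# Example invocations (a sketch; "rebaseline.py" is an assumed filename, and
# the flag values are examples taken from the help strings later in this diff):
#
#   python rebaseline.py --tests aaclip bigmatrix --configs 565 8888 --dry_run
#   python rebaseline.py --subdirs base-shuttle-win7-intel-float
#
# The second form omits --tests, so (per this CL) the failing files listed in
# actual-results.json for that subdir are rebaselined instead.
```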
16 # System-level imports | |
16 import argparse | 17 import argparse |
17 import os | 18 import os |
18 import subprocess | 19 import subprocess |
19 import sys | 20 import sys |
21 import urllib2 | |
22 | |
23 # Imports from within Skia | |
24 # | |
25 # Make sure that they are in the PYTHONPATH, but add them at the *end* | |
26 # so any that are already in the PYTHONPATH will be preferred. | |
27 GM_DIRECTORY = os.path.realpath( | |
28 os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm')) | |
29 if GM_DIRECTORY not in sys.path: | |
30 sys.path.append(GM_DIRECTORY) | |
Stephen White
2013/06/05 16:44:20
A comment about why this is necessary would be helpful.
epoger
2013/06/05 17:46:39
I'll add a comment in an upcoming CL. This *does*…
Stephen White
2013/06/05 20:01:46
OK, I think that's a fair assumption to make: I do…
31 import gm_json | |
32 | |
20 | 33 |
21 # Mapping of gm-expectations subdir (under | 34 # Mapping of gm-expectations subdir (under |
22 # https://skia.googlecode.com/svn/gm-expected/ ) | 35 # https://skia.googlecode.com/svn/gm-expected/ ) |
23 # to builder name (see list at http://108.170.217.252:10117/builders ) | 36 # to builder name (see list at http://108.170.217.252:10117/builders ) |
24 SUBDIR_MAPPING = { | 37 SUBDIR_MAPPING = { |
25 'base-shuttle-win7-intel-float': | 38 'base-shuttle-win7-intel-float': |
26 'Test-Win7-ShuttleA-HD2000-x86-Release', | 39 'Test-Win7-ShuttleA-HD2000-x86-Release', |
27 'base-shuttle-win7-intel-angle': | 40 'base-shuttle-win7-intel-angle': |
28 'Test-Win7-ShuttleA-HD2000-x86-Release-ANGLE', | 41 'Test-Win7-ShuttleA-HD2000-x86-Release-ANGLE', |
29 'base-shuttle-win7-intel-directwrite': | 42 'base-shuttle-win7-intel-directwrite': |
(...skipping 16 matching lines...) | |
46 'Test-Android-Nexus10-MaliT604-Arm7-Release', | 59 'Test-Android-Nexus10-MaliT604-Arm7-Release', |
47 } | 60 } |
48 | 61 |
49 | 62 |
50 class CommandFailedException(Exception): | 63 class CommandFailedException(Exception): |
51 pass | 64 pass |
52 | 65 |
53 class Rebaseliner(object): | 66 class Rebaseliner(object): |
54 | 67 |
55 # params: | 68 # params: |
56 # tests: list of tests to rebaseline | 69 # json_base_url: base URL from which to read json_filename |
57 # configs: which configs to run for each test | 70 # json_filename: filename (under json_base_url) from which to read a |
58 # subdirs: which platform subdirectories to rebaseline; if an empty list, | 71 # summary of results; typically "actual-results.json" |
72 # subdirs: which platform subdirectories to rebaseline; if not specified, | |
59 # rebaseline all platform subdirectories | 73 # rebaseline all platform subdirectories |
74 # tests: list of tests to rebaseline, or None if we should rebaseline | |
75 # whatever files the JSON results summary file tells us to | |
76 # configs: which configs to run for each test; this should only be | |
epoger
2013/06/05 15:02:31
I just spoke to Rob about the behavior when --tests…
scroggo
2013/06/05 15:11:14
--add_new sounds fine to me.
Stephen White
2013/06/05 16:44:20
My OCD nit: please don't mix underscores and dashes…
epoger
2013/06/05 17:46:39
I don't think that's a nit at all. I'll make it -…
77 # specified if the list of tests was also specified (otherwise, | |
78 # the JSON file will give us test names and configs) | |
60 # dry_run: if True, instead of actually downloading files or adding | 79 # dry_run: if True, instead of actually downloading files or adding |
61 # files to checkout, display a list of operations that | 80 # files to checkout, display a list of operations that |
62 # we would normally perform | 81 # we would normally perform |
63 def __init__(self, tests, configs=[], subdirs=[], dry_run=False): | 82 def __init__(self, json_base_url, json_filename, |
64 if not tests: | 83 subdirs=None, tests=None, configs=None, dry_run=False): |
65 raise Exception('at least one test must be specified') | 84 if configs and not tests: |
epoger
2013/06/05 03:41:00
Patchset 6 throws an error message if --configs is specified without --tests.
85 raise ValueError('configs should only be specified if tests ' + | |
86 'were specified also') | |
66 self._tests = tests | 87 self._tests = tests |
67 self._configs = configs | 88 self._configs = configs |
68 if not subdirs: | 89 if not subdirs: |
69 self._subdirs = sorted(SUBDIR_MAPPING.keys()) | 90 self._subdirs = sorted(SUBDIR_MAPPING.keys()) |
70 else: | 91 else: |
71 self._subdirs = subdirs | 92 self._subdirs = subdirs |
93 self._json_base_url = json_base_url | |
94 self._json_filename = json_filename | |
72 self._dry_run = dry_run | 95 self._dry_run = dry_run |
73 self._is_svn_checkout = ( | 96 self._is_svn_checkout = ( |
74 os.path.exists('.svn') or | 97 os.path.exists('.svn') or |
75 os.path.exists(os.path.join(os.pardir, '.svn'))) | 98 os.path.exists(os.path.join(os.pardir, '.svn'))) |
76 self._is_git_checkout = ( | 99 self._is_git_checkout = ( |
77 os.path.exists('.git') or | 100 os.path.exists('.git') or |
78 os.path.exists(os.path.join(os.pardir, '.git'))) | 101 os.path.exists(os.path.join(os.pardir, '.git'))) |
79 | 102 |
80 # If dry_run is False, execute subprocess.call(cmd). | 103 # If dry_run is False, execute subprocess.call(cmd). |
81 # If dry_run is True, print the command we would have otherwise run. | 104 # If dry_run is True, print the command we would have otherwise run. |
(...skipping 12 matching lines...) | |
94 # so that we don't corrupt the existing file if it fails midway thru. | 117 # so that we don't corrupt the existing file if it fails midway thru. |
95 temp_filename = os.path.join(os.path.dirname(dest_filename), | 118 temp_filename = os.path.join(os.path.dirname(dest_filename), |
96 '.temp-' + os.path.basename(dest_filename)) | 119 '.temp-' + os.path.basename(dest_filename)) |
97 | 120 |
98 # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on | 121 # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on |
99 # Unix) with a Python HTTP library (which should work cross-platform) | 122 # Unix) with a Python HTTP library (which should work cross-platform) |
100 self._Call([ 'curl', '--fail', '--silent', source_url, | 123 self._Call([ 'curl', '--fail', '--silent', source_url, |
101 '--output', temp_filename ]) | 124 '--output', temp_filename ]) |
102 self._Call([ 'mv', temp_filename, dest_filename ]) | 125 self._Call([ 'mv', temp_filename, dest_filename ]) |
103 | 126 |
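The TODO above asks for the curl/mv pair to be replaced with a cross-platform Python HTTP library. As a hedged illustration only (not part of this CL), one way to do that with urllib2, which this change already imports, might look like the sketch below; `_download_file` is a hypothetical helper name.

```python
import os
import urllib2

def _download_file(source_url, dest_filename):
    # Hypothetical cross-platform replacement for the curl/mv calls above.
    # Download into a temp file first, mirroring the existing approach, so a
    # failed download cannot corrupt an existing expectation file.
    temp_filename = os.path.join(os.path.dirname(dest_filename),
                                 '.temp-' + os.path.basename(dest_filename))
    response = urllib2.urlopen(source_url)
    with open(temp_filename, 'wb') as temp_file:
        temp_file.write(response.read())
    os.rename(temp_filename, dest_filename)
```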
127 # Returns the full contents of a URL, as a single string. | |
128 # | |
129 # Unlike standard URL handling, we allow relative "file:" URLs; | |
130 # for example, "file:one/two" resolves to the file ./one/two | |
131 # (relative to current working dir) | |
132 def _GetContentsOfUrl(self, url): | |
133 file_prefix = 'file:' | |
134 if url.startswith(file_prefix): | |
135 filename = url[len(file_prefix):] | |
136 return open(filename, 'r').read() | |
137 else: | |
138 return urllib2.urlopen(url).read() | |
139 | |
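To make the relative "file:" handling concrete, a minimal sketch (illustrative only; `rebaseliner` is assumed to be an already-constructed Rebaseliner instance, and the path and URL are made up):

```python
# Reads ./one/two relative to the current working directory:
contents = rebaseliner._GetContentsOfUrl('file:one/two')
# ...which is equivalent to:
contents = open('one/two', 'r').read()

# Any other URL is fetched over the network via urllib2.urlopen(url).read():
contents = rebaseliner._GetContentsOfUrl(
    'http://example.com/actual-results.json')
```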
140 # Returns a list of files that require rebaselining. | |
141 # | |
142 # Note that this returns a list of FILES, like this: | |
143 # ['imageblur_565.png', 'xfermodes_pdf.png'] | |
144 # rather than a list of TESTS, like this: | |
145 # ['imageblur', 'xfermodes'] | |
146 # | |
147 # params: | |
148 # json_url: URL pointing to a JSON actual result summary file | |
149 # | |
150 # TODO(epoger): add a parameter indicating whether "no-comparison" | |
151 # results (those for which we don't have any expectations yet) | |
152 # should be rebaselined. For now, we only return failed expectations. | |
153 def _GetFilesToRebaseline(self, json_url): | |
154 print ('# Getting files to rebaseline from JSON summary URL %s ...' | |
epoger
2013/06/05 03:41:00
... and is more consistent about "files" vs "tests"…
155 % json_url) | |
156 json_contents = self._GetContentsOfUrl(json_url) | |
157 json_dict = gm_json.LoadFromString(json_contents) | |
158 actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS] | |
159 | |
160 files_to_rebaseline = [] | |
161 failed_results = actual_results[gm_json.JSONKEY_ACTUALRESULTS_FAILED] | |
162 if failed_results: | |
163 files_to_rebaseline.extend(failed_results.keys()) | |
164 | |
165 print '# ... found files_to_rebaseline %s' % files_to_rebaseline | |
166 return files_to_rebaseline | |
167 | |
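For readers unfamiliar with the results summary, a hedged sketch of the structure _GetFilesToRebaseline() expects. The literal key names below are assumptions standing in for the gm_json constants used above, and the filenames are the same examples as in the docstring:

```python
example_summary = {
    'actual-results': {   # assumed value of gm_json.JSONKEY_ACTUALRESULTS
        'failed': {       # assumed value of gm_json.JSONKEY_ACTUALRESULTS_FAILED
            # Only the keys (filenames) are used by _GetFilesToRebaseline();
            # the per-file values are ignored here.
            'imageblur_565.png': None,
            'xfermodes_pdf.png': None,
        },
    },
}
# A summary shaped like this would yield:
#   ['imageblur_565.png', 'xfermodes_pdf.png']
```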
104 # Rebaseline a single file. | 168 # Rebaseline a single file. |
105 def _RebaselineOneFile(self, expectations_subdir, builder_name, | 169 def _RebaselineOneFile(self, expectations_subdir, builder_name, |
106 infilename, outfilename): | 170 infilename, outfilename): |
171 print '# ' + infilename | |
107 url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' + | 172 url = ('http://skia-autogen.googlecode.com/svn/gm-actual/' + |
108 expectations_subdir + '/' + builder_name + '/' + | 173 expectations_subdir + '/' + builder_name + '/' + |
109 expectations_subdir + '/' + infilename) | 174 expectations_subdir + '/' + infilename) |
110 | 175 |
111 # Try to download this file, but if that fails, keep going... | 176 # Try to download this file, but if that fails, keep going... |
112 # | 177 # |
113 # This is not treated as a fatal failure because not all | 178 # This is not treated as a fatal failure because not all |
114 # platforms generate all configs (e.g., Android does not | 179 # platforms generate all configs (e.g., Android does not |
115 # generate PDF). | 180 # generate PDF). |
116 # | 181 # |
(...skipping 31 matching lines...) | |
148 configs = self._configs | 213 configs = self._configs |
149 else: | 214 else: |
150 if (expectations_subdir == 'base-shuttle-win7-intel-angle'): | 215 if (expectations_subdir == 'base-shuttle-win7-intel-angle'): |
151 configs = [ 'angle', 'anglemsaa16' ] | 216 configs = [ 'angle', 'anglemsaa16' ] |
152 else: | 217 else: |
153 configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16', | 218 configs = [ '565', '8888', 'gpu', 'pdf', 'mesa', 'msaa16', |
154 'msaa4' ] | 219 'msaa4' ] |
155 print '# ' + expectations_subdir + ':' | 220 print '# ' + expectations_subdir + ':' |
156 for config in configs: | 221 for config in configs: |
157 infilename = test + '_' + config + '.png' | 222 infilename = test + '_' + config + '.png' |
158 print '# ' + infilename | |
159 outfilename = os.path.join(expectations_subdir, infilename); | 223 outfilename = os.path.join(expectations_subdir, infilename); |
160 self._RebaselineOneFile(expectations_subdir=expectations_subdir, | 224 self._RebaselineOneFile(expectations_subdir=expectations_subdir, |
161 builder_name=builder_name, | 225 builder_name=builder_name, |
162 infilename=infilename, | 226 infilename=infilename, |
163 outfilename=outfilename) | 227 outfilename=outfilename) |
164 | 228 |
165 # Rebaseline all platforms/tests/types we specified in the constructor. | 229 # Rebaseline all platforms/tests/types we specified in the constructor. |
166 def RebaselineAll(self): | 230 def RebaselineAll(self): |
167 for test in self._tests: | 231 for subdir in self._subdirs: |
168 for subdir in self._subdirs: | 232 if not subdir in SUBDIR_MAPPING.keys(): |
169 if not subdir in SUBDIR_MAPPING.keys(): | 233 raise Exception(('unrecognized platform subdir "%s"; ' + |
170 raise Exception(('unrecognized platform subdir "%s"; ' + | 234 'should be one of %s') % ( |
171 'should be one of %s') % ( | 235 subdir, SUBDIR_MAPPING.keys())) |
172 subdir, SUBDIR_MAPPING.keys())) | 236 builder_name = SUBDIR_MAPPING[subdir] |
173 builder_name = SUBDIR_MAPPING[subdir] | 237 if self._tests: |
174 self._RebaselineOneTest(expectations_subdir=subdir, | 238 for test in self._tests: |
175 builder_name=builder_name, | 239 self._RebaselineOneTest(expectations_subdir=subdir, |
176 test=test) | 240 builder_name=builder_name, |
177 | 241 test=test) |
242 else: # get the raw list of files that need rebaselining from JSON | |
243 json_url = '/'.join([self._json_base_url, | |
244 subdir, builder_name, subdir, | |
245 self._json_filename]) | |
246 filenames = self._GetFilesToRebaseline(json_url=json_url) | |
247 for filename in filenames: | |
248 outfilename = os.path.join(subdir, filename); | |
249 self._RebaselineOneFile(expectations_subdir=subdir, | |
250 builder_name=builder_name, | |
251 infilename=filename, | |
252 outfilename=outfilename) | |
178 | 253 |
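As a worked example (illustrative only) of the json_url assembled above, using the default --json_base_url and --json_filename values and one subdir from SUBDIR_MAPPING:

```python
subdir = 'base-shuttle-win7-intel-float'
builder_name = SUBDIR_MAPPING[subdir]  # 'Test-Win7-ShuttleA-HD2000-x86-Release'
json_url = '/'.join(['http://skia-autogen.googlecode.com/svn/gm-actual',
                     subdir, builder_name, subdir, 'actual-results.json'])
# -> http://skia-autogen.googlecode.com/svn/gm-actual/base-shuttle-win7-intel-float/
#        Test-Win7-ShuttleA-HD2000-x86-Release/base-shuttle-win7-intel-float/
#        actual-results.json
```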
179 # main... | 254 # main... |
180 | 255 |
181 parser = argparse.ArgumentParser() | 256 parser = argparse.ArgumentParser() |
182 parser.add_argument('--configs', metavar='CONFIG', nargs='+', | 257 parser.add_argument('--configs', metavar='CONFIG', nargs='+', |
183 help='which configurations to rebaseline, e.g. ' + | 258 help='which configurations to rebaseline, e.g. ' + |
184 '"--configs 565 8888"; if unspecified, run a default ' + | 259 '"--configs 565 8888"; if unspecified, run a default ' + |
185 'set of configs') | 260 'set of configs. This should ONLY be specified if ' + |
261 '--tests has also been specified.') | |
186 parser.add_argument('--dry_run', action='store_true', | 262 parser.add_argument('--dry_run', action='store_true', |
187 help='instead of actually downloading files or adding ' + | 263 help='instead of actually downloading files or adding ' + |
188 'files to checkout, display a list of operations that ' + | 264 'files to checkout, display a list of operations that ' + |
189 'we would normally perform') | 265 'we would normally perform') |
266 parser.add_argument('--json_base_url', | |
267 help='base URL from which to read JSON_FILENAME ' + | |
268 'files; defaults to %(default)s', | |
269 default='http://skia-autogen.googlecode.com/svn/gm-actual') | |
270 parser.add_argument('--json_filename', | |
271 help='filename (under JSON_BASE_URL) to read a summary ' + | |
272 'of results from; defaults to %(default)s', | |
273 default='actual-results.json') | |
190 parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+', | 274 parser.add_argument('--subdirs', metavar='SUBDIR', nargs='+', |
191 help='which platform subdirectories to rebaseline; ' + | 275 help='which platform subdirectories to rebaseline; ' + |
192 'if unspecified, rebaseline all subdirs, same as ' + | 276 'if unspecified, rebaseline all subdirs, same as ' + |
193 '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys()))) | 277 '"--subdirs %s"' % ' '.join(sorted(SUBDIR_MAPPING.keys()))) |
194 parser.add_argument('--tests', metavar='TEST', nargs='+', required=True, | 278 parser.add_argument('--tests', metavar='TEST', nargs='+', |
195 help='which tests to rebaseline, e.g. ' + | 279 help='which tests to rebaseline, e.g. ' + |
196 '"--tests aaclip bigmatrix"') | 280 '"--tests aaclip bigmatrix"; if unspecified, then all ' + |
281 'failing tests (according to the actual-results.json ' + | |
282 'file) will be rebaselined.') | |
197 args = parser.parse_args() | 283 args = parser.parse_args() |
198 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs, | 284 rebaseliner = Rebaseliner(tests=args.tests, configs=args.configs, |
199 subdirs=args.subdirs, dry_run=args.dry_run) | 285 subdirs=args.subdirs, dry_run=args.dry_run, |
286 json_base_url=args.json_base_url, | |
287 json_filename=args.json_filename) | |
200 rebaseliner.RebaselineAll() | 288 rebaseliner.RebaselineAll() |
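For completeness, an equivalent programmatic use of the new constructor (a sketch; the argument values are examples mirroring the argparse defaults above, not a prescribed workflow):

```python
rebaseliner = Rebaseliner(
    json_base_url='http://skia-autogen.googlecode.com/svn/gm-actual',
    json_filename='actual-results.json',
    subdirs=['base-shuttle-win7-intel-float'],
    tests=None,      # rebaseline whatever the JSON summary reports as failed
    configs=None,
    dry_run=True)    # print the operations instead of performing them
rebaseliner.RebaselineAll()
```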