#!/usr/bin/python

'''
Copyright 2013 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

'''
Rebaselines GM test results as individual image files
(the "old way", before https://goto.google.com/ChecksumTransitionDetail ).

Once we have switched our expectations to JSON form for all platforms,
we can delete this file.

There is a lot of code duplicated between here and rebaseline.py, but
that's fine because we will delete this file soon.

TODO(epoger): Fix indentation in this file (2-space indents, not 4-space).
'''

# System-level imports
import os
import re
import subprocess
import sys
import urllib2

# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
    sys.path.append(GM_DIRECTORY)
import gm_json
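# gm_json supplies the pieces this script relies on below: IMAGE_FILENAME_PATTERN
# (used to split an image filename into test name and config), CreateGmActualUrl
# (used to build the URL of an actual image in Google Storage), LoadFromString,
# and the JSONKEY_* constants that name sections of the results summary.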


class CommandFailedException(Exception):
    pass

class ImageRebaseliner(object):

    # params:
    #  expectations_root: root directory of all expectations
    #  json_base_url: base URL from which to read json_filename
    #  json_filename: filename (under json_base_url) from which to read a
    #                 summary of results; typically "actual-results.json"
    #  exception_handler: reference to rebaseline.ExceptionHandler object
    #  tests: list of tests to rebaseline, or None if we should rebaseline
    #         whatever files the JSON results summary file tells us to
    #  configs: which configs to run for each test, or None if we should
    #           rebaseline whatever configs the JSON results summary file tells
    #           us to
    #  dry_run: if True, instead of actually downloading files or adding
    #           files to checkout, display a list of operations that
    #           we would normally perform
    #  add_new: if True, add expectations for tests which don't have any yet
    #  missing_json_is_fatal: whether to halt execution if we cannot read a
    #                         JSON actual result summary file
    def __init__(self, expectations_root, json_base_url, json_filename,
                 exception_handler, tests=None, configs=None, dry_run=False,
                 add_new=False, missing_json_is_fatal=False):
        self._expectations_root = expectations_root
        self._tests = tests
        self._configs = configs
        self._json_base_url = json_base_url
        self._json_filename = json_filename
        self._exception_handler = exception_handler
        self._dry_run = dry_run
        self._add_new = add_new
        self._missing_json_is_fatal = missing_json_is_fatal
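        # IMAGE_FILENAME_PATTERN splits an image filename such as
        # 'imageblur_565.png' into its two capture groups, the test name and
        # the config; the code below relies on both groups.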
        self._image_filename_re = re.compile(gm_json.IMAGE_FILENAME_PATTERN)
        self._is_svn_checkout = (
            os.path.exists(os.path.join(expectations_root, '.svn')) or
            os.path.exists(os.path.join(expectations_root, os.pardir, '.svn')))
        self._is_git_checkout = (
            os.path.exists(os.path.join(expectations_root, '.git')) or
            os.path.exists(os.path.join(expectations_root, os.pardir, '.git')))

    # If dry_run is False, execute subprocess.call(cmd).
    # If dry_run is True, print the command we would have otherwise run.
    # Raises a CommandFailedException if the command fails.
    def _Call(self, cmd):
        if self._dry_run:
            print '%s' % ' '.join(cmd)
            return
        if subprocess.call(cmd) != 0:
            raise CommandFailedException('error running command: ' +
                                         ' '.join(cmd))

    # Download a single actual result from GoogleStorage.
    # Raises an exception if it fails.
    def _DownloadFromGoogleStorage(self, infilename, outfilename, all_results):
        match = self._image_filename_re.match(infilename)
        if not match:
            raise Exception('unable to find test_name for infilename %s' %
                            infilename)
        test_name = match.group(1)
        try:
            hash_type, hash_value = all_results[infilename]
        except KeyError:
            raise Exception('unable to find filename %s in all_results dict' %
                            infilename)
        except ValueError as e:
            raise Exception(
                'ValueError reading filename %s from all_results dict: %s' % (
                    infilename, e))
        url = gm_json.CreateGmActualUrl(
            test_name=test_name, hash_type=hash_type, hash_digest=hash_value)
        try:
            self._DownloadFile(source_url=url, dest_filename=outfilename)
        except CommandFailedException:
            raise Exception('Couldn\'t fetch gs_url %s as outfile %s' % (
                url, outfilename))

    # Download a single file, raising a CommandFailedException if it fails.
    def _DownloadFile(self, source_url, dest_filename):
        # Download into a temporary file and then rename it afterwards,
        # so that we don't corrupt the existing file if it fails midway through.
        temp_filename = os.path.join(os.path.dirname(dest_filename),
                                     '.temp-' + os.path.basename(dest_filename))

        # TODO(epoger): Replace calls to "curl"/"mv" (which will only work on
        # Unix) with a Python HTTP library (which should work cross-platform)
        self._Call([ 'curl', '--fail', '--silent', source_url,
                     '--output', temp_filename ])
        self._Call([ 'mv', temp_filename, dest_filename ])

    # Returns the full contents of a URL, as a single string.
    #
    # Unlike standard URL handling, we allow relative "file:" URLs;
    # for example, "file:one/two" resolves to the file ./one/two
    # (relative to current working dir)
    def _GetContentsOfUrl(self, url):
        file_prefix = 'file:'
        if url.startswith(file_prefix):
            filename = url[len(file_prefix):]
            with open(filename, 'r') as f:
                return f.read()
        else:
            return urllib2.urlopen(url).read()

    # Returns a dictionary of actual results from the actual-results.json file.
    #
    # The dictionary returned has this format:
    # {
    #  u'imageblur_565.png': [u'bitmap-64bitMD5', 3359963596899141322],
    #  u'imageblur_8888.png': [u'bitmap-64bitMD5', 4217923806027861152],
    #  u'shadertext3_8888.png': [u'bitmap-64bitMD5', 3713708307125704716]
    # }
    #
    # If the JSON actual result summary file cannot be loaded, the behavior
    # depends on self._missing_json_is_fatal:
    # - if true: execution will halt with an exception
    # - if false: we will log an error message but return an empty dictionary
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  sections: a list of section names to include in the results, e.g.
    #            [gm_json.JSONKEY_ACTUALRESULTS_FAILED,
    #             gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON] ;
    #            if None, then include ALL sections.
    def _GetActualResults(self, json_url, sections=None):
        try:
            json_contents = self._GetContentsOfUrl(json_url)
        except (urllib2.HTTPError, IOError):
            message = 'unable to load JSON summary URL %s' % json_url
            if self._missing_json_is_fatal:
                raise ValueError(message)
            else:
                print '# %s' % message
                return {}

        json_dict = gm_json.LoadFromString(json_contents)
        results_to_return = {}
        actual_results = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        if not sections:
            sections = actual_results.keys()
        for section in sections:
            section_results = actual_results[section]
            if section_results:
                results_to_return.update(section_results)
        return results_to_return

    # Returns a list of files that require rebaselining.
    #
    # Note that this returns a list of FILES, like this:
    #  ['imageblur_565.png', 'xfermodes_pdf.png']
    # rather than a list of TESTS, like this:
    #  ['imageblur', 'xfermodes']
    #
    # params:
    #  json_url: URL pointing to a JSON actual result summary file
    #  add_new: if True, then return files listed in any of these sections:
    #            - JSONKEY_ACTUALRESULTS_FAILED
    #            - JSONKEY_ACTUALRESULTS_NOCOMPARISON
    #           if False, then return only files listed in this section:
    #            - JSONKEY_ACTUALRESULTS_FAILED
    #
    def _GetFilesToRebaseline(self, json_url, add_new):
        if self._dry_run:
            print ''
            print '#'
        print ('# Getting files to rebaseline from JSON summary URL %s ...'
               % json_url)
        sections = [gm_json.JSONKEY_ACTUALRESULTS_FAILED]
        if add_new:
            sections.append(gm_json.JSONKEY_ACTUALRESULTS_NOCOMPARISON)
        results_to_rebaseline = self._GetActualResults(json_url=json_url,
                                                       sections=sections)
        files_to_rebaseline = results_to_rebaseline.keys()
        files_to_rebaseline.sort()
        print '# ... found files_to_rebaseline %s' % files_to_rebaseline
        if self._dry_run:
            print '#'
        return files_to_rebaseline

    # Rebaseline a single file.
    def _RebaselineOneFile(self, expectations_subdir, builder_name,
                           infilename, outfilename, all_results):
        if self._dry_run:
            print ''
        print '# ' + infilename

        # Download this result image from Google Storage.
        # If it fails, an exception will be raised.
        self._DownloadFromGoogleStorage(infilename=infilename,
                                        outfilename=outfilename,
                                        all_results=all_results)

        # Add this file to version control (if appropriate).
        if self._add_new:
            if self._is_svn_checkout:
                cmd = [ 'svn', 'add', '--quiet', outfilename ]
                self._Call(cmd)
                cmd = [ 'svn', 'propset', '--quiet', 'svn:mime-type',
                        'image/png', outfilename ]
                self._Call(cmd)
            elif self._is_git_checkout:
                cmd = [ 'git', 'add', outfilename ]
                self._Call(cmd)

    # Rebaseline all tests/types we specified in the constructor,
    # within this gm-expectations subdir.
    #
    # params:
    #  subdir : e.g. 'base-shuttle-win7-intel-float'
    #  builder : e.g. 'Test-Win7-ShuttleA-HD2000-x86-Release'
    def RebaselineSubdir(self, subdir, builder):
        if not os.path.isdir(os.path.join(self._expectations_root, subdir)):
            self._exception_handler.RaiseExceptionOrContinue(Exception((
                'Could not find "%s" subdir within expectations_root "%s". ' +
                'Are you sure --expectations-root is pointing at a valid ' +
                'gm-expected directory?') % (subdir, self._expectations_root)))
            return

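        # The results summary is expected at
        # <json_base_url>/<subdir>/<builder>/<subdir>/<json_filename>, e.g.
        # .../base-shuttle-win7-intel-float/Test-Win7-ShuttleA-HD2000-x86-Release/
        #     base-shuttle-win7-intel-float/actual-results.json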
        json_url = '/'.join([self._json_base_url,
                             subdir, builder, subdir,
                             self._json_filename])
        all_results = self._GetActualResults(json_url=json_url)
        filenames = self._GetFilesToRebaseline(json_url=json_url,
                                               add_new=self._add_new)
        skipped_files = []
        for filename in filenames:
            (test, config) = self._image_filename_re.match(filename).groups()
            if self._tests:
                if test not in self._tests:
                    skipped_files.append(filename)
                    continue
            if self._configs:
                if config not in self._configs:
                    skipped_files.append(filename)
                    continue
            outfilename = os.path.join(self._expectations_root, subdir,
                                       filename)
            try:
                self._RebaselineOneFile(expectations_subdir=subdir,
                                        builder_name=builder,
                                        infilename=filename,
                                        outfilename=outfilename,
                                        all_results=all_results)
            except BaseException as e:
                self._exception_handler.RaiseExceptionOrContinue(e)
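
# A minimal usage sketch (rebaseline.py is the real driver; the expectations
# path and json_base_url below are placeholders, and 'handler' stands in for a
# rebaseline.ExceptionHandler instance whose construction is not shown here):
#
#   rebaseliner = ImageRebaseliner(
#       expectations_root='gm-expected',                     # hypothetical path
#       json_base_url='http://<results-server>/gm-actual',   # placeholder URL
#       json_filename='actual-results.json',
#       exception_handler=handler,
#       tests=['imageblur'], configs=['565', '8888'],
#       dry_run=True)
#   rebaseliner.RebaselineSubdir(
#       subdir='base-shuttle-win7-intel-float',
#       builder='Test-Win7-ShuttleA-HD2000-x86-Release')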