
Diff: gm/rebaseline_server/results.py

Issue 265793013: make compare_rendered_pictures process render_pictures's new JSON output format (Closed)
Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: add Eric's idea as comment (created 6 years, 7 months ago)
 #!/usr/bin/python
 
 """
 Copyright 2013 Google Inc.
 
 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 
 Repackage expected/actual GM results as needed by our HTML rebaseline viewer.
 """
(...skipping 172 matching lines...)
@@ -183,36 +183,67 @@
       True if we should ignore expectations and actuals for this builder.
     """
     for pattern in self._skip_builders_pattern_list:
       if pattern.match(builder):
         return True
     for pattern in self._match_builders_pattern_list:
       if pattern.match(builder):
         return False
     return True
 
-  def _read_dicts_from_root(self, root, pattern='*.json'):
+  def _read_builder_dicts_from_root(self, root, pattern='*.json'):
     """Read all JSON dictionaries within a directory tree.
 
+    Skips any dictionaries belonging to a builder we have chosen to ignore.
+
     Args:
       root: path to root of directory tree
       pattern: which files to read within root (fnmatch-style pattern)
 
     Returns:
       A meta-dictionary containing all the JSON dictionaries found within
-      the directory tree, keyed by the builder name of each dictionary.
+      the directory tree, keyed by builder name (the basename of the directory
+      where each JSON dictionary was found).
 
     Raises:
       IOError if root does not refer to an existing directory
     """
+    # I considered making this call _read_dicts_from_root(), but I decided
+    # it was better to prune out the ignored builders within the os.walk().
     if not os.path.isdir(root):
       raise IOError('no directory found at path %s' % root)
     meta_dict = {}
     for dirpath, dirnames, filenames in os.walk(root):
       for matching_filename in fnmatch.filter(filenames, pattern):
         builder = os.path.basename(dirpath)
         if self._ignore_builder(builder):
           continue
-        fullpath = os.path.join(dirpath, matching_filename)
-        meta_dict[builder] = gm_json.LoadFromFile(fullpath)
+        full_path = os.path.join(dirpath, matching_filename)
+        meta_dict[builder] = gm_json.LoadFromFile(full_path)
+    return meta_dict
+
+  def _read_dicts_from_root(self, root, pattern='*.json'):
+    """Read all JSON dictionaries within a directory tree.
+
+    Args:
+      root: path to root of directory tree
+      pattern: which files to read within root (fnmatch-style pattern)
+
+    Returns:
+      A meta-dictionary containing all the JSON dictionaries found within
+      the directory tree, keyed by the pathname (relative to root) of each JSON
+      dictionary.
+
+    Raises:
+      IOError if root does not refer to an existing directory
+    """
+    if not os.path.isdir(root):
+      raise IOError('no directory found at path %s' % root)
+    meta_dict = {}
+    for abs_dirpath, dirnames, filenames in os.walk(root):
+      rel_dirpath = os.path.relpath(abs_dirpath, root)
+      for matching_filename in fnmatch.filter(filenames, pattern):
+        abs_path = os.path.join(abs_dirpath, matching_filename)
+        rel_path = os.path.join(rel_dirpath, matching_filename)
+        meta_dict[rel_path] = gm_json.LoadFromFile(abs_path)
     return meta_dict
 
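The two readers above differ only in how they key their results: _read_builder_dicts_from_root() keys each JSON dictionary by the basename of the directory it was found in (treated as the builder name) and prunes ignored builders during the walk, while the new _read_dicts_from_root() reads everything and keys by path relative to root. A minimal standalone sketch of the relative-path keying, with a hypothetical function name and json.load() standing in for gm_json.LoadFromFile():

import fnmatch
import json
import os

def read_dicts_from_root(root, pattern='*.json'):
  # Hypothetical standalone equivalent of the new method: key each parsed
  # JSON file by its path relative to root, using json.load() in place of
  # gm_json.LoadFromFile().
  if not os.path.isdir(root):
    raise IOError('no directory found at path %s' % root)
  meta_dict = {}
  for abs_dirpath, dirnames, filenames in os.walk(root):
    rel_dirpath = os.path.relpath(abs_dirpath, root)
    for matching_filename in fnmatch.filter(filenames, pattern):
      abs_path = os.path.join(abs_dirpath, matching_filename)
      rel_path = os.path.join(rel_dirpath, matching_filename)
      with open(abs_path) as json_file:
        meta_dict[rel_path] = json.load(json_file)
  return meta_dict

For a tree like root/builderA/actual-results.json this yields the key 'builderA/actual-results.json', whereas the builder-keyed reader would key the same dictionary as 'builderA'.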
@@ -219,47 +250,47 @@
   @staticmethod
   def _create_relative_url(hashtype_and_digest, test_name):
     """Returns the URL for this image, relative to GM_ACTUALS_ROOT_HTTP_URL.
 
     If we don't have a record of this image, returns None.
 
     Args:
       hashtype_and_digest: (hash_type, hash_digest) tuple, or None if we
           don't have a record of this image
       test_name: string; name of the GM test that created this image
     """
     if not hashtype_and_digest:
       return None
     return gm_json.CreateGmRelativeUrl(
         test_name=test_name,
         hash_type=hashtype_and_digest[0],
         hash_digest=hashtype_and_digest[1])
 
   @staticmethod
   def combine_subdicts(input_dict):
     """ Flatten out a dictionary structure by one level.
 
     Input:
       {
-        "failed" : {
-          "changed.png" : [ "bitmap-64bitMD5", 8891695120562235492 ],
+        KEY_A1 : {
+          KEY_B1 : VALUE_B1,
         },
-        "no-comparison" : {
-          "unchanged.png" : [ "bitmap-64bitMD5", 11092453015575919668 ],
+        KEY_A2 : {
+          KEY_B2 : VALUE_B2,
         }
       }
 
     Output:
       {
-        "changed.png" : [ "bitmap-64bitMD5", 8891695120562235492 ],
-        "unchanged.png" : [ "bitmap-64bitMD5", 11092453015575919668 ],
+        KEY_B1 : VALUE_B1,
+        KEY_B2 : VALUE_B2,
       }
 
     If this would result in any repeated keys, it will raise an Exception.
     """
     output_dict = {}
     for key, subdict in input_dict.iteritems():
       for subdict_key, subdict_value in subdict.iteritems():
         if subdict_key in output_dict:
           raise Exception('duplicate key %s in combine_subdicts' % subdict_key)
         output_dict[subdict_key] = subdict_value
     return output_dict
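combine_subdicts() itself is unchanged here; only its docstring example moves from concrete GM data to generic placeholders. For concreteness, here is the same flattening logic as a hypothetical standalone copy of the function, fed the old docstring's data (Python 2, matching the iteritems() usage in this file):

def combine_subdicts(input_dict):
  # Merge all second-level dicts into one; raise instead of silently
  # overwriting when two subdicts share a key.
  output_dict = {}
  for key, subdict in input_dict.iteritems():
    for subdict_key, subdict_value in subdict.iteritems():
      if subdict_key in output_dict:
        raise Exception('duplicate key %s in combine_subdicts' % subdict_key)
      output_dict[subdict_key] = subdict_value
  return output_dict

print combine_subdicts({
    'failed': {'changed.png': ['bitmap-64bitMD5', 8891695120562235492]},
    'no-comparison': {'unchanged.png': ['bitmap-64bitMD5', 11092453015575919668]},
})
# -> {'changed.png': [...], 'unchanged.png': [...]}  (key order may vary)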
@@ -265,0 +297,10 @@
+
+  @staticmethod
+  def get_multilevel(input_dict, *keys):
+    """ Returns input_dict[key1][key2][...], or None if any key is not found.
+    """
+    for key in keys:
+      if input_dict == None:
+        return None
+      input_dict = input_dict.get(key, None)
+    return input_dict
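The new get_multilevel() helper is a None-propagating chain of dict.get() calls, so callers can probe nested JSON summaries without try/except. A small usage sketch against a hypothetical standalone copy and made-up data:

def get_multilevel(input_dict, *keys):
  # Descend one key per level; give up and return None as soon as any
  # level is missing, instead of raising KeyError.
  for key in keys:
    if input_dict == None:
      return None
    input_dict = input_dict.get(key, None)
  return input_dict

summary = {'actual-results': {'failed': {'changed.png': ['bitmap-64bitMD5', 123]}}}
print get_multilevel(summary, 'actual-results', 'failed', 'changed.png')   # ['bitmap-64bitMD5', 123]
print get_multilevel(summary, 'actual-results', 'no-comparison', 'x.png')  # None (no KeyError)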
