| OLD | NEW |
| 1 # Copyright 2017 The Chromium Authors. All rights reserved. | 1 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Classes for merging layout tests results directories together. | 5 """Classes for merging layout tests results directories together. |
| 6 | 6 |
| 7 This is split into three parts: | 7 This is split into three parts: |
| 8 | 8 |
| 9 * Generic code to merge JSON data together. | 9 * Generic code to merge JSON data together. |
| 10 * Generic code to merge directories together. | 10 * Generic code to merge directories together. |
| (...skipping 10 matching lines...) Expand all Loading... |
| 21 * Helper functions can be provided to deal with merging specific file objects. | 21 * Helper functions can be provided to deal with merging specific file objects. |
| 22 * Helper functions are called when a given Match object returns true for the | 22 * Helper functions are called when a given Match object returns true for the |
| 23 filenames. | 23 filenames. |
| 24 * The default helper functions only merge if file contents match or the file | 24 * The default helper functions only merge if file contents match or the file |
| 25 only exists in one input directory. | 25 only exists in one input directory. |
| 26 | 26 |
| 27 The quickest way to understand how the mergers, helper functions and match | 27 The quickest way to understand how the mergers, helper functions and match |
| 28 objects work together is to look at the unit tests. | 28 objects work together is to look at the unit tests. |
| 29 """ | 29 """ |
| 30 | 30 |
| 31 import collections |
| 31 import json | 32 import json |
| 32 import logging | 33 import logging |
| 33 import pprint | 34 import pprint |
| 34 import re | 35 import re |
| 35 import types | 36 import types |
| 36 | 37 |
| 37 from webkitpy.common.system.filesystem import FileSystem | 38 from webkitpy.common.system.filesystem import FileSystem |
| 38 | 39 |
| 39 | 40 |
| 40 _log = logging.getLogger(__name__) | 41 _log = logging.getLogger(__name__) |
| (...skipping 17 matching lines...) Expand all Loading... |
class TypeMatch(Match):
    """Match when the object is an instance of any of the given types."""

    def __init__(self, *types_to_match):
        # Kept as the tuple form which isinstance() accepts directly.
        self.types = types_to_match

    def __call__(self, obj, name=None):
        # name is part of the Match interface but irrelevant to a type check.
        return isinstance(obj, self.types)
| 66 | 67 |
| 67 | 68 |
class NameRegexMatch(Match):
    """Match based on regex being found in name.

    Use start line (^) and end of line ($) anchors if you want to match on
    exact name.
    """

    def __init__(self, regex):
        self.regex = re.compile(regex)

    def __call__(self, obj, name=None):
        # An object without a name can never match a name-based pattern.
        return name is not None and self.regex.search(name) is not None
| 82 | 83 |
| 83 | 84 |
class ValueMatch(Match):
    """Match when the object equals a preset value."""

    def __init__(self, value):
        self.value = value

    def __call__(self, obj, name=None):
        # Only the value matters; the name argument is ignored.
        return self.value == obj
| 92 | 93 |
| 93 | 94 |
class MergeFailure(Exception):
    """Base exception for merge failing.

    Args:
        msg: Human readable reason the merge failed.
        name: Semicolon separated "path" of the value being merged (used
            for error messages).
        objs: The objects (or the offending subset of them) which could
            not be merged.
    """

    def __init__(self, msg, name, objs):
        emsg = (
            "Failure merging {name}: "
            " {msg}\nTrying to merge {objs}."
        ).format(
            name=name,
            msg=msg,
            objs=objs,
        )
        Exception.__init__(self, emsg)

    @classmethod
    def assert_type_eq(cls, name, objs):
        """Raise a MergeFailure unless all objects have the same type.

        An empty or single-element sequence trivially passes.
        """
        # Guard against an empty sequence instead of raising IndexError.
        if not objs:
            return
        obj_0 = objs[0]
        for obj_n in objs[1:]:
            # Deliberate exact type comparison (not isinstance) so that,
            # e.g., dict and OrderedDict refuse to merge.
            if type(obj_0) != type(obj_n):
                raise cls("Types don't match", name, (obj_0, obj_n))
| 113 | 115 |
| 114 | 116 |
class Merger(object):
    """Base class for merger objects."""

    def __init__(self):
        # Ordered list of (match_func, merge_func) pairs; searched in
        # reverse so the most recently added helper wins.
        self.helpers = []

    def add_helper(self, match_func, merge_func):
        """Register a function pair which merges values.

        The exact signatures of match_func and merge_func depend on the
        concrete merger type. merge_func runs whenever match_func returns
        true.

        Helpers are searched in last added, first checked order, so add
        generic helpers before more specific ones.
        """
        self.helpers.append((match_func, merge_func))
| 131 | 133 |
| 132 | 134 |
class JSONMerger(Merger):
    """Merge JSON-like objects.

    For adding helpers:

        match_func is a function of form
            def f(obj, name=None) -> bool
        When the function returns true, the merge_func will be called.

        merge_func is a function of the form
            def f(list_of_objs, name=None) -> obj_merged
        Merge functions should *never* modify the input arguments.
    """

    def __init__(self):
        Merger.__init__(self)

        # types.ListType/TupleType/DictType are merely aliases for the
        # builtins in Python 2 and no longer exist in Python 3, so use
        # the builtin names directly.
        self.add_helper(
            TypeMatch(list, tuple), self.merge_listlike)
        self.add_helper(
            TypeMatch(dict), self.merge_dictlike)

    def fallback_matcher(self, objs, name=None):
        """Called when no helper matched; by default the merge fails."""
        raise MergeFailure(
            "No merge helper found!", name, objs)

    def merge_equal(self, objs, name=None):
        """Merge equal objects together.

        Raises:
            MergeFailure: If any two objects are unequal.
        """
        obj_0 = objs[0]
        for obj_n in objs[1:]:
            if obj_0 != obj_n:
                raise MergeFailure(
                    "Unable to merge!", name, (obj_0, obj_n))
        return obj_0

    def merge_listlike(self, lists, name=None):  # pylint: disable=unused-argument
        """Merge things which are "list like" (tuples, lists, sets)."""
        MergeFailure.assert_type_eq(name, lists)
        output = list(lists[0])
        for list_n in lists[1:]:
            output.extend(list_n)
        # Convert back to the original container type (e.g. tuple).
        return lists[0].__class__(output)

    def merge_dictlike(self, dicts, name=None, order_cls=collections.OrderedDict):
        """Merge things which are dictionaries.

        Args:
            dicts (list of dict): Dictionary like objects to merge (should all
                be the same type).
            name (str): Name of the objects being merged (used for error
                messages).
            order_cls: Dict like object class used to produce key ordering.
                Defaults to collections.OrderedDict which means all keys in
                dicts[0] come before all keys in dicts[1], etc.

        Returns:
            dict: Merged dictionary object of same type as the objects in
                dicts.
        """
        MergeFailure.assert_type_eq(name, dicts)

        # Group the values for each key, preserving first-seen key order.
        dict_mid = order_cls()
        for dobj in dicts:
            for key in dobj:
                dict_mid.setdefault(key, []).append(dobj[key])

        dict_out = dicts[0].__class__({})
        # .items() rather than the Python-2-only .iteritems().
        for k, v in dict_mid.items():
            assert v
            if len(v) == 1:
                # Key occurred in only one input; take the value as-is.
                dict_out[k] = v[0]
            elif len(v) > 1:
                dict_out[k] = self.merge(v, name=join_name(name, k))
        return dict_out

    def merge(self, objs, name=""):
        """Generic merge function.

        name is a string representing the current key value separated by
        semicolons. For example, if file.json had the following;

            {'key1': {'key2': 3}}

        Then the name of the value 3 is 'file.json:key1:key2'
        """
        # None values drop out of the merge entirely.
        objs = [o for o in objs if o is not None]

        if not objs:
            return None

        MergeFailure.assert_type_eq(name, objs)

        # Try the merge helpers, most recently added first.
        for match_func, merge_func in reversed(self.helpers):
            for obj in objs:
                if match_func(obj, name):
                    return merge_func(objs, name=name)

        return self.fallback_matcher(objs, name=name)
| 221 | 234 |
| 222 | 235 |
| 223 # Classes for recursively merging a directory together. | 236 # Classes for recursively merging a directory together. |
| 224 # ------------------------------------------------------------------------ | 237 # ------------------------------------------------------------------------ |
| 225 | 238 |
| 226 | 239 |
class FilenameRegexMatch(object):
    """Match when the filename contains the given regex."""

    def __init__(self, regex):
        self.regex = re.compile(regex)

    def __call__(self, filename, to_merge):
        # to_merge is accepted to satisfy the helper interface; only the
        # filename is examined.
        match = self.regex.search(filename)
        return match is not None

    def __str__(self):
        return "FilenameRegexMatch(%r)" % self.regex.pattern

    __repr__ = __str__
| 240 | 253 |
| 241 | 254 |
| 242 class MergeFiles(object): | 255 class MergeFiles(object): |
| 243 """Base class for things which merge files.""" | 256 """Base class for things which merge files.""" |
| 244 | 257 |
    def __init__(self, filesystem):
        """Store the filesystem used for all file operations.

        Args:
            filesystem: FileSystem-like object (must be truthy) used by
                subclasses to read and write the files being merged.
        """
        assert filesystem
        self.filesystem = filesystem
| (...skipping 20 matching lines...) Expand all Loading... |
| 268 for filename in to_merge[1:]: | 281 for filename in to_merge[1:]: |
| 269 other_data = self.filesystem.read_binary_file(filename) | 282 other_data = self.filesystem.read_binary_file(filename) |
| 270 if data != other_data: | 283 if data != other_data: |
| 271 nonmatching.append(filename) | 284 nonmatching.append(filename) |
| 272 | 285 |
| 273 if nonmatching: | 286 if nonmatching: |
| 274 raise MergeFailure( | 287 raise MergeFailure( |
| 275 '\n'.join( | 288 '\n'.join( |
| 276 ['File contents don\'t match:'] + nonmatching), | 289 ['File contents don\'t match:'] + nonmatching), |
| 277 out_filename, | 290 out_filename, |
| 278 to_merge[0], to_merge[1:]) | 291 to_merge) |
| 279 | 292 |
| 280 self.filesystem.write_binary_file(out_filename, data) | 293 self.filesystem.write_binary_file(out_filename, data) |
| 281 | 294 |
| 282 | 295 |
| 283 class MergeFilesLinesSorted(MergeFiles): | 296 class MergeFilesLinesSorted(MergeFiles): |
| 284 """Merge and sort the files of the given files.""" | 297 """Merge and sort the files of the given files.""" |
| 285 | 298 |
| 286 def __call__(self, out_filename, to_merge): | 299 def __call__(self, out_filename, to_merge): |
| 287 lines = [] | 300 lines = [] |
| 288 for filename in to_merge: | 301 for filename in to_merge: |
| (...skipping 26 matching lines...) Expand all Loading... |
| 315 output. | 328 output. |
| 316 """ | 329 """ |
| 317 | 330 |
| 318 def __init__(self, filesystem, json_data_merger=None, json_data_value_overri
des=None): | 331 def __init__(self, filesystem, json_data_merger=None, json_data_value_overri
des=None): |
| 319 MergeFiles.__init__(self, filesystem) | 332 MergeFiles.__init__(self, filesystem) |
| 320 self._json_data_merger = json_data_merger or JSONMerger() | 333 self._json_data_merger = json_data_merger or JSONMerger() |
| 321 self._json_data_value_overrides = json_data_value_overrides or {} | 334 self._json_data_value_overrides = json_data_value_overrides or {} |
| 322 | 335 |
| 323 def __call__(self, out_filename, to_merge): | 336 def __call__(self, out_filename, to_merge): |
| 324 try: | 337 try: |
| 325 before_a, output_data, after_a = self.load_jsonp( | 338 before_0, new_json_data_0, after_0 = self.load_jsonp( |
| 326 self.filesystem.open_binary_file_for_reading(to_merge[0])) | 339 self.filesystem.open_binary_file_for_reading(to_merge[0])) |
| 327 except ValueError as e: | 340 except ValueError as e: |
| 328 raise MergeFailure(e.message, to_merge[0], None, None) | 341 raise MergeFailure(e.message, to_merge[0], None) |
| 329 | 342 |
| 330 for filename in to_merge[1:]: | 343 input_data = [new_json_data_0] |
| 344 for filename_n in to_merge[1:]: |
| 331 try: | 345 try: |
| 332 before_b, new_json_data, after_b = self.load_jsonp( | 346 before_n, new_json_data_n, after_n = self.load_jsonp( |
| 333 self.filesystem.open_binary_file_for_reading(filename)) | 347 self.filesystem.open_binary_file_for_reading(filename_n)) |
| 334 except ValueError as e: | 348 except ValueError as e: |
| 335 raise MergeFailure(e.message, filename, None, None) | 349 raise MergeFailure(e.message, filename_n, None) |
| 336 | 350 |
| 337 if before_a != before_b: | 351 if before_0 != before_n: |
| 338 raise MergeFailure( | 352 raise MergeFailure( |
| 339 "jsonp starting data from %s doesn't match." % filename, | 353 "jsonp starting data from %s doesn't match." % filename_n, |
| 340 out_filename, | 354 out_filename, |
| 341 before_a, before_b) | 355 [before_0, before_n]) |
| 342 | 356 |
| 343 if after_a != after_b: | 357 if after_0 != after_n: |
| 344 raise MergeFailure( | 358 raise MergeFailure( |
| 345 "jsonp ending data from %s doesn't match." % filename, | 359 "jsonp ending data from %s doesn't match." % filename_n, |
| 346 out_filename, | 360 out_filename, |
| 347 after_a, after_b) | 361 [after_0, after_n]) |
| 348 | 362 |
| 349 output_data = self._json_data_merger.merge(output_data, new_json_dat
a, filename) | 363 input_data.append(new_json_data_n) |
| 350 | 364 |
| 365 output_data = self._json_data_merger.merge(input_data, name=out_filename
) |
| 351 output_data.update(self._json_data_value_overrides) | 366 output_data.update(self._json_data_value_overrides) |
| 352 | 367 |
| 353 self.dump_jsonp( | 368 self.dump_jsonp( |
| 354 self.filesystem.open_binary_file_for_writing(out_filename), | 369 self.filesystem.open_binary_file_for_writing(out_filename), |
| 355 before_a, output_data, after_a) | 370 before_0, output_data, after_0) |
| 356 | 371 |
| 357 @staticmethod | 372 @staticmethod |
| 358 def load_jsonp(fd): | 373 def load_jsonp(fd): |
| 359 """Load a JSONP file and return the JSON data parsed. | 374 """Load a JSONP file and return the JSON data parsed. |
| 360 | 375 |
| 361 JSONP files have a JSON data structure wrapped in a function call or | 376 JSONP files have a JSON data structure wrapped in a function call or |
| 362 other non-JSON data. | 377 other non-JSON data. |
| 363 """ | 378 """ |
| 364 in_data = fd.read() | 379 in_data = fd.read() |
| 365 | 380 |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 458 # rel_file is the path of f relative to the base directory | 473 # rel_file is the path of f relative to the base directory |
| 459 rel_file = self.filesystem.join(dir_path, f)[len(base_dir) +
1:] | 474 rel_file = self.filesystem.join(dir_path, f)[len(base_dir) +
1:] |
| 460 files.setdefault(rel_file, []).append(base_dir) | 475 files.setdefault(rel_file, []).append(base_dir) |
| 461 | 476 |
| 462 # Go through each file and try to merge it. | 477 # Go through each file and try to merge it. |
| 463 # partial_file_path is the file relative to the directories. | 478 # partial_file_path is the file relative to the directories. |
| 464 for partial_file_path, in_dirs in sorted(files.iteritems()): | 479 for partial_file_path, in_dirs in sorted(files.iteritems()): |
| 465 out_path = self.filesystem.join(output_dir, partial_file_path) | 480 out_path = self.filesystem.join(output_dir, partial_file_path) |
| 466 if self.filesystem.exists(out_path): | 481 if self.filesystem.exists(out_path): |
| 467 raise MergeFailure( | 482 raise MergeFailure( |
| 468 'File %s already exist in output.', out_path, None, None) | 483 'File %s already exist in output.', out_path, None) |
| 469 | 484 |
| 470 dirname = self.filesystem.dirname(out_path) | 485 dirname = self.filesystem.dirname(out_path) |
| 471 if not self.filesystem.exists(dirname): | 486 if not self.filesystem.exists(dirname): |
| 472 self.filesystem.maybe_make_directory(dirname) | 487 self.filesystem.maybe_make_directory(dirname) |
| 473 | 488 |
| 474 to_merge = [self.filesystem.join(d, partial_file_path) for d in in_d
irs] | 489 to_merge = [self.filesystem.join(d, partial_file_path) for d in in_d
irs] |
| 475 | 490 |
| 476 _log.debug("Creating merged %s from %s", out_path, to_merge) | 491 _log.debug("Creating merged %s from %s", out_path, to_merge) |
| 477 | 492 |
| 478 for match_func, merge_func in reversed(self.helpers): | 493 for match_func, merge_func in reversed(self.helpers): |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 512 ':chromium_revision$', | 527 ':chromium_revision$', |
| 513 ':has_pretty_patch$', | 528 ':has_pretty_patch$', |
| 514 ':has_wdiff$', | 529 ':has_wdiff$', |
| 515 ':path_delimiter$', | 530 ':path_delimiter$', |
| 516 ':pixel_tests_enabled$', | 531 ':pixel_tests_enabled$', |
| 517 ':random_order_seed$', | 532 ':random_order_seed$', |
| 518 ':version$', | 533 ':version$', |
| 519 ] | 534 ] |
| 520 for match_name in matching: | 535 for match_name in matching: |
| 521 self.add_helper( | 536 self.add_helper( |
| 522 NameMatch(match_name), | 537 NameRegexMatch(match_name), |
| 523 self.merge_equal) | 538 self.merge_equal) |
| 524 | 539 |
| 525 # These keys are accumulated sums we want to add together. | 540 # These keys are accumulated sums we want to add together. |
| 526 addable = [ | 541 addable = [ |
| 527 ':fixable$', | 542 ':fixable$', |
| 528 ':num_flaky$', | 543 ':num_flaky$', |
| 529 ':num_passes$', | 544 ':num_passes$', |
| 530 ':num_regressions$', | 545 ':num_regressions$', |
| 531 ':skipped$', | 546 ':skipped$', |
| 532 ':skips$', | 547 ':skips$', |
| 533 # All keys inside the num_failures_by_type entry. | 548 # All keys inside the num_failures_by_type entry. |
| 534 ':num_failures_by_type:', | 549 ':num_failures_by_type:', |
| 535 ] | 550 ] |
| 536 for match_name in addable: | 551 for match_name in addable: |
| 537 self.add_helper( | 552 self.add_helper( |
| 538 NameMatch(match_name), | 553 NameRegexMatch(match_name), |
| 539 lambda a, b, name=None: a + b) | 554 lambda o, name=None: sum(o)) |
| 540 | 555 |
| 541 # If any shard is interrupted, mark the whole thing as interrupted. | 556 # If any shard is interrupted, mark the whole thing as interrupted. |
| 542 self.add_helper( | 557 self.add_helper( |
| 543 NameMatch(':interrupted$'), | 558 NameRegexMatch(':interrupted$'), |
| 544 lambda a, b, name=None: a or b) | 559 lambda o, name=None: bool(sum(o))) |
| 545 | 560 |
| 546 # Layout test directory value is randomly created on each shard, so | 561 # Layout test directory value is randomly created on each shard, so |
| 547 # clear it. | 562 # clear it. |
| 548 self.add_helper( | 563 self.add_helper( |
| 549 NameMatch(':layout_tests_dir$'), | 564 NameRegexMatch(':layout_tests_dir$'), |
| 550 lambda a, b, name=None: None) | 565 lambda o, name=None: None) |
| 551 | 566 |
| 552 # seconds_since_epoch is the start time, so we just take the earliest. | 567 # seconds_since_epoch is the start time, so we just take the earliest. |
| 553 self.add_helper( | 568 self.add_helper( |
| 554 NameMatch(':seconds_since_epoch$'), | 569 NameRegexMatch(':seconds_since_epoch$'), |
| 555 lambda a, b, name=None: min(a, b)) | 570 lambda o, name=None: min(*o)) |
| 556 | 571 |
| 557 def fallback_matcher(self, obj_a, obj_b, name=None): | 572 def fallback_matcher(self, objs, name=None): |
| 558 if self.allow_unknown_if_matching: | 573 if self.allow_unknown_if_matching: |
| 559 result = self.merge_equal(obj_a, obj_b, name) | 574 result = self.merge_equal(objs, name) |
| 560 _log.warning('Unknown value %s, accepting anyway as it matches.', na
me) | 575 _log.warning('Unknown value %s, accepting anyway as it matches.', na
me) |
| 561 return result | 576 return result |
| 562 return JSONMerger.fallback_matcher(self, obj_a, obj_b, name) | 577 return JSONMerger.fallback_matcher(self, objs, name) |
| 563 | 578 |
| 564 | 579 |
class LayoutTestDirMerger(DirMerger):
    """Merge layout test result directory."""

    def __init__(self, filesystem=None,
                 results_json_value_overrides=None,
                 results_json_allow_unknown_if_matching=False):
        DirMerger.__init__(self, filesystem)

        fs = self.filesystem

        # Generic helper for JSON files which are *not* "result style".
        # Registered first so the more specific helpers below take
        # precedence (helpers are checked last-added first).
        basic_json_data_merger = JSONMerger()
        basic_json_data_merger.fallback_matcher = basic_json_data_merger.merge_equal
        self.add_helper(
            FilenameRegexMatch(r'\.json$'),
            MergeFilesJSONP(fs, basic_json_data_merger))

        # access_log and error_log are httpd log files which are sortable.
        for log_name in ('access_log', 'error_log'):
            self.add_helper(
                FilenameRegexMatch(log_name + r'\.txt$'),
                MergeFilesLinesSorted(fs))

        # pywebsocket files aren't particularly useful, so just save them.
        self.add_helper(
            FilenameRegexMatch(r'pywebsocket\.ws\.log-.*-err\.txt$'),
            MergeFilesKeepFiles(fs))

        # These JSON files have "result style" JSON in them and share a
        # single merger instance.
        results_json_file_merger = MergeFilesJSONP(
            fs,
            JSONTestResultsMerger(
                allow_unknown_if_matching=results_json_allow_unknown_if_matching),
            json_data_value_overrides=results_json_value_overrides or {})

        for results_name in ('failing_results', 'full_results', 'output'):
            self.add_helper(
                FilenameRegexMatch(results_name + r'\.json$'),
                results_json_file_merger)
| OLD | NEW |