Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright (c) 2010 Google Inc. All rights reserved. | 1 # Copyright (c) 2010 Google Inc. All rights reserved. |
| 2 # | 2 # |
| 3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
| 4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
| 5 # met: | 5 # met: |
| 6 # | 6 # |
| 7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
| 8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
| 9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
| 10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
| (...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 65 platform_options = factory.platform_options(use_globs=True) | 65 platform_options = factory.platform_options(use_globs=True) |
| 66 | 66 |
| 67 results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use") | 67 results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use") |
| 68 | 68 |
| 69 suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store", | 69 suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store", |
| 70 help="Comma-separated-list of file types to rebaseline") | 70 help="Comma-separated-list of file types to rebaseline") |
| 71 | 71 |
| 72 def __init__(self, options=None): | 72 def __init__(self, options=None): |
| 73 super(AbstractRebaseliningCommand, self).__init__(options=options) | 73 super(AbstractRebaseliningCommand, self).__init__(options=options) |
| 74 self._baseline_suffix_list = BASELINE_SUFFIX_LIST | 74 self._baseline_suffix_list = BASELINE_SUFFIX_LIST |
| 75 self._scm_changes = {'add': [], 'delete': [], 'remove-lines': []} | |
| 76 | |
| 77 def _add_to_scm(self, path): | |
|
Dirk Pranke
2014/05/30 23:35:11
I would probably use different names, like "rememb
ojan
2014/05/31 00:02:56
I went with add_to_scm_later.
| |
| 78 self._scm_changes['add'].append(path) | |
| 79 | |
| 80 def _delete_from_scm(self, path): | |
| 81 self._scm_changes['delete'].append(path) | |
| 75 | 82 |
| 76 | 83 |
| 77 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand): | 84 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand): |
| 78 def __init__(self): | 85 def __init__(self): |
| 79 super(BaseInternalRebaselineCommand, self).__init__(options=[ | 86 super(BaseInternalRebaselineCommand, self).__init__(options=[ |
| 80 self.results_directory_option, | 87 self.results_directory_option, |
| 81 self.suffixes_option, | 88 self.suffixes_option, |
| 82 optparse.make_option("--builder", help="Builder to pull new baselines from"), | 89 optparse.make_option("--builder", help="Builder to pull new baselines from"), |
| 83 optparse.make_option("--test", help="Test to rebaseline"), | 90 optparse.make_option("--test", help="Test to rebaseline"), |
| 84 ]) | 91 ]) |
| 85 self._scm_changes = {'add': [], 'remove-lines': []} | |
| 86 | |
| 87 def _add_to_scm(self, path): | |
| 88 self._scm_changes['add'].append(path) | |
| 89 | 92 |
| 90 def _baseline_directory(self, builder_name): | 93 def _baseline_directory(self, builder_name): |
| 91 port = self._tool.port_factory.get_from_builder_name(builder_name) | 94 port = self._tool.port_factory.get_from_builder_name(builder_name) |
| 92 override_dir = builders.rebaseline_override_dir(builder_name) | 95 override_dir = builders.rebaseline_override_dir(builder_name) |
| 93 if override_dir: | 96 if override_dir: |
| 94 return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir) | 97 return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir) |
| 95 return port.baseline_version_dir() | 98 return port.baseline_version_dir() |
| 96 | 99 |
| 97 def _test_root(self, test_name): | 100 def _test_root(self, test_name): |
| 98 return self._tool.filesystem.splitext(test_name)[0] | 101 return self._tool.filesystem.splitext(test_name)[0] |
| (...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 224 print json.dumps(self._scm_changes) | 227 print json.dumps(self._scm_changes) |
| 225 | 228 |
| 226 | 229 |
| 227 class OptimizeBaselines(AbstractRebaseliningCommand): | 230 class OptimizeBaselines(AbstractRebaseliningCommand): |
| 228 name = "optimize-baselines" | 231 name = "optimize-baselines" |
| 229 help_text = "Reshuffles the baselines for the given tests to use as little space on disk as possible." | 232 help_text = "Reshuffles the baselines for the given tests to use as little space on disk as possible." |
| 230 show_in_main_help = True | 233 show_in_main_help = True |
| 231 argument_names = "TEST_NAMES" | 234 argument_names = "TEST_NAMES" |
| 232 | 235 |
| 233 def __init__(self): | 236 def __init__(self): |
| 234 super(OptimizeBaselines, self).__init__(options=[self.suffixes_option] + self.platform_options) | 237 super(OptimizeBaselines, self).__init__(options=[ |
| 238 self.suffixes_option, | |
| 239 optparse.make_option('--no-modify-scm', action='store_true', default =False, help='Dump SCM commands as JSON instead of '), | |
| 240 ] + self.platform_options) | |
| 235 | 241 |
| 236 def _optimize_baseline(self, optimizer, test_name): | 242 def _optimize_baseline(self, optimizer, test_name): |
| 237 for suffix in self._baseline_suffix_list: | 243 for suffix in self._baseline_suffix_list: |
| 238 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) | 244 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) |
| 239 if not optimizer.optimize(baseline_name): | 245 succeeded, files_to_delete, files_to_add = optimizer.optimize(baseline_name) |
| 246 if not succeeded: | |
| 240 print "Heuristics failed to optimize %s" % baseline_name | 247 print "Heuristics failed to optimize %s" % baseline_name |
| 248 return files_to_delete, files_to_add | |
| 241 | 249 |
| 242 def execute(self, options, args, tool): | 250 def execute(self, options, args, tool): |
| 243 self._baseline_suffix_list = options.suffixes.split(',') | 251 self._baseline_suffix_list = options.suffixes.split(',') |
| 244 port_names = tool.port_factory.all_port_names(options.platform) | 252 port_names = tool.port_factory.all_port_names(options.platform) |
| 245 if not port_names: | 253 if not port_names: |
| 246 print "No port names match '%s'" % options.platform | 254 print "No port names match '%s'" % options.platform |
| 247 return | 255 return |
| 248 | 256 |
| 249 optimizer = BaselineOptimizer(tool, port_names) | 257 optimizer = BaselineOptimizer(tool, port_names, skip_scm_commands=options.no_modify_scm) |
| 250 port = tool.port_factory.get(port_names[0]) | 258 port = tool.port_factory.get(port_names[0]) |
| 251 for test_name in port.tests(args): | 259 for test_name in port.tests(args): |
| 252 _log.info("Optimizing %s" % test_name) | 260 _log.info("Optimizing %s" % test_name) |
| 253 self._optimize_baseline(optimizer, test_name) | 261 files_to_delete, files_to_add = self._optimize_baseline(optimizer, test_name) |
| 262 for path in files_to_delete: | |
| 263 self._delete_from_scm(path) | |
| 264 for path in files_to_add: | |
| 265 self._add_to_scm(path) | |
| 266 | |
| 267 print json.dumps(self._scm_changes) | |
| 254 | 268 |
| 255 | 269 |
| 256 class AnalyzeBaselines(AbstractRebaseliningCommand): | 270 class AnalyzeBaselines(AbstractRebaseliningCommand): |
| 257 name = "analyze-baselines" | 271 name = "analyze-baselines" |
| 258 help_text = "Analyzes the baselines for the given tests and prints results t hat are identical." | 272 help_text = "Analyzes the baselines for the given tests and prints results t hat are identical." |
| 259 show_in_main_help = True | 273 show_in_main_help = True |
| 260 argument_names = "TEST_NAMES" | 274 argument_names = "TEST_NAMES" |
| 261 | 275 |
| 262 def __init__(self): | 276 def __init__(self): |
| 263 super(AnalyzeBaselines, self).__init__(options=[ | 277 super(AnalyzeBaselines, self).__init__(options=[ |
| (...skipping 17 matching lines...) Expand all Loading... | |
| 281 elif options.missing: | 295 elif options.missing: |
| 282 self._write("%s: (no baselines found)" % baseline_name) | 296 self._write("%s: (no baselines found)" % baseline_name) |
| 283 | 297 |
| 284 def execute(self, options, args, tool): | 298 def execute(self, options, args, tool): |
| 285 self._baseline_suffix_list = options.suffixes.split(',') | 299 self._baseline_suffix_list = options.suffixes.split(',') |
| 286 port_names = tool.port_factory.all_port_names(options.platform) | 300 port_names = tool.port_factory.all_port_names(options.platform) |
| 287 if not port_names: | 301 if not port_names: |
| 288 print "No port names match '%s'" % options.platform | 302 print "No port names match '%s'" % options.platform |
| 289 return | 303 return |
| 290 | 304 |
| 291 self._baseline_optimizer = self._optimizer_class(tool, port_names) | 305 self._baseline_optimizer = self._optimizer_class(tool, port_names, skip_scm_commands=False) |
| 292 self._port = tool.port_factory.get(port_names[0]) | 306 self._port = tool.port_factory.get(port_names[0]) |
| 293 for test_name in self._port.tests(args): | 307 for test_name in self._port.tests(args): |
| 294 self._analyze_baseline(options, test_name) | 308 self._analyze_baseline(options, test_name) |
| 295 | 309 |
| 296 | 310 |
| 297 class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): | 311 class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): |
| 298 # not overriding execute() - pylint: disable=W0223 | 312 # not overriding execute() - pylint: disable=W0223 |
| 299 | 313 |
| 300 def __init__(self, options=None): | 314 def __init__(self, options=None): |
| 301 super(AbstractParallelRebaselineCommand, self).__init__(options=options) | 315 super(AbstractParallelRebaselineCommand, self).__init__(options=options) |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 366 suffixes = ','.join(actual_failures_suffixes) | 380 suffixes = ','.join(actual_failures_suffixes) |
| 367 cmd_line = ['--suffixes', suffixes, '--builder', builder, '--test', test] | 381 cmd_line = ['--suffixes', suffixes, '--builder', builder, '--test', test] |
| 368 if options.results_directory: | 382 if options.results_directory: |
| 369 cmd_line.extend(['--results-directory', options.results_directory]) | 383 cmd_line.extend(['--results-directory', options.results_directory]) |
| 370 if options.verbose: | 384 if options.verbose: |
| 371 cmd_line.append('--verbose') | 385 cmd_line.append('--verbose') |
| 372 copy_baseline_commands.append(tuple([[path_to_webkit_patch, 'copy-existing-baselines-internal'] + cmd_line, cwd])) | 386 copy_baseline_commands.append(tuple([[path_to_webkit_patch, 'copy-existing-baselines-internal'] + cmd_line, cwd])) |
| 373 rebaseline_commands.append(tuple([[path_to_webkit_patch, 'rebaseline-test-internal'] + cmd_line, cwd])) | 387 rebaseline_commands.append(tuple([[path_to_webkit_patch, 'rebaseline-test-internal'] + cmd_line, cwd])) |
| 374 return copy_baseline_commands, rebaseline_commands | 388 return copy_baseline_commands, rebaseline_commands |
| 375 | 389 |
| 376 def _files_to_add(self, command_results): | 390 def _files_to_add(self, command_results): |
|
Dirk Pranke
2014/05/30 23:35:11
since we're now returning the files to delete as w
ojan
2014/05/31 00:02:56
I called it _serial_commands.
| |
| 377 files_to_add = set() | 391 files_to_add = set() |
| 392 files_to_delete = set() | |
| 378 lines_to_remove = {} | 393 lines_to_remove = {} |
| 379 for output in [result[1].split('\n') for result in command_results]: | 394 for output in [result[1].split('\n') for result in command_results]: |
| 380 file_added = False | 395 file_added = False |
| 381 for line in output: | 396 for line in output: |
| 382 try: | 397 try: |
| 383 if line: | 398 if line: |
| 384 parsed_line = json.loads(line) | 399 parsed_line = json.loads(line) |
| 385 if 'add' in parsed_line: | 400 if 'add' in parsed_line: |
| 386 files_to_add.update(parsed_line['add']) | 401 files_to_add.update(parsed_line['add']) |
| 402 if 'delete' in parsed_line: | |
| 403 files_to_delete.update(parsed_line['delete']) | |
| 387 if 'remove-lines' in parsed_line: | 404 if 'remove-lines' in parsed_line: |
| 388 for line_to_remove in parsed_line['remove-lines']: | 405 for line_to_remove in parsed_line['remove-lines']: |
| 389 test = line_to_remove['test'] | 406 test = line_to_remove['test'] |
| 390 builder = line_to_remove['builder'] | 407 builder = line_to_remove['builder'] |
| 391 if test not in lines_to_remove: | 408 if test not in lines_to_remove: |
| 392 lines_to_remove[test] = [] | 409 lines_to_remove[test] = [] |
| 393 lines_to_remove[test].append(builder) | 410 lines_to_remove[test].append(builder) |
| 394 file_added = True | 411 file_added = True |
| 395 except ValueError: | 412 except ValueError: |
| 396 _log.debug('"%s" is not a JSON object, ignoring' % line) | 413 _log.debug('"%s" is not a JSON object, ignoring' % line) |
| 397 | 414 |
| 398 if not file_added: | 415 if not file_added: |
| 399 _log.debug('Could not add file based off output "%s"' % output) | 416 _log.debug('Could not add file based off output "%s"' % output) |
| 400 | 417 |
| 401 return list(files_to_add), lines_to_remove | 418 return list(files_to_add), list(files_to_delete), lines_to_remove |
| 402 | 419 |
| 403 def _optimize_baselines(self, test_prefix_list, verbose=False): | 420 def _optimize_baselines(self, test_prefix_list, verbose=False): |
| 404 # We don't run this in parallel because modifying the SCM in parallel is unreliable. | 421 optimize_commands = [] |
| 405 for test in test_prefix_list: | 422 for test in test_prefix_list: |
| 406 all_suffixes = set() | 423 all_suffixes = set() |
| 407 for builder in self._builders_to_fetch_from(test_prefix_list[test]): | 424 for builder in self._builders_to_fetch_from(test_prefix_list[test]): |
| 408 all_suffixes.update(self._suffixes_for_actual_failures(test, builder, test_prefix_list[test][builder])) | 425 all_suffixes.update(self._suffixes_for_actual_failures(test, builder, test_prefix_list[test][builder])) |
| 426 | |
| 409 # FIXME: We should propagate the platform options as well. | 427 # FIXME: We should propagate the platform options as well. |
| 410 self._run_webkit_patch(['optimize-baselines', '--suffixes', ','.join(all_suffixes), test], verbose) | 428 cmd_line = ['--no-modify-scm', '--suffixes', ','.join(all_suffixes), test] |
| 429 if verbose: | |
| 430 cmd_line.append('--verbose') | |
| 431 | |
| 432 path_to_webkit_patch = self._tool.path() | |
| 433 cwd = self._tool.scm().checkout_root | |
| 434 optimize_commands.append(tuple([[path_to_webkit_patch, 'optimize-baselines'] + cmd_line, cwd])) | |
| 435 | |
| 436 # TODO: add test that shows 'delete' getting populated. | |
|
Dirk Pranke
2014/05/30 23:35:11
can we actually write this test now, rather than l
ojan
2014/05/31 00:02:56
Whoops. I meant to do this. I use TODO instead of
| |
| 437 return optimize_commands | |
| 411 | 438 |
| 412 def _update_expectations_files(self, lines_to_remove): | 439 def _update_expectations_files(self, lines_to_remove): |
| 413 # FIXME: This routine is way too expensive. We're creating N ports and N TestExpectations | 440 # FIXME: This routine is way too expensive. We're creating N ports and N TestExpectations |
| 414 # objects and (re-)writing the actual expectations file N times, for eac h test we update. | 441 # objects and (re-)writing the actual expectations file N times, for eac h test we update. |
| 415 # We should be able to update everything in memory, once, and then write the file out a single time. | 442 # We should be able to update everything in memory, once, and then write the file out a single time. |
| 416 for test in lines_to_remove: | 443 for test in lines_to_remove: |
| 417 for builder in lines_to_remove[test]: | 444 for builder in lines_to_remove[test]: |
| 418 port = self._tool.port_factory.get_from_builder_name(builder) | 445 port = self._tool.port_factory.get_from_builder_name(builder) |
| 419 path = port.path_to_generic_test_expectations_file() | 446 path = port.path_to_generic_test_expectations_file() |
| 420 expectations = TestExpectations(port, include_overrides=False) | 447 expectations = TestExpectations(port, include_overrides=False) |
| (...skipping 23 matching lines...) Expand all Loading... | |
| 444 return (SKIP in full_expectations.get_expectations(test) and | 471 return (SKIP in full_expectations.get_expectations(test) and |
| 445 SKIP not in generic_expectations.get_expectations(test)) | 472 SKIP not in generic_expectations.get_expectations(test)) |
| 446 | 473 |
| 447 def _run_in_parallel_and_update_scm(self, commands): | 474 def _run_in_parallel_and_update_scm(self, commands): |
| 448 command_results = self._tool.executive.run_in_parallel(commands) | 475 command_results = self._tool.executive.run_in_parallel(commands) |
| 449 log_output = '\n'.join(result[2] for result in command_results).replace( '\n\n', '\n') | 476 log_output = '\n'.join(result[2] for result in command_results).replace( '\n\n', '\n') |
| 450 for line in log_output.split('\n'): | 477 for line in log_output.split('\n'): |
| 451 if line: | 478 if line: |
| 452 print >> sys.stderr, line # FIXME: Figure out how to log properly. | 479 print >> sys.stderr, line # FIXME: Figure out how to log properly. |
| 453 | 480 |
| 454 files_to_add, lines_to_remove = self._files_to_add(command_results) | 481 files_to_add, files_to_delete, lines_to_remove = self._files_to_add(command_results) |
|
Dirk Pranke
2014/05/30 23:35:11
see note on the naming of "self._files_to_add()",
ojan
2014/05/31 00:02:56
Done.
| |
| 482 if files_to_delete: | |
| 483 self._tool.scm().delete_list(files_to_delete) | |
| 455 if files_to_add: | 484 if files_to_add: |
| 456 self._tool.scm().add_list(list(files_to_add)) | 485 self._tool.scm().add_list(files_to_add) |
| 457 if lines_to_remove: | 486 if lines_to_remove: |
| 458 self._update_expectations_files(lines_to_remove) | 487 self._update_expectations_files(lines_to_remove) |
| 459 | 488 |
| 460 def _rebaseline(self, options, test_prefix_list): | 489 def _rebaseline(self, options, test_prefix_list): |
| 461 for test, builders_to_check in sorted(test_prefix_list.items()): | 490 for test, builders_to_check in sorted(test_prefix_list.items()): |
| 462 _log.info("Rebaselining %s" % test) | 491 _log.info("Rebaselining %s" % test) |
| 463 for builder, suffixes in sorted(builders_to_check.items()): | 492 for builder, suffixes in sorted(builders_to_check.items()): |
| 464 _log.debug(" %s: %s" % (builder, ",".join(suffixes))) | 493 _log.debug(" %s: %s" % (builder, ",".join(suffixes))) |
| 465 | 494 |
| 466 copy_baseline_commands, rebaseline_commands = self._rebaseline_commands( test_prefix_list, options) | 495 copy_baseline_commands, rebaseline_commands = self._rebaseline_commands( test_prefix_list, options) |
| 467 if copy_baseline_commands: | 496 if copy_baseline_commands: |
| 468 self._run_in_parallel_and_update_scm(copy_baseline_commands) | 497 self._run_in_parallel_and_update_scm(copy_baseline_commands) |
| 469 if rebaseline_commands: | 498 if rebaseline_commands: |
| 470 self._run_in_parallel_and_update_scm(rebaseline_commands) | 499 self._run_in_parallel_and_update_scm(rebaseline_commands) |
| 471 | |
| 472 if options.optimize: | 500 if options.optimize: |
| 473 self._optimize_baselines(test_prefix_list, options.verbose) | 501 self._run_in_parallel_and_update_scm(self._optimize_baselines(test_prefix_list, options.verbose)) |
| 474 | 502 |
| 475 def _suffixes_for_actual_failures(self, test, builder_name, existing_suffixes): | 503 def _suffixes_for_actual_failures(self, test, builder_name, existing_suffixes): |
| 476 actual_results = self.builder_data()[builder_name].actual_results(test) | 504 actual_results = self.builder_data()[builder_name].actual_results(test) |
| 477 if not actual_results: | 505 if not actual_results: |
| 478 return set() | 506 return set() |
| 479 return set(existing_suffixes) & TestExpectations.suffixes_for_actual_expectations_string(actual_results) | 507 return set(existing_suffixes) & TestExpectations.suffixes_for_actual_expectations_string(actual_results) |
| 480 | 508 |
| 481 | 509 |
| 482 class RebaselineJson(AbstractParallelRebaselineCommand): | 510 class RebaselineJson(AbstractParallelRebaselineCommand): |
| 483 name = "rebaseline-json" | 511 name = "rebaseline-json" |
| (...skipping 397 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 881 if options.verbose: | 909 if options.verbose: |
| 882 rebaseline_command.append('--verbose') | 910 rebaseline_command.append('--verbose') |
| 883 # Use call instead of run_command so that stdout doesn't get swallowed. | 911 # Use call instead of run_command so that stdout doesn't get swallowed. |
| 884 tool.executive.call(rebaseline_command) | 912 tool.executive.call(rebaseline_command) |
| 885 except: | 913 except: |
| 886 traceback.print_exc(file=sys.stderr) | 914 traceback.print_exc(file=sys.stderr) |
| 887 # Sometimes git crashes and leaves us on a detached head. | 915 # Sometimes git crashes and leaves us on a detached head. |
| 888 tool.scm().checkout_branch(old_branch_name) | 916 tool.scm().checkout_branch(old_branch_name) |
| 889 | 917 |
| 890 time.sleep(self.SLEEP_TIME_IN_SECONDS) | 918 time.sleep(self.SLEEP_TIME_IN_SECONDS) |
| OLD | NEW |