OLD | NEW |
1 # Copyright (c) 2010 Google Inc. All rights reserved. | 1 # Copyright (c) 2010 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 51 matching lines...)
62 if not branch_name: | 62 if not branch_name: |
63 # If HEAD is detached use commit SHA instead. | 63 # If HEAD is detached use commit SHA instead. |
64 return tool.executive.run_command(['git', 'rev-parse', 'HEAD']).strip() | 64 return tool.executive.run_command(['git', 'rev-parse', 'HEAD']).strip() |
65 return branch_name | 65 return branch_name |
66 | 66 |
67 | 67 |
68 class AbstractRebaseliningCommand(AbstractDeclarativeCommand): | 68 class AbstractRebaseliningCommand(AbstractDeclarativeCommand): |
69 # not overriding execute() - pylint: disable=W0223 | 69 # not overriding execute() - pylint: disable=W0223 |
70 | 70 |
71 no_optimize_option = optparse.make_option('--no-optimize', dest='optimize', action='store_false', default=True, | 71 no_optimize_option = optparse.make_option('--no-optimize', dest='optimize', action='store_false', default=True, |
72 help=('Do not optimize/de-dup the expectations after rebaselining (default is to de-dup automatically). ' | 72 help=('Do not optimize/de-dup the expectations after rebaselining (default is to de-dup automatically). ' |
73 'You can use "webkit-patch optimize-baselines" to optimize separately.')) | 73 'You can use "webkit-patch optimize-baselines" to optimize separately.')) |
74 | 74 |
75 platform_options = factory.platform_options(use_globs=True) | 75 platform_options = factory.platform_options(use_globs=True) |
76 | 76 |
77 results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use") | 77 results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use") |
78 | 78 |
79 suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store", | 79 suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store", |
80 help="Comma-separated-list of file types to rebaseline") | 80 help="Comma-separated-list of file types to rebaseline") |
81 | 81 |
82 def __init__(self, options=None): | 82 def __init__(self, options=None): |
83 super(AbstractRebaseliningCommand, self).__init__(options=options) | 83 super(AbstractRebaseliningCommand, self).__init__(options=options) |
84 self._baseline_suffix_list = BASELINE_SUFFIX_LIST | 84 self._baseline_suffix_list = BASELINE_SUFFIX_LIST |
85 self._scm_changes = {'add': [], 'delete': [], 'remove-lines': []} | 85 self._scm_changes = {'add': [], 'delete': [], 'remove-lines': []} |
86 | 86 |
87 def _add_to_scm_later(self, path): | 87 def _add_to_scm_later(self, path): |
88 self._scm_changes['add'].append(path) | 88 self._scm_changes['add'].append(path) |
89 | 89 |
90 def _delete_from_scm_later(self, path): | 90 def _delete_from_scm_later(self, path): |
91 self._scm_changes['delete'].append(path) | 91 self._scm_changes['delete'].append(path) |
92 | 92 |
93 | 93 |
94 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand): | 94 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand): |
| 95 |
95 def __init__(self): | 96 def __init__(self): |
96 super(BaseInternalRebaselineCommand, self).__init__(options=[ | 97 super(BaseInternalRebaselineCommand, self).__init__(options=[ |
97 self.results_directory_option, | 98 self.results_directory_option, |
98 self.suffixes_option, | 99 self.suffixes_option, |
99 optparse.make_option("--builder", help="Builder to pull new baselines from"), | 100 optparse.make_option("--builder", help="Builder to pull new baselines from"), |
100 optparse.make_option("--test", help="Test to rebaseline"), | 101 optparse.make_option("--test", help="Test to rebaseline"), |
101 ]) | 102 ]) |
102 | 103 |
103 def _baseline_directory(self, builder_name): | 104 def _baseline_directory(self, builder_name): |
104 port = self._tool.port_factory.get_from_builder_name(builder_name) | 105 port = self._tool.port_factory.get_from_builder_name(builder_name) |
105 override_dir = builders.rebaseline_override_dir(builder_name) | 106 override_dir = builders.rebaseline_override_dir(builder_name) |
106 if override_dir: | 107 if override_dir: |
107 return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir) | 108 return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir) |
108 return port.baseline_version_dir() | 109 return port.baseline_version_dir() |
109 | 110 |
110 def _test_root(self, test_name): | 111 def _test_root(self, test_name): |
111 return self._tool.filesystem.splitext(test_name)[0] | 112 return self._tool.filesystem.splitext(test_name)[0] |
(...skipping 28 matching lines...)
140 return immediate_predecessors_in_fallback | 141 return immediate_predecessors_in_fallback |
141 | 142 |
142 def _port_for_primary_baseline(self, baseline): | 143 def _port_for_primary_baseline(self, baseline): |
143 for port in [self._tool.port_factory.get(port_name) for port_name in self._tool.port_factory.all_port_names()]: | 144 for port in [self._tool.port_factory.get(port_name) for port_name in self._tool.port_factory.all_port_names()]: |
144 if self._tool.filesystem.basename(port.baseline_version_dir()) == baseline: | 145 if self._tool.filesystem.basename(port.baseline_version_dir()) == baseline: |
145 return port | 146 return port |
146 raise Exception("Failed to find port for primary baseline %s." % baseline) | 147 raise Exception("Failed to find port for primary baseline %s." % baseline) |
147 | 148 |
148 def _copy_existing_baseline(self, builder_name, test_name, suffix): | 149 def _copy_existing_baseline(self, builder_name, test_name, suffix): |
149 baseline_directory = self._baseline_directory(builder_name) | 150 baseline_directory = self._baseline_directory(builder_name) |
150 ports = [self._port_for_primary_baseline(baseline) for baseline in self._immediate_predecessors_in_fallback(baseline_directory)] | 151 ports = [self._port_for_primary_baseline(baseline) |
| 152 for baseline in self._immediate_predecessors_in_fallback(baseline_directory)] |
151 | 153 |
152 old_baselines = [] | 154 old_baselines = [] |
153 new_baselines = [] | 155 new_baselines = [] |
154 | 156 |
155 # Need to gather all the baseline paths before modifying the filesystem since | 157 # Need to gather all the baseline paths before modifying the filesystem since |
156 # the modifications can affect the results of port.expected_filename. | 158 # the modifications can affect the results of port.expected_filename. |
157 for port in ports: | 159 for port in ports: |
158 old_baseline = port.expected_filename(test_name, "." + suffix) | 160 old_baseline = port.expected_filename(test_name, "." + suffix) |
159 if not self._tool.filesystem.exists(old_baseline): | 161 if not self._tool.filesystem.exists(old_baseline): |
160 _log.debug("No existing baseline for %s." % test_name) | 162 _log.debug("No existing baseline for %s." % test_name) |
(...skipping 46 matching lines...)
207 if not self._tool.scm().exists(target_baseline): | 209 if not self._tool.scm().exists(target_baseline): |
208 self._add_to_scm_later(target_baseline) | 210 self._add_to_scm_later(target_baseline) |
209 | 211 |
210 def _rebaseline_test(self, builder_name, test_name, suffix, results_url): | 212 def _rebaseline_test(self, builder_name, test_name, suffix, results_url): |
211 baseline_directory = self._baseline_directory(builder_name) | 213 baseline_directory = self._baseline_directory(builder_name) |
212 | 214 |
213 source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix)) | 215 source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix)) |
214 target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix)) | 216 target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix)) |
215 | 217 |
216 _log.debug("Retrieving %s." % source_baseline) | 218 _log.debug("Retrieving %s." % source_baseline) |
217 self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), target_baseline, baseline_directory, test_name, suffix) | 219 self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), |
| 220 target_baseline, baseline_directory, test_name, suffix) |
218 | 221 |
219 def _rebaseline_test_and_update_expectations(self, options): | 222 def _rebaseline_test_and_update_expectations(self, options): |
220 port = self._tool.port_factory.get_from_builder_name(options.builder) | 223 port = self._tool.port_factory.get_from_builder_name(options.builder) |
221 if (port.reference_files(options.test)): | 224 if (port.reference_files(options.test)): |
222 _log.warning("Cannot rebaseline reftest: %s", options.test) | 225 _log.warning("Cannot rebaseline reftest: %s", options.test) |
223 return | 226 return |
224 | 227 |
225 if options.results_directory: | 228 if options.results_directory: |
226 results_url = 'file://' + options.results_directory | 229 results_url = 'file://' + options.results_directory |
227 else: | 230 else: |
(...skipping 11 matching lines...)
239 | 242 |
240 class OptimizeBaselines(AbstractRebaseliningCommand): | 243 class OptimizeBaselines(AbstractRebaseliningCommand): |
241 name = "optimize-baselines" | 244 name = "optimize-baselines" |
242 help_text = "Reshuffles the baselines for the given tests to use as litte space on disk as possible." | 245 help_text = "Reshuffles the baselines for the given tests to use as litte space on disk as possible." |
243 show_in_main_help = True | 246 show_in_main_help = True |
244 argument_names = "TEST_NAMES" | 247 argument_names = "TEST_NAMES" |
245 | 248 |
246 def __init__(self): | 249 def __init__(self): |
247 super(OptimizeBaselines, self).__init__(options=[ | 250 super(OptimizeBaselines, self).__init__(options=[ |
248 self.suffixes_option, | 251 self.suffixes_option, |
249 optparse.make_option('--no-modify-scm', action='store_true', default=False, help='Dump SCM commands as JSON instead of '), | 252 optparse.make_option('--no-modify-scm', action='store_true', default=False, |
250 ] + self.platform_options) | 253 help='Dump SCM commands as JSON instead of '), |
| 254 ] + self.platform_options) |
251 | 255 |
252 def _optimize_baseline(self, optimizer, test_name): | 256 def _optimize_baseline(self, optimizer, test_name): |
253 files_to_delete = [] | 257 files_to_delete = [] |
254 files_to_add = [] | 258 files_to_add = [] |
255 for suffix in self._baseline_suffix_list: | 259 for suffix in self._baseline_suffix_list: |
256 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) | 260 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) |
257 succeeded, more_files_to_delete, more_files_to_add = optimizer.optimize(baseline_name) | 261 succeeded, more_files_to_delete, more_files_to_add = optimizer.optimize(baseline_name) |
258 if not succeeded: | 262 if not succeeded: |
259 print "Heuristics failed to optimize %s" % baseline_name | 263 print "Heuristics failed to optimize %s" % baseline_name |
260 files_to_delete.extend(more_files_to_delete) | 264 files_to_delete.extend(more_files_to_delete) |
(...skipping 22 matching lines...)
283 class AnalyzeBaselines(AbstractRebaseliningCommand): | 287 class AnalyzeBaselines(AbstractRebaseliningCommand): |
284 name = "analyze-baselines" | 288 name = "analyze-baselines" |
285 help_text = "Analyzes the baselines for the given tests and prints results that are identical." | 289 help_text = "Analyzes the baselines for the given tests and prints results that are identical." |
286 show_in_main_help = True | 290 show_in_main_help = True |
287 argument_names = "TEST_NAMES" | 291 argument_names = "TEST_NAMES" |
288 | 292 |
289 def __init__(self): | 293 def __init__(self): |
290 super(AnalyzeBaselines, self).__init__(options=[ | 294 super(AnalyzeBaselines, self).__init__(options=[ |
291 self.suffixes_option, | 295 self.suffixes_option, |
292 optparse.make_option('--missing', action='store_true', default=False, help='show missing baselines as well'), | 296 optparse.make_option('--missing', action='store_true', default=False, help='show missing baselines as well'), |
293 ] + self.platform_options) | 297 ] + self.platform_options) |
294 self._optimizer_class = BaselineOptimizer # overridable for testing | 298 self._optimizer_class = BaselineOptimizer # overridable for testing |
295 self._baseline_optimizer = None | 299 self._baseline_optimizer = None |
296 self._port = None | 300 self._port = None |
297 | 301 |
298 def _write(self, msg): | 302 def _write(self, msg): |
299 print msg | 303 print msg |
300 | 304 |
301 def _analyze_baseline(self, options, test_name): | 305 def _analyze_baseline(self, options, test_name): |
302 for suffix in self._baseline_suffix_list: | 306 for suffix in self._baseline_suffix_list: |
303 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) | 307 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) |
(...skipping 41 matching lines...)
345 if builder_name.find('ASAN') != -1: | 349 if builder_name.find('ASAN') != -1: |
346 continue | 350 continue |
347 port = self._tool.port_factory.get_from_builder_name(builder_name) | 351 port = self._tool.port_factory.get_from_builder_name(builder_name) |
348 if port.test_configuration().build_type == 'release': | 352 if port.test_configuration().build_type == 'release': |
349 release_builders.append(builder_name) | 353 release_builders.append(builder_name) |
350 return release_builders | 354 return release_builders |
351 | 355 |
352 def _run_webkit_patch(self, args, verbose): | 356 def _run_webkit_patch(self, args, verbose): |
353 try: | 357 try: |
354 verbose_args = ['--verbose'] if verbose else [] | 358 verbose_args = ['--verbose'] if verbose else [] |
355 stderr = self._tool.executive.run_command([self._tool.path()] + verbose_args + args, cwd=self._tool.scm().checkout_root, return_stderr=True) | 359 stderr = self._tool.executive.run_command([self._tool.path()] + verbose_args + |
| 360 args, cwd=self._tool.scm().checkout_root, return_stderr=True) |
356 for line in stderr.splitlines(): | 361 for line in stderr.splitlines(): |
357 _log.warning(line) | 362 _log.warning(line) |
358 except ScriptError: | 363 except ScriptError: |
359 traceback.print_exc(file=sys.stderr) | 364 traceback.print_exc(file=sys.stderr) |
360 | 365 |
361 def _builders_to_fetch_from(self, builders_to_check): | 366 def _builders_to_fetch_from(self, builders_to_check): |
362 # This routine returns the subset of builders that will cover all of the baseline search paths | 367 # This routine returns the subset of builders that will cover all of the baseline search paths |
363 # used in the input list. In particular, if the input list contains both Release and Debug | 368 # used in the input list. In particular, if the input list contains both Release and Debug |
364 # versions of a configuration, we *only* return the Release version (since we don't save | 369 # versions of a configuration, we *only* return the Release version (since we don't save |
365 # debug versions of baselines). | 370 # debug versions of baselines). |
(...skipping 17 matching lines...)
383 path_to_webkit_patch = self._tool.path() | 388 path_to_webkit_patch = self._tool.path() |
384 cwd = self._tool.scm().checkout_root | 389 cwd = self._tool.scm().checkout_root |
385 copy_baseline_commands = [] | 390 copy_baseline_commands = [] |
386 rebaseline_commands = [] | 391 rebaseline_commands = [] |
387 lines_to_remove = {} | 392 lines_to_remove = {} |
388 port = self._tool.port_factory.get() | 393 port = self._tool.port_factory.get() |
389 | 394 |
390 for test_prefix in test_prefix_list: | 395 for test_prefix in test_prefix_list: |
391 for test in port.tests([test_prefix]): | 396 for test in port.tests([test_prefix]): |
392 for builder in self._builders_to_fetch_from(test_prefix_list[test_prefix]): | 397 for builder in self._builders_to_fetch_from(test_prefix_list[test_prefix]): |
393 actual_failures_suffixes = self._suffixes_for_actual_failures(test, builder, test_prefix_list[test_prefix][builder]) | 398 actual_failures_suffixes = self._suffixes_for_actual_failures( |
| 399 test, builder, test_prefix_list[test_prefix][builder]) |
394 if not actual_failures_suffixes: | 400 if not actual_failures_suffixes: |
395 # If we're not going to rebaseline the test because it's passing on this | 401 # If we're not going to rebaseline the test because it's passing on this |
396 # builder, we still want to remove the line from TestExpectations. | 402 # builder, we still want to remove the line from TestExpectations. |
397 if test not in lines_to_remove: | 403 if test not in lines_to_remove: |
398 lines_to_remove[test] = [] | 404 lines_to_remove[test] = [] |
399 lines_to_remove[test].append(builder) | 405 lines_to_remove[test].append(builder) |
400 continue | 406 continue |
401 | 407 |
402 suffixes = ','.join(actual_failures_suffixes) | 408 suffixes = ','.join(actual_failures_suffixes) |
403 cmd_line = ['--suffixes', suffixes, '--builder', builder, '--test', test] | 409 cmd_line = ['--suffixes', suffixes, '--builder', builder, '--test', test] |
404 if options.results_directory: | 410 if options.results_directory: |
405 cmd_line.extend(['--results-directory', options.results_directory]) | 411 cmd_line.extend(['--results-directory', options.results_directory]) |
406 if options.verbose: | 412 if options.verbose: |
407 cmd_line.append('--verbose') | 413 cmd_line.append('--verbose') |
408 copy_baseline_commands.append(tuple([[self._tool.executable, path_to_webkit_patch, 'copy-existing-baselines-internal'] + cmd_line, cwd])) | 414 copy_baseline_commands.append( |
409 rebaseline_commands.append(tuple([[self._tool.executable, path_to_webkit_patch, 'rebaseline-test-internal'] + cmd_line, cwd])) | 415 tuple([[self._tool.executable, path_to_webkit_patch, 'copy-existing-baselines-internal'] + cmd_line, cwd])) |
| 416 rebaseline_commands.append( |
| 417 tuple([[self._tool.executable, path_to_webkit_patch, 'rebaseline-test-internal'] + cmd_line, cwd])) |
410 return copy_baseline_commands, rebaseline_commands, lines_to_remove | 418 return copy_baseline_commands, rebaseline_commands, lines_to_remove |
411 | 419 |
412 def _serial_commands(self, command_results): | 420 def _serial_commands(self, command_results): |
413 files_to_add = set() | 421 files_to_add = set() |
414 files_to_delete = set() | 422 files_to_delete = set() |
415 lines_to_remove = {} | 423 lines_to_remove = {} |
416 for output in [result[1].split('\n') for result in command_results]: | 424 for output in [result[1].split('\n') for result in command_results]: |
417 file_added = False | 425 file_added = False |
418 for line in output: | 426 for line in output: |
419 try: | 427 try: |
(...skipping 138 matching lines...)
558 | 566 |
559 | 567 |
560 class RebaselineJson(AbstractParallelRebaselineCommand): | 568 class RebaselineJson(AbstractParallelRebaselineCommand): |
561 name = "rebaseline-json" | 569 name = "rebaseline-json" |
562 help_text = "Rebaseline based off JSON passed to stdin. Intended to only be called from other scripts." | 570 help_text = "Rebaseline based off JSON passed to stdin. Intended to only be called from other scripts." |
563 | 571 |
564 def __init__(self,): | 572 def __init__(self,): |
565 super(RebaselineJson, self).__init__(options=[ | 573 super(RebaselineJson, self).__init__(options=[ |
566 self.no_optimize_option, | 574 self.no_optimize_option, |
567 self.results_directory_option, | 575 self.results_directory_option, |
568 ]) | 576 ]) |
569 | 577 |
570 def execute(self, options, args, tool): | 578 def execute(self, options, args, tool): |
571 self._rebaseline(options, json.loads(sys.stdin.read())) | 579 self._rebaseline(options, json.loads(sys.stdin.read())) |
572 | 580 |
573 | 581 |
574 class RebaselineExpectations(AbstractParallelRebaselineCommand): | 582 class RebaselineExpectations(AbstractParallelRebaselineCommand): |
575 name = "rebaseline-expectations" | 583 name = "rebaseline-expectations" |
576 help_text = "Rebaselines the tests indicated in TestExpectations." | 584 help_text = "Rebaselines the tests indicated in TestExpectations." |
577 show_in_main_help = True | 585 show_in_main_help = True |
578 | 586 |
579 def __init__(self): | 587 def __init__(self): |
580 super(RebaselineExpectations, self).__init__(options=[ | 588 super(RebaselineExpectations, self).__init__(options=[ |
581 self.no_optimize_option, | 589 self.no_optimize_option, |
582 ] + self.platform_options) | 590 ] + self.platform_options) |
583 self._test_prefix_list = None | 591 self._test_prefix_list = None |
584 | 592 |
585 def _tests_to_rebaseline(self, port): | 593 def _tests_to_rebaseline(self, port): |
586 tests_to_rebaseline = {} | 594 tests_to_rebaseline = {} |
587 for path, value in port.expectations_dict().items(): | 595 for path, value in port.expectations_dict().items(): |
588 expectations = TestExpectations(port, include_overrides=False, expectations_dict={path: value}) | 596 expectations = TestExpectations(port, include_overrides=False, expectations_dict={path: value}) |
589 for test in expectations.get_rebaselining_failures(): | 597 for test in expectations.get_rebaselining_failures(): |
590 suffixes = TestExpectations.suffixes_for_expectations(expectations.get_expectations(test)) | 598 suffixes = TestExpectations.suffixes_for_expectations(expectations.get_expectations(test)) |
591 tests_to_rebaseline[test] = suffixes or BASELINE_SUFFIX_LIST | 599 tests_to_rebaseline[test] = suffixes or BASELINE_SUFFIX_LIST |
592 return tests_to_rebaseline | 600 return tests_to_rebaseline |
(...skipping 31 matching lines...)
624 help_text = "Rebaseline tests with results from the build bots. Shows the list of failing tests on the builders if no test names are provided." | 632 help_text = "Rebaseline tests with results from the build bots. Shows the list of failing tests on the builders if no test names are provided." |
625 show_in_main_help = True | 633 show_in_main_help = True |
626 argument_names = "[TEST_NAMES]" | 634 argument_names = "[TEST_NAMES]" |
627 | 635 |
628 def __init__(self): | 636 def __init__(self): |
629 super(Rebaseline, self).__init__(options=[ | 637 super(Rebaseline, self).__init__(options=[ |
630 self.no_optimize_option, | 638 self.no_optimize_option, |
631 # FIXME: should we support the platform options in addition to (or instead of) --builders? | 639 # FIXME: should we support the platform options in addition to (or instead of) --builders? |
632 self.suffixes_option, | 640 self.suffixes_option, |
633 self.results_directory_option, | 641 self.results_directory_option, |
634 optparse.make_option("--builders", default=None, action="append", help="Comma-separated-list of builders to pull new baselines from (can also be provided multiple times)"), | 642 optparse.make_option("--builders", default=None, action="append", |
635 ]) | 643 help="Comma-separated-list of builders to pull new baselines from (can also be provided multiple times)"), |
| 644 ]) |
636 | 645 |
637 def _builders_to_pull_from(self): | 646 def _builders_to_pull_from(self): |
638 chosen_names = self._tool.user.prompt_with_list("Which builder to pull results from:", self._release_builders(), can_choose_multiple=True) | 647 chosen_names = self._tool.user.prompt_with_list( |
| 648 "Which builder to pull results from:", self._release_builders(), can_choose_multiple=True) |
639 return [self._builder_with_name(name) for name in chosen_names] | 649 return [self._builder_with_name(name) for name in chosen_names] |
640 | 650 |
641 def _builder_with_name(self, name): | 651 def _builder_with_name(self, name): |
642 return self._tool.buildbot.builder_with_name(name) | 652 return self._tool.buildbot.builder_with_name(name) |
643 | 653 |
644 def execute(self, options, args, tool): | 654 def execute(self, options, args, tool): |
645 if not args: | 655 if not args: |
646 _log.error("Must list tests to rebaseline.") | 656 _log.error("Must list tests to rebaseline.") |
647 return | 657 return |
648 | 658 |
(...skipping 32 matching lines...)
681 SECONDS_BEFORE_GIVING_UP = 300 | 691 SECONDS_BEFORE_GIVING_UP = 300 |
682 | 692 |
683 def __init__(self): | 693 def __init__(self): |
684 super(AutoRebaseline, self).__init__(options=[ | 694 super(AutoRebaseline, self).__init__(options=[ |
685 # FIXME: Remove this option. | 695 # FIXME: Remove this option. |
686 self.no_optimize_option, | 696 self.no_optimize_option, |
687 # FIXME: Remove this option. | 697 # FIXME: Remove this option. |
688 self.results_directory_option, | 698 self.results_directory_option, |
689 optparse.make_option("--auth-refresh-token-json", help="Rietveld auth refresh JSON token."), | 699 optparse.make_option("--auth-refresh-token-json", help="Rietveld auth refresh JSON token."), |
690 optparse.make_option("--dry-run", action='store_true', default=False, | 700 optparse.make_option("--dry-run", action='store_true', default=False, |
691 help='Run without creating a temporary branch, committing locally, or uploading/landing ' | 701 help='Run without creating a temporary branch, committing locally, or uploading/landing ' |
692 'changes to the remote repository.') | 702 'changes to the remote repository.') |
693 ]) | 703 ]) |
694 self._blame_regex = re.compile(r""" | 704 self._blame_regex = re.compile(r""" |
695 ^(\S*) # Commit hash | 705 ^(\S*) # Commit hash |
696 [^(]* \( # Whitespace and open parenthesis | 706 [^(]* \( # Whitespace and open parenthesis |
697 < # Email address is surrounded by <> | 707 < # Email address is surrounded by <> |
698 ( | 708 ( |
699 [^@]+ # Username preceding @ | 709 [^@]+ # Username preceding @ |
700 @ | 710 @ |
701 [^@>]+ # Domain terminated by @ or >, some lines have an additional @ fragment after the email. | 711 [^@>]+ # Domain terminated by @ or >, some lines have an additional @ fragment after the email. |
702 ) | 712 ) |
703 .*?([^ ]*) # Test file name | 713 .*?([^ ]*) # Test file name |
(...skipping 108 matching lines...)
812 return test_prefix_list, lines_to_remove | 822 return test_prefix_list, lines_to_remove |
813 | 823 |
814 def _run_git_cl_command(self, options, command): | 824 def _run_git_cl_command(self, options, command): |
815 subprocess_command = ['git', 'cl'] + command | 825 subprocess_command = ['git', 'cl'] + command |
816 if options.verbose: | 826 if options.verbose: |
817 subprocess_command.append('--verbose') | 827 subprocess_command.append('--verbose') |
818 if options.auth_refresh_token_json: | 828 if options.auth_refresh_token_json: |
819 subprocess_command.append('--auth-refresh-token-json') | 829 subprocess_command.append('--auth-refresh-token-json') |
820 subprocess_command.append(options.auth_refresh_token_json) | 830 subprocess_command.append(options.auth_refresh_token_json) |
821 | 831 |
822 process = self._tool.executive.popen(subprocess_command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.STDOUT) | 832 process = self._tool.executive.popen(subprocess_command, stdout=self._tool.executive.PIPE, |
| 833 stderr=self._tool.executive.STDOUT) |
823 last_output_time = time.time() | 834 last_output_time = time.time() |
824 | 835 |
825 # git cl sometimes completely hangs. Bail if we haven't gotten any output to stdout/stderr in a while. | 836 # git cl sometimes completely hangs. Bail if we haven't gotten any output to stdout/stderr in a while. |
826 while process.poll() == None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP: | 837 while process.poll() == None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP: |
827 # FIXME: This doesn't make any sense. readline blocks, so all this code to | 838 # FIXME: This doesn't make any sense. readline blocks, so all this code to |
828 # try and bail is useless. Instead, we should do the readline calls on a | 839 # try and bail is useless. Instead, we should do the readline calls on a |
829 # subthread. Then the rest of this code would make sense. | 840 # subthread. Then the rest of this code would make sense. |
830 out = process.stdout.readline().rstrip('\n') | 841 out = process.stdout.readline().rstrip('\n') |
831 if out: | 842 if out: |
832 last_output_time = time.time() | 843 last_output_time = time.time() |
(...skipping 21 matching lines...)
854 | 865 |
855 if not options.dry_run and tool.scm().has_working_directory_changes(): | 866 if not options.dry_run and tool.scm().has_working_directory_changes(): |
856 _log.error("Cannot proceed with working directory changes. Clean working directory first.") | 867 _log.error("Cannot proceed with working directory changes. Clean working directory first.") |
857 return | 868 return |
858 | 869 |
859 revision_data = self.bot_revision_data() | 870 revision_data = self.bot_revision_data() |
860 if not revision_data: | 871 if not revision_data: |
861 return | 872 return |
862 | 873 |
863 min_revision = int(min([item["revision"] for item in revision_data])) | 874 min_revision = int(min([item["revision"] for item in revision_data])) |
864 tests, revision, commit, author, bugs, has_any_needs_rebaseline_lines = self.tests_to_rebaseline(tool, min_revision, print_revisions=options.verbose) | 875 tests, revision, commit, author, bugs, has_any_needs_rebaseline_lines = self.tests_to_rebaseline( |
| 876 tool, min_revision, print_revisions=options.verbose) |
865 | 877 |
866 if options.verbose: | 878 if options.verbose: |
867 _log.info("Min revision across all bots is %s." % min_revision) | 879 _log.info("Min revision across all bots is %s." % min_revision) |
868 for item in revision_data: | 880 for item in revision_data: |
869 _log.info("%s: r%s" % (item["builder"], item["revision"])) | 881 _log.info("%s: r%s" % (item["builder"], item["revision"])) |
870 | 882 |
871 if not tests: | 883 if not tests: |
872 _log.debug('No tests to rebaseline.') | 884 _log.debug('No tests to rebaseline.') |
873 return | 885 return |
874 | 886 |
(...skipping 113 matching lines...)
988 out = self._log_line(process.stdout) | 1000 out = self._log_line(process.stdout) |
989 | 1001 |
990 def _do_one_rebaseline(self): | 1002 def _do_one_rebaseline(self): |
991 self._log_queue = Queue.Queue(256) | 1003 self._log_queue = Queue.Queue(256) |
992 log_thread = threading.Thread(name='LogToServer', target=self._log_to_server_thread) | 1004 log_thread = threading.Thread(name='LogToServer', target=self._log_to_server_thread) |
993 log_thread.start() | 1005 log_thread.start() |
994 old_branch_name_or_ref = '' | 1006 old_branch_name_or_ref = '' |
995 try: | 1007 try: |
996 old_branch_name_or_ref = _get_branch_name_or_ref(self._tool) | 1008 old_branch_name_or_ref = _get_branch_name_or_ref(self._tool) |
997 self._run_logged_command(['git', 'pull']) | 1009 self._run_logged_command(['git', 'pull']) |
998 rebaseline_command = [self._tool.filesystem.join(self._tool.scm().checkout_root, 'third_party', 'WebKit', 'Tools', 'Scripts', 'webkit-patch'), 'auto-rebaseline'] | 1010 rebaseline_command = [self._tool.filesystem.join( |
| 1011 self._tool.scm().checkout_root, 'third_party', 'WebKit', 'Tools', 'Scripts', 'webkit-patch'), 'auto-rebaseline'] |
999 if self._verbose: | 1012 if self._verbose: |
1000 rebaseline_command.append('--verbose') | 1013 rebaseline_command.append('--verbose') |
1001 self._run_logged_command(rebaseline_command) | 1014 self._run_logged_command(rebaseline_command) |
1002 except: | 1015 except: |
1003 self._log_queue.put(self.QUIT_LOG) | 1016 self._log_queue.put(self.QUIT_LOG) |
1004 traceback.print_exc(file=sys.stderr) | 1017 traceback.print_exc(file=sys.stderr) |
1005 # Sometimes git crashes and leaves us on a detached head. | 1018 # Sometimes git crashes and leaves us on a detached head. |
1006 if old_branch_name_or_ref: | 1019 if old_branch_name_or_ref: |
1007 self._tool.scm().checkout_branch(old_branch_name_or_ref) | 1020 self._tool.scm().checkout_branch(old_branch_name_or_ref) |
1008 else: | 1021 else: |
1009 self._log_queue.put(self.QUIT_LOG) | 1022 self._log_queue.put(self.QUIT_LOG) |
1010 log_thread.join() | 1023 log_thread.join() |
1011 | 1024 |
1012 def execute(self, options, args, tool): | 1025 def execute(self, options, args, tool): |
1013 self._verbose = options.verbose | 1026 self._verbose = options.verbose |
1014 while True: | 1027 while True: |
1015 self._do_one_rebaseline() | 1028 self._do_one_rebaseline() |
1016 time.sleep(self.SLEEP_TIME_IN_SECONDS) | 1029 time.sleep(self.SLEEP_TIME_IN_SECONDS) |