Chromium Code Reviews

Side by Side Diff: sky/tools/webkitpy/tool/commands/rebaseline.py

Issue 946753002: Delete a bunch of dead python code. (Closed) Base URL: git@github.com:domokit/mojo.git@master
Patch Set: Created 5 years, 10 months ago
1 # Copyright (c) 2010 Google Inc. All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
5 # met:
6 #
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
29 import Queue
30 import json
31 import logging
32 import optparse
33 import re
34 import sys
35 import threading
36 import time
37 import traceback
38 import urllib
39 import urllib2
40
41 from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
42 from webkitpy.common.memoized import memoized
43 from webkitpy.common.system.executive import ScriptError
44 from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
45 from webkitpy.layout_tests.models import test_failures
46 from webkitpy.layout_tests.models.test_expectations import TestExpectations, BASELINE_SUFFIX_LIST, SKIP
47 from webkitpy.layout_tests.port import builders
48 from webkitpy.layout_tests.port import factory
49 from webkitpy.tool.multicommandtool import AbstractDeclarativeCommand
50
51
52 _log = logging.getLogger(__name__)
53
54
55 # FIXME: Should TestResultWriter know how to compute this string?
56 def _baseline_name(fs, test_name, suffix):
57 return fs.splitext(test_name)[0] + TestResultWriter.FILENAME_SUFFIX_EXPECTED + "." + suffix
58
59
60 class AbstractRebaseliningCommand(AbstractDeclarativeCommand):
61 # not overriding execute() - pylint: disable=W0223
62
63 no_optimize_option = optparse.make_option('--no-optimize', dest='optimize', action='store_false', default=True,
64 help=('Do not optimize/de-dup the expectations after rebaselining (default is to de-dup automatically). '
65 'You can use "webkit-patch optimize-baselines" to optimize separately.'))
66
67 platform_options = factory.platform_options(use_globs=True)
68
69 results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use")
70
71 suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store",
72 help="Comma-separated-list of file types to rebaseline")
73
74 def __init__(self, options=None):
75 super(AbstractRebaseliningCommand, self).__init__(options=options)
76 self._baseline_suffix_list = BASELINE_SUFFIX_LIST
77 self._scm_changes = {'add': [], 'delete': [], 'remove-lines': []}
78
79 def _add_to_scm_later(self, path):
80 self._scm_changes['add'].append(path)
81
82 def _delete_from_scm_later(self, path):
83 self._scm_changes['delete'].append(path)
84
85
86 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand):
87 def __init__(self):
88 super(BaseInternalRebaselineCommand, self).__init__(options=[
89 self.results_directory_option,
90 self.suffixes_option,
91 optparse.make_option("--builder", help="Builder to pull new baselines from"),
92 optparse.make_option("--test", help="Test to rebaseline"),
93 ])
94
95 def _baseline_directory(self, builder_name):
96 port = self._tool.port_factory.get_from_builder_name(builder_name)
97 override_dir = builders.rebaseline_override_dir(builder_name)
98 if override_dir:
99 return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir)
100 return port.baseline_version_dir()
101
102 def _test_root(self, test_name):
103 return self._tool.filesystem.splitext(test_name)[0]
104
105 def _file_name_for_actual_result(self, test_name, suffix):
106 return "%s-actual.%s" % (self._test_root(test_name), suffix)
107
108 def _file_name_for_expected_result(self, test_name, suffix):
109 return "%s-expected.%s" % (self._test_root(test_name), suffix)
110
111
112 class CopyExistingBaselinesInternal(BaseInternalRebaselineCommand):
113 name = "copy-existing-baselines-internal"
114 help_text = "Copy existing baselines down one level in the baseline order to ensure new baselines don't break existing passing platforms."
115
116 @memoized
117 def _immediate_predecessors_in_fallback(self, path_to_rebaseline):
118 port_names = self._tool.port_factory.all_port_names()
119 immediate_predecessors_in_fallback = []
120 for port_name in port_names:
121 port = self._tool.port_factory.get(port_name)
122 if not port.buildbot_archives_baselines():
123 continue
124 baseline_search_path = port.baseline_search_path()
125 try:
126 index = baseline_search_path.index(path_to_rebaseline)
127 if index:
128 immediate_predecessors_in_fallback.append(self._tool.filesystem.basename(baseline_search_path[index - 1]))
129 except ValueError:
130 # index throws a ValueError if the item isn't in the list.
131 pass
132 return immediate_predecessors_in_fallback
133
134 def _port_for_primary_baseline(self, baseline):
135 for port in [self._tool.port_factory.get(port_name) for port_name in self._tool.port_factory.all_port_names()]:
136 if self._tool.filesystem.basename(port.baseline_version_dir()) == baseline:
137 return port
138 raise Exception("Failed to find port for primary baseline %s." % baseline)
139
140 def _copy_existing_baseline(self, builder_name, test_name, suffix):
141 baseline_directory = self._baseline_directory(builder_name)
142 ports = [self._port_for_primary_baseline(baseline) for baseline in self._immediate_predecessors_in_fallback(baseline_directory)]
143
144 old_baselines = []
145 new_baselines = []
146
147 # Need to gather all the baseline paths before modifying the filesystem since
148 # the modifications can affect the results of port.expected_filename.
149 for port in ports:
150 old_baseline = port.expected_filename(test_name, "." + suffix)
151 if not self._tool.filesystem.exists(old_baseline):
152 _log.debug("No existing baseline for %s." % test_name)
153 continue
154
155 new_baseline = self._tool.filesystem.join(port.baseline_path(), self._file_name_for_expected_result(test_name, suffix))
156 if self._tool.filesystem.exists(new_baseline):
157 _log.debug("Existing baseline at %s, not copying over it." % new_baseline)
158 continue
159
160 expectations = TestExpectations(port, [test_name])
161 if SKIP in expectations.get_expectations(test_name):
162 _log.debug("%s is skipped on %s." % (test_name, port.name()))
163 continue
164
165 old_baselines.append(old_baseline)
166 new_baselines.append(new_baseline)
167
168 for i in range(len(old_baselines)):
169 old_baseline = old_baselines[i]
170 new_baseline = new_baselines[i]
171
172 _log.debug("Copying baseline from %s to %s." % (old_baseline, new_baseline))
173 self._tool.filesystem.maybe_make_directory(self._tool.filesystem.dirname(new_baseline))
174 self._tool.filesystem.copyfile(old_baseline, new_baseline)
175 if not self._tool.scm().exists(new_baseline):
176 self._add_to_scm_later(new_baseline)
177
178 def execute(self, options, args, tool):
179 for suffix in options.suffixes.split(','):
180 self._copy_existing_baseline(options.builder, options.test, suffix)
181 print json.dumps(self._scm_changes)
182
183
184 class RebaselineTest(BaseInternalRebaselineCommand):
185 name = "rebaseline-test-internal"
186 help_text = "Rebaseline a single test from a buildbot. Only intended for use by other webkit-patch commands."
187
188 def _results_url(self, builder_name):
189 return self._tool.buildbot_for_builder_name(builder_name).builder_with_name(builder_name).latest_layout_test_results_url()
190
191 def _save_baseline(self, data, target_baseline, baseline_directory, test_name, suffix):
192 if not data:
193 _log.debug("No baseline data to save.")
194 return
195
196 filesystem = self._tool.filesystem
197 filesystem.maybe_make_directory(filesystem.dirname(target_baseline))
198 filesystem.write_binary_file(target_baseline, data)
199 if not self._tool.scm().exists(target_baseline):
200 self._add_to_scm_later(target_baseline)
201
202 def _rebaseline_test(self, builder_name, test_name, suffix, results_url):
203 baseline_directory = self._baseline_directory(builder_name)
204
205 source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix))
206 target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix))
207
208 _log.debug("Retrieving %s." % source_baseline)
209 self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), target_baseline, baseline_directory, test_name, suffix)
210
211 def _rebaseline_test_and_update_expectations(self, options):
212 port = self._tool.port_factory.get_from_builder_name(options.builder)
213 if (port.reference_files(options.test)):
214 _log.warning("Cannot rebaseline reftest: %s", options.test)
215 return
216
217 if options.results_directory:
218 results_url = 'file://' + options.results_directory
219 else:
220 results_url = self._results_url(options.builder)
221 self._baseline_suffix_list = options.suffixes.split(',')
222
223 for suffix in self._baseline_suffix_list:
224 self._rebaseline_test(options.builder, options.test, suffix, results_url)
225 self._scm_changes['remove-lines'].append({'builder': options.builder, 'test': options.test})
226
227 def execute(self, options, args, tool):
228 self._rebaseline_test_and_update_expectations(options)
229 print json.dumps(self._scm_changes)
230
231
232 class OptimizeBaselines(AbstractRebaseliningCommand):
233 name = "optimize-baselines"
234 help_text = "Reshuffles the baselines for the given tests to use as little space on disk as possible."
235 show_in_main_help = True
236 argument_names = "TEST_NAMES"
237
238 def __init__(self):
239 super(OptimizeBaselines, self).__init__(options=[
240 self.suffixes_option,
241 optparse.make_option('--no-modify-scm', action='store_true', default=False, help='Dump SCM commands as JSON instead of '),
242 ] + self.platform_options)
243
244 def _optimize_baseline(self, optimizer, test_name):
245 files_to_delete = []
246 files_to_add = []
247 for suffix in self._baseline_suffix_list:
248 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix)
249 succeeded, more_files_to_delete, more_files_to_add = optimizer.optimize(baseline_name)
250 if not succeeded:
251 print "Heuristics failed to optimize %s" % baseline_name
252 files_to_delete.extend(more_files_to_delete)
253 files_to_add.extend(more_files_to_add)
254 return files_to_delete, files_to_add
255
256 def execute(self, options, args, tool):
257 self._baseline_suffix_list = options.suffixes.split(',')
258 port_names = tool.port_factory.all_port_names(options.platform)
259 if not port_names:
260 print "No port names match '%s'" % options.platform
261 return
262
263 optimizer = BaselineOptimizer(tool, port_names, skip_scm_commands=options.no_modify_scm)
264 port = tool.port_factory.get(port_names[0])
265 for test_name in port.tests(args):
266 _log.info("Optimizing %s" % test_name)
267 files_to_delete, files_to_add = self._optimize_baseline(optimizer, test_name)
268 for path in files_to_delete:
269 self._delete_from_scm_later(path)
270 for path in files_to_add:
271 self._add_to_scm_later(path)
272
273 print json.dumps(self._scm_changes)
274
275
276 class AnalyzeBaselines(AbstractRebaseliningCommand):
277 name = "analyze-baselines"
278 help_text = "Analyzes the baselines for the given tests and prints results that are identical."
279 show_in_main_help = True
280 argument_names = "TEST_NAMES"
281
282 def __init__(self):
283 super(AnalyzeBaselines, self).__init__(options=[
284 self.suffixes_option,
285 optparse.make_option('--missing', action='store_true', default=False, help='show missing baselines as well'),
286 ] + self.platform_options)
287 self._optimizer_class = BaselineOptimizer # overridable for testing
288 self._baseline_optimizer = None
289 self._port = None
290
291 def _write(self, msg):
292 print msg
293
294 def _analyze_baseline(self, options, test_name):
295 for suffix in self._baseline_suffix_list:
296 baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix)
297 results_by_directory = self._baseline_optimizer.read_results_by_directory(baseline_name)
298 if results_by_directory:
299 self._write("%s:" % baseline_name)
300 self._baseline_optimizer.write_by_directory(results_by_directory, self._write, " ")
301 elif options.missing:
302 self._write("%s: (no baselines found)" % baseline_name)
303
304 def execute(self, options, args, tool):
305 self._baseline_suffix_list = options.suffixes.split(',')
306 port_names = tool.port_factory.all_port_names(options.platform)
307 if not port_names:
308 print "No port names match '%s'" % options.platform
309 return
310
311 self._baseline_optimizer = self._optimizer_class(tool, port_names, skip_scm_commands=False)
312 self._port = tool.port_factory.get(port_names[0])
313 for test_name in self._port.tests(args):
314 self._analyze_baseline(options, test_name)
315
316
317 class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand):
318 # not overriding execute() - pylint: disable=W0223
319
320 def __init__(self, options=None):
321 super(AbstractParallelRebaselineCommand, self).__init__(options=options)
322 self._builder_data = {}
323
324 def builder_data(self):
325 if not self._builder_data:
326 for builder_name in self._release_builders():
327 builder = self._tool.buildbot_for_builder_name(builder_name).builder_with_name(builder_name)
328 self._builder_data[builder_name] = builder.latest_layout_test_results()
329 return self._builder_data
330
331 # The release builders cycle much faster than the debug ones and cover all the platforms.
332 def _release_builders(self):
333 release_builders = []
334 for builder_name in builders.all_builder_names():
335 if builder_name.find('ASAN') != -1:
336 continue
337 port = self._tool.port_factory.get_from_builder_name(builder_name)
338 if port.test_configuration().build_type == 'release':
339 release_builders.append(builder_name)
340 return release_builders
341
342 def _run_webkit_patch(self, args, verbose):
343 try:
344 verbose_args = ['--verbose'] if verbose else []
345 stderr = self._tool.executive.run_command([self._tool.path()] + verbose_args + args, cwd=self._tool.scm().checkout_root, return_stderr=True)
346 for line in stderr.splitlines():
347 _log.warning(line)
348 except ScriptError, e:
349 _log.error(e)
350
351 def _builders_to_fetch_from(self, builders_to_check):
352 # This routine returns the subset of builders that will cover all of the baseline search paths
353 # used in the input list. In particular, if the input list contains both Release and Debug
354 # versions of a configuration, we *only* return the Release version (since we don't save
355 # debug versions of baselines).
356 release_builders = set()
357 debug_builders = set()
358 builders_to_fallback_paths = {}
359 for builder in builders_to_check:
360 port = self._tool.port_factory.get_from_builder_name(builder)
361 if port.test_configuration().build_type == 'release':
362 release_builders.add(builder)
363 else:
364 debug_builders.add(builder)
365 for builder in list(release_builders) + list(debug_builders):
366 port = self._tool.port_factory.get_from_builder_name(builder)
367 fallback_path = port.baseline_search_path()
368 if fallback_path not in builders_to_fallback_paths.values():
369 builders_to_fallback_paths[builder] = fallback_path
370 return builders_to_fallback_paths.keys()
371
372 def _rebaseline_commands(self, test_prefix_list, options):
373 path_to_webkit_patch = self._tool.path()
374 cwd = self._tool.scm().checkout_root
375 copy_baseline_commands = []
376 rebaseline_commands = []
377 lines_to_remove = {}
378 port = self._tool.port_factory.get()
379
380 for test_prefix in test_prefix_list:
381 for test in port.tests([test_prefix]):
382 for builder in self._builders_to_fetch_from(test_prefix_list[test_prefix]):
383 actual_failures_suffixes = self._suffixes_for_actual_failures(test, builder, test_prefix_list[test_prefix][builder])
384 if not actual_failures_suffixes:
385 # If we're not going to rebaseline the test because it's passing on this
386 # builder, we still want to remove the line from TestExpectations.
387 if test not in lines_to_remove:
388 lines_to_remove[test] = []
389 lines_to_remove[test].append(builder)
390 continue
391
392 suffixes = ','.join(actual_failures_suffixes)
393 cmd_line = ['--suffixes', suffixes, '--builder', builder, '--test', test]
394 if options.results_directory:
395 cmd_line.extend(['--results-directory', options.results_directory])
396 if options.verbose:
397 cmd_line.append('--verbose')
398 copy_baseline_commands.append(tuple([[self._tool.executable, path_to_webkit_patch, 'copy-existing-baselines-internal'] + cmd_line, cwd]))
399 rebaseline_commands.append(tuple([[self._tool.executable, path_to_webkit_patch, 'rebaseline-test-internal'] + cmd_line, cwd]))
400 return copy_baseline_commands, rebaseline_commands, lines_to_remove
401
402 def _serial_commands(self, command_results):
403 files_to_add = set()
404 files_to_delete = set()
405 lines_to_remove = {}
406 for output in [result[1].split('\n') for result in command_results]:
407 file_added = False
408 for line in output:
409 try:
410 if line:
411 parsed_line = json.loads(line)
412 if 'add' in parsed_line:
413 files_to_add.update(parsed_line['add'])
414 if 'delete' in parsed_line:
415 files_to_delete.update(parsed_line['delete'])
416 if 'remove-lines' in parsed_line:
417 for line_to_remove in parsed_line['remove-lines']:
418 test = line_to_remove['test']
419 builder = line_to_remove['builder']
420 if test not in lines_to_remove:
421 lines_to_remove[test] = []
422 lines_to_remove[test].append(builder)
423 file_added = True
424 except ValueError:
425 _log.debug('"%s" is not a JSON object, ignoring' % line)
426
427 if not file_added:
428 _log.debug('Could not add file based off output "%s"' % output)
429
430 return list(files_to_add), list(files_to_delete), lines_to_remove
431
432 def _optimize_baselines(self, test_prefix_list, verbose=False):
433 optimize_commands = []
434 for test in test_prefix_list:
435 all_suffixes = set()
436 for builder in self._builders_to_fetch_from(test_prefix_list[test]):
437 all_suffixes.update(self._suffixes_for_actual_failures(test, builder, test_prefix_list[test][builder]))
438
439 # FIXME: We should propagate the platform options as well.
440 cmd_line = ['--no-modify-scm', '--suffixes', ','.join(all_suffixes), test]
441 if verbose:
442 cmd_line.append('--verbose')
443
444 path_to_webkit_patch = self._tool.path()
445 cwd = self._tool.scm().checkout_root
446 optimize_commands.append(tuple([[self._tool.executable, path_to_webkit_patch, 'optimize-baselines'] + cmd_line, cwd]))
447 return optimize_commands
448
449 def _update_expectations_files(self, lines_to_remove):
450 # FIXME: This routine is way too expensive. We're creating O(n ports) TestExpectations objects.
451 # This is slow and uses a lot of memory.
452 tests = lines_to_remove.keys()
453 to_remove = []
454
455 # This is so we remove lines for builders that skip this test, e.g. Android skips most
456 # tests and we don't want to leave stray [ Android ] lines in TestExpectations.
457 # This is only necessary for "webkit-patch rebaseline" and for rebaselining expected
458 # failures from garden-o-matic. rebaseline-expectations and auto-rebaseline will always
459 # pass the exact set of ports to rebaseline.
460 for port_name in self._tool.port_factory.all_port_names():
461 port = self._tool.port_factory.get(port_name)
462 generic_expectations = TestExpectations(port, tests=tests, include_overrides=False)
463 full_expectations = TestExpectations(port, tests=tests, include_overrides=True)
464 for test in tests:
465 if self._port_skips_test(port, test, generic_expectations, full_expectations):
466 for test_configuration in port.all_test_configurations():
467 if test_configuration.version == port.test_configuration().version:
468 to_remove.append((test, test_configuration))
469
470 for test in lines_to_remove:
471 for builder in lines_to_remove[test]:
472 port = self._tool.port_factory.get_from_builder_name(builder)
473 for test_configuration in port.all_test_configurations():
474 if test_configuration.version == port.test_configuration().version:
475 to_remove.append((test, test_configuration))
476
477 port = self._tool.port_factory.get()
478 expectations = TestExpectations(port, include_overrides=False)
479 expectationsString = expectations.remove_configurations(to_remove)
480 path = port.path_to_generic_test_expectations_file()
481 self._tool.filesystem.write_text_file(path, expectationsString)
482
483 def _port_skips_test(self, port, test, generic_expectations, full_expectations):
484 fs = port.host.filesystem
485 if port.default_smoke_test_only():
486 smoke_test_filename = fs.join(port.layout_tests_dir(), 'SmokeTests')
487 if fs.exists(smoke_test_filename) and test not in fs.read_text_file(smoke_test_filename):
488 return True
489
490 return (SKIP in full_expectations.get_expectations(test) and
491 SKIP not in generic_expectations.get_expectations(test))
492
493 def _run_in_parallel_and_update_scm(self, commands):
494 command_results = self._tool.executive.run_in_parallel(commands)
495 log_output = '\n'.join(result[2] for result in command_results).replace('\n\n', '\n')
496 for line in log_output.split('\n'):
497 if line:
498 print >> sys.stderr, line # FIXME: Figure out how to log properly.
499
500 files_to_add, files_to_delete, lines_to_remove = self._serial_commands(command_results)
501 if files_to_delete:
502 self._tool.scm().delete_list(files_to_delete)
503 if files_to_add:
504 self._tool.scm().add_list(files_to_add)
505 return lines_to_remove
506
507 def _rebaseline(self, options, test_prefix_list):
508 for test, builders_to_check in sorted(test_prefix_list.items()):
509 _log.info("Rebaselining %s" % test)
510 for builder, suffixes in sorted(builders_to_check.items()):
511 _log.debug(" %s: %s" % (builder, ",".join(suffixes)))
512
513 copy_baseline_commands, rebaseline_commands, extra_lines_to_remove = self._rebaseline_commands(test_prefix_list, options)
514 lines_to_remove = {}
515
516 if copy_baseline_commands:
517 self._run_in_parallel_and_update_scm(copy_baseline_commands)
518 if rebaseline_commands:
519 lines_to_remove = self._run_in_parallel_and_update_scm(rebaseline_commands)
520
521 for test in extra_lines_to_remove:
522 if test in lines_to_remove:
523 lines_to_remove[test] = lines_to_remove[test] + extra_lines_to_remove[test]
524 else:
525 lines_to_remove[test] = extra_lines_to_remove[test]
526
527 if lines_to_remove:
528 self._update_expectations_files(lines_to_remove)
529
530 if options.optimize:
531 self._run_in_parallel_and_update_scm(self._optimize_baselines(test_prefix_list, options.verbose))
532
533 def _suffixes_for_actual_failures(self, test, builder_name, existing_suffixes):
534 actual_results = self.builder_data()[builder_name].actual_results(test)
535 if not actual_results:
536 return set()
537 return set(existing_suffixes) & TestExpectations.suffixes_for_actual_expectations_string(actual_results)
538
539
540 class RebaselineJson(AbstractParallelRebaselineCommand):
541 name = "rebaseline-json"
542 help_text = "Rebaseline based off JSON passed to stdin. Intended to only be called from other scripts."
543
544 def __init__(self,):
545 super(RebaselineJson, self).__init__(options=[
546 self.no_optimize_option,
547 self.results_directory_option,
548 ])
549
550 def execute(self, options, args, tool):
551 self._rebaseline(options, json.loads(sys.stdin.read()))
552
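The stdin payload that rebaseline-json reads has the same shape as the test_prefix_list dictionaries built elsewhere in this file: a test name (or prefix) maps to builder names, each of which maps to the list of baseline suffixes to pull. A minimal driver sketch, inferred from Rebaseline.execute() and RebaselineJson.execute() above; the test, builder, and suffix values below are hypothetical placeholders, not taken from this change:

# Sketch only: feed a test_prefix_list to "webkit-patch rebaseline-json" over stdin.
# Shape inferred from the code above; the bot and test names are placeholders.
import json
import subprocess

test_prefix_list = {
    "fast/dom/example.html": {            # test (or test-prefix directory)
        "WebKit Linux": ["txt", "png"],   # builder name -> suffixes to rebaseline
    },
}

proc = subprocess.Popen(["webkit-patch", "rebaseline-json"], stdin=subprocess.PIPE)
proc.communicate(json.dumps(test_prefix_list))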
553
554 class RebaselineExpectations(AbstractParallelRebaselineCommand):
555 name = "rebaseline-expectations"
556 help_text = "Rebaselines the tests indicated in TestExpectations."
557 show_in_main_help = True
558
559 def __init__(self):
560 super(RebaselineExpectations, self).__init__(options=[
561 self.no_optimize_option,
562 ] + self.platform_options)
563 self._test_prefix_list = None
564
565 def _tests_to_rebaseline(self, port):
566 tests_to_rebaseline = {}
567 for path, value in port.expectations_dict().items():
568 expectations = TestExpectations(port, include_overrides=False, expectations_dict={path: value})
569 for test in expectations.get_rebaselining_failures():
570 suffixes = TestExpectations.suffixes_for_expectations(expectations.get_expectations(test))
571 tests_to_rebaseline[test] = suffixes or BASELINE_SUFFIX_LIST
572 return tests_to_rebaseline
573
574 def _add_tests_to_rebaseline_for_port(self, port_name):
575 builder_name = builders.builder_name_for_port_name(port_name)
576 if not builder_name:
577 return
578 tests = self._tests_to_rebaseline(self._tool.port_factory.get(port_name)).items()
579
580 if tests:
581 _log.info("Retrieving results for %s from %s." % (port_name, builder _name))
582
583 for test_name, suffixes in tests:
584 _log.info(" %s (%s)" % (test_name, ','.join(suffixes)))
585 if test_name not in self._test_prefix_list:
586 self._test_prefix_list[test_name] = {}
587 self._test_prefix_list[test_name][builder_name] = suffixes
588
589 def execute(self, options, args, tool):
590 options.results_directory = None
591 self._test_prefix_list = {}
592 port_names = tool.port_factory.all_port_names(options.platform)
593 for port_name in port_names:
594 self._add_tests_to_rebaseline_for_port(port_name)
595 if not self._test_prefix_list:
596 _log.warning("Did not find any tests marked Rebaseline.")
597 return
598
599 self._rebaseline(options, self._test_prefix_list)
600
601
602 class Rebaseline(AbstractParallelRebaselineCommand):
603 name = "rebaseline"
604 help_text = "Rebaseline tests with results from the build bots. Shows the list of failing tests on the builders if no test names are provided."
605 show_in_main_help = True
606 argument_names = "[TEST_NAMES]"
607
608 def __init__(self):
609 super(Rebaseline, self).__init__(options=[
610 self.no_optimize_option,
611 # FIXME: should we support the platform options in addition to (or instead of) --builders?
612 self.suffixes_option,
613 self.results_directory_option,
614 optparse.make_option("--builders", default=None, action="append", help="Comma-separated-list of builders to pull new baselines from (can also be provided multiple times)"),
615 ])
616
617 def _builders_to_pull_from(self):
618 chosen_names = self._tool.user.prompt_with_list("Which builder to pull results from:", self._release_builders(), can_choose_multiple=True)
619 return [self._builder_with_name(name) for name in chosen_names]
620
621 def _builder_with_name(self, name):
622 return self._tool.buildbot_for_builder_name(name).builder_with_name(name)
623
624 def execute(self, options, args, tool):
625 if not args:
626 _log.error("Must list tests to rebaseline.")
627 return
628
629 if options.builders:
630 builders_to_check = []
631 for builder_names in options.builders:
632 builders_to_check += [self._builder_with_name(name) for name in builder_names.split(",")]
633 else:
634 builders_to_check = self._builders_to_pull_from()
635
636 test_prefix_list = {}
637 suffixes_to_update = options.suffixes.split(",")
638
639 for builder in builders_to_check:
640 for test in args:
641 if test not in test_prefix_list:
642 test_prefix_list[test] = {}
643 test_prefix_list[test][builder.name()] = suffixes_to_update
644
645 if options.verbose:
646 _log.debug("rebaseline-json: " + str(test_prefix_list))
647
648 self._rebaseline(options, test_prefix_list)
649
650
651 class AutoRebaseline(AbstractParallelRebaselineCommand):
652 name = "auto-rebaseline"
653 help_text = "Rebaselines any NeedsRebaseline lines in TestExpectations that have cycled through all the bots."
654 AUTO_REBASELINE_BRANCH_NAME = "auto-rebaseline-temporary-branch"
655
656 # Rietveld uploader stinks. Limit the number of rebaselines in a given patch to keep upload from failing.
657 # FIXME: http://crbug.com/263676 Obviously we should fix the uploader here.
658 MAX_LINES_TO_REBASELINE = 200
659
660 SECONDS_BEFORE_GIVING_UP = 300
661
662 def __init__(self):
663 super(AutoRebaseline, self).__init__(options=[
664 # FIXME: Remove this option.
665 self.no_optimize_option,
666 # FIXME: Remove this option.
667 self.results_directory_option,
668 ])
669
670 def bot_revision_data(self):
671 revisions = []
672 for result in self.builder_data().values():
673 if result.run_was_interrupted():
674 _log.error("Can't rebaseline because the latest run on %s exited early." % result.builder_name())
675 return []
676 revisions.append({
677 "builder": result.builder_name(),
678 "revision": result.blink_revision(),
679 })
680 return revisions
681
682 def tests_to_rebaseline(self, tool, min_revision, print_revisions):
683 port = tool.port_factory.get()
684 expectations_file_path = port.path_to_generic_test_expectations_file()
685
686 tests = set()
687 revision = None
688 author = None
689 bugs = set()
690 has_any_needs_rebaseline_lines = False
691
692 for line in tool.scm().blame(expectations_file_path).split("\n"):
693 comment_index = line.find("#")
694 if comment_index == -1:
695 comment_index = len(line)
696 line_without_comments = re.sub(r"\s+", " ", line[:comment_index].strip())
697
698 if "NeedsRebaseline" not in line_without_comments:
699 continue
700
701 has_any_needs_rebaseline_lines = True
702
703 parsed_line = re.match("^(\S*)[^(]*\((\S*).*?([^ ]*)\ \[[^[]*$", line_without_comments)
704
705 commit_hash = parsed_line.group(1)
706 svn_revision = tool.scm().svn_revision_from_git_commit(commit_hash)
707
708 test = parsed_line.group(3)
709 if print_revisions:
710 _log.info("%s is waiting for r%s" % (test, svn_revision))
711
712 if not svn_revision or svn_revision > min_revision:
713 continue
714
715 if revision and svn_revision != revision:
716 continue
717
718 if not revision:
719 revision = svn_revision
720 author = parsed_line.group(2)
721
722 bugs.update(re.findall("crbug\.com\/(\d+)", line_without_comments))
723 tests.add(test)
724
725 if len(tests) >= self.MAX_LINES_TO_REBASELINE:
726 _log.info("Too many tests to rebaseline in one patch. Doing the first %d." % self.MAX_LINES_TO_REBASELINE)
727 break
728
729 return tests, revision, author, bugs, has_any_needs_rebaseline_lines
730
731 def link_to_patch(self, revision):
732 return "http://src.chromium.org/viewvc/blink?view=revision&revision=" + str(revision)
733
734 def commit_message(self, author, revision, bugs):
735 bug_string = ""
736 if bugs:
737 bug_string = "BUG=%s\n" % ",".join(bugs)
738
739 return """Auto-rebaseline for r%s
740
741 %s
742
743 %sTBR=%s
744 """ % (revision, self.link_to_patch(revision), bug_string, author)
745
746 def get_test_prefix_list(self, tests):
747 test_prefix_list = {}
748 lines_to_remove = {}
749
750 for builder_name in self._release_builders():
751 port_name = builders.port_name_for_builder_name(builder_name)
752 port = self._tool.port_factory.get(port_name)
753 expectations = TestExpectations(port, include_overrides=True)
754 for test in expectations.get_needs_rebaseline_failures():
755 if test not in tests:
756 continue
757
758 if test not in test_prefix_list:
759 lines_to_remove[test] = []
760 test_prefix_list[test] = {}
761 lines_to_remove[test].append(builder_name)
762 test_prefix_list[test][builder_name] = BASELINE_SUFFIX_LIST
763
764 return test_prefix_list, lines_to_remove
765
766 def _run_git_cl_command(self, options, command):
767 subprocess_command = ['git', 'cl'] + command
768 if options.verbose:
769 subprocess_command.append('--verbose')
770
771 process = self._tool.executive.popen(subprocess_command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.STDOUT)
772 last_output_time = time.time()
773
774 # git cl sometimes completely hangs. Bail if we haven't gotten any output to stdout/stderr in a while.
775 while process.poll() == None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP:
776 # FIXME: This doesn't make any sense. readline blocks, so all this code to
777 # try and bail is useless. Instead, we should do the readline calls on a
778 # subthread. Then the rest of this code would make sense.
779 out = process.stdout.readline().rstrip('\n')
780 if out:
781 last_output_time = time.time()
782 _log.info(out)
783
784 if process.poll() == None:
785 _log.error('Command hung: %s' % subprocess_command)
786 return False
787 return True
788
789 # FIXME: Move this somewhere more general.
790 def tree_status(self):
791 blink_tree_status_url = "http://blink-status.appspot.com/status"
792 status = urllib2.urlopen(blink_tree_status_url).read().lower()
793 if status.find('closed') != -1 or status == "0":
794 return 'closed'
795 elif status.find('open') != -1 or status == "1":
796 return 'open'
797 return 'unknown'
798
799 def execute(self, options, args, tool):
800 if tool.scm().executable_name == "svn":
801 _log.error("Auto rebaseline only works with a git checkout.")
802 return
803
804 if tool.scm().has_working_directory_changes():
805 _log.error("Cannot proceed with working directory changes. Clean working directory first.")
806 return
807
808 revision_data = self.bot_revision_data()
809 if not revision_data:
810 return
811
812 min_revision = int(min([item["revision"] for item in revision_data]))
813 tests, revision, author, bugs, has_any_needs_rebaseline_lines = self.tests_to_rebaseline(tool, min_revision, print_revisions=options.verbose)
814
815 if options.verbose:
816 _log.info("Min revision across all bots is %s." % min_revision)
817 for item in revision_data:
818 _log.info("%s: r%s" % (item["builder"], item["revision"]))
819
820 if not tests:
821 _log.debug('No tests to rebaseline.')
822 return
823
824 if self.tree_status() == 'closed':
825 _log.info('Cannot proceed. Tree is closed.')
826 return
827
828 _log.info('Rebaselining %s for r%s by %s.' % (list(tests), revision, author))
829
830 test_prefix_list, lines_to_remove = self.get_test_prefix_list(tests)
831
832 did_finish = False
833 try:
834 old_branch_name = tool.scm().current_branch()
835 tool.scm().delete_branch(self.AUTO_REBASELINE_BRANCH_NAME)
836 tool.scm().create_clean_branch(self.AUTO_REBASELINE_BRANCH_NAME)
837
838 # If the tests are passing everywhere, then this list will be empty. We don't need
839 # to rebaseline, but we'll still need to update TestExpectations.
840 if test_prefix_list:
841 self._rebaseline(options, test_prefix_list)
842
843 tool.scm().commit_locally_with_message(self.commit_message(author, revision, bugs))
844
845 # FIXME: It would be nice if we could dcommit the patch without uploading, but still
846 # go through all the precommit hooks. For rebaselines with lots of files, uploading
847 # takes a long time and sometimes fails, but we don't want to commit if, e.g. the
848 # tree is closed.
849 did_finish = self._run_git_cl_command(options, ['upload', '-f'])
850
851 if did_finish:
852 # Uploading can take a very long time. Do another pull to make sure TestExpectations is up to date,
853 # so the dcommit can go through.
854 # FIXME: Log the pull and dcommit stdout/stderr to the log-server.
855 tool.executive.run_command(['git', 'pull'])
856
857 self._run_git_cl_command(options, ['dcommit', '-f'])
858 except Exception as e:
859 _log.error(e)
860 finally:
861 if did_finish:
862 self._run_git_cl_command(options, ['set_close'])
863 tool.scm().ensure_cleanly_tracking_remote_master()
864 tool.scm().checkout_branch(old_branch_name)
865 tool.scm().delete_branch(self.AUTO_REBASELINE_BRANCH_NAME)
866
867
868 class RebaselineOMatic(AbstractDeclarativeCommand):
869 name = "rebaseline-o-matic"
870 help_text = "Calls webkit-patch auto-rebaseline in a loop."
871 show_in_main_help = True
872
873 SLEEP_TIME_IN_SECONDS = 30
874 LOG_SERVER = 'blinkrebaseline.appspot.com'
875 QUIT_LOG = '##QUIT##'
876
877 # Uploaded log entries append to the existing entry unless the
878 # newentry flag is set. In that case it starts a new entry to
879 # start appending to.
880 def _log_to_server(self, log='', is_new_entry=False):
881 query = {
882 'log': log,
883 }
884 if is_new_entry:
885 query['newentry'] = 'on'
886 try:
887 urllib2.urlopen("http://" + self.LOG_SERVER + "/updatelog", data=urllib.urlencode(query))
888 except:
889 traceback.print_exc(file=sys.stderr)
890
891 def _log_to_server_thread(self):
892 is_new_entry = True
893 while True:
894 messages = [self._log_queue.get()]
895 while not self._log_queue.empty():
896 messages.append(self._log_queue.get())
897 self._log_to_server('\n'.join(messages), is_new_entry=is_new_entry)
898 is_new_entry = False
899 if self.QUIT_LOG in messages:
900 return
901
902 def _post_log_to_server(self, log):
903 self._log_queue.put(log)
904
905 def _log_line(self, handle):
906 out = handle.readline().rstrip('\n')
907 if out:
908 if self._verbose:
909 print out
910 self._post_log_to_server(out)
911 return out
912
913 def _run_logged_command(self, command):
914 process = self._tool.executive.popen(command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.STDOUT)
915
916 out = self._log_line(process.stdout)
917 while out:
918 # FIXME: This should probably batch up lines if they're available and log to the server once.
919 out = self._log_line(process.stdout)
920
921 def _do_one_rebaseline(self):
922 self._log_queue = Queue.Queue(256)
923 log_thread = threading.Thread(name='LogToServer', target=self._log_to_server_thread)
924 log_thread.start()
925 try:
926 old_branch_name = self._tool.scm().current_branch()
927 self._run_logged_command(['git', 'pull'])
928 rebaseline_command = [self._tool.filesystem.join(self._tool.scm().checkout_root, 'tools', 'webkit-patch'), 'auto-rebaseline']
929 if self._verbose:
930 rebaseline_command.append('--verbose')
931 self._run_logged_command(rebaseline_command)
932 except:
933 self._log_queue.put(self.QUIT_LOG)
934 traceback.print_exc(file=sys.stderr)
935 # Sometimes git crashes and leaves us on a detached head.
936 self._tool.scm().checkout_branch(old_branch_name)
937 else:
938 self._log_queue.put(self.QUIT_LOG)
939 log_thread.join()
940
941 def execute(self, options, args, tool):
942 self._verbose = options.verbose
943 while True:
944 self._do_one_rebaseline()
945 time.sleep(self.SLEEP_TIME_IN_SECONDS)
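For reference, the hand-off between the parallel driver and the internal commands above is line-oriented JSON on stdout: copy-existing-baselines-internal and rebaseline-test-internal each print one _scm_changes object per run ({"add": [...], "delete": [...], "remove-lines": [...]}), and _serial_commands() merges those lines into file lists and a per-test map of builders. A standalone sketch of that merge, assuming the same output format; the sample path and builder name are hypothetical:

# Sketch only: fold per-command JSON output lines the way _serial_commands() does.
# The sample output lines below are hypothetical.
import json

sample_outputs = [
    '{"add": ["platform/linux/fast/dom/example-expected.txt"], "delete": [], "remove-lines": []}',
    '{"add": [], "delete": [], "remove-lines": [{"builder": "WebKit Linux", "test": "fast/dom/example.html"}]}',
]

files_to_add, files_to_delete, lines_to_remove = set(), set(), {}
for line in sample_outputs:
    changes = json.loads(line)
    files_to_add.update(changes.get("add", []))
    files_to_delete.update(changes.get("delete", []))
    for entry in changes.get("remove-lines", []):
        lines_to_remove.setdefault(entry["test"], []).append(entry["builder"])

# files_to_add/files_to_delete feed scm().add_list()/delete_list();
# lines_to_remove feeds _update_expectations_files().
print files_to_add, files_to_delete, lines_to_remove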