OLD | NEW |
1 # Copyright (c) 2010 Google Inc. All rights reserved. | 1 # Copyright (c) 2010 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 71 matching lines...) |
82 and the TestExpectations all at once at the end. | 82 and the TestExpectations all at once at the end. |
83 | 83 |
84 Each subprocess can communicate some set of changes by outputting JSON lines, | 84 Each subprocess can communicate some set of changes by outputting JSON lines, |
85 and another process can aggregate the changes. | 85 and another process can aggregate the changes. |
86 | 86 |
87 TODO(qyearsley): Refactor this so that: | 87 TODO(qyearsley): Refactor this so that: |
88 - lines to remove are only tracked in one format. | 88 - lines to remove are only tracked in one format. |
89 - files to add and delete are always disjoint sets. | 89 - files to add and delete are always disjoint sets. |
90 """ | 90 """ |
91 def __init__(self, files_to_add=None, files_to_delete=None, lines_to_remove=None): | 91 def __init__(self, files_to_add=None, files_to_delete=None, lines_to_remove=None): |
92 self.files_to_add = files_to_add or [] | 92 self._files_to_add = set(files_to_add or []) |
93 self.files_to_delete = files_to_delete or [] | 93 self._files_to_delete = set(files_to_delete or []) |
94 self.lines_to_remove = lines_to_remove or {} | 94 lines_to_remove = lines_to_remove or {} |
| 95 self._lines_to_remove = {t: set(builders) for t, builders in lines_to_remove.iteritems()} |
95 | 96 |
96 def add_file(self, path): | 97 def add_file(self, path): |
97 self.files_to_add.append(path) | 98 self._files_to_add.add(path) |
98 | 99 |
99 def delete_file(self, path): | 100 def delete_file(self, path): |
100 self.files_to_delete.append(path) | 101 self._files_to_delete.add(path) |
101 | 102 |
102 def remove_line(self, test, builder): | 103 def remove_line(self, test, builder): |
103 if test not in self.lines_to_remove: | 104 if test not in self._lines_to_remove: |
104 self.lines_to_remove[test] = [] | 105 self._lines_to_remove[test] = set() |
105 self.lines_to_remove[test].append(builder) | 106 self._lines_to_remove[test].add(builder) |
| 107 |
| 108 @property |
| 109 def files_to_add(self): |
| 110 return sorted(self._files_to_add - self._files_to_delete) |
| 111 |
| 112 @property |
| 113 def files_to_delete(self): |
| 114 return sorted(self._files_to_delete - self._files_to_add) |
| 115 |
| 116 @property |
| 117 def lines_to_remove(self): |
| 118 return {t: sorted(set(builders)) for t, builders in sorted(self._lines_to_remove.iteritems())} |
106 | 119 |
107 def to_dict(self): | 120 def to_dict(self): |
108 remove_lines = [] | 121 remove_lines = [] |
109 for test in self.lines_to_remove: | 122 for test in sorted(self.lines_to_remove): |
110 for builder in self.lines_to_remove[test]: | 123 for builder in sorted(set(self.lines_to_remove[test])): |
111 remove_lines.append({'test': test, 'builder': builder}) | 124 remove_lines.append({'test': test, 'builder': builder}) |
112 return { | 125 return { |
113 'add': list(self.files_to_add), | 126 'add': self.files_to_add, |
114 'delete': list(self.files_to_delete), | 127 'delete': self.files_to_delete, |
115 'remove-lines': remove_lines, | 128 'remove-lines': remove_lines, |
116 } | 129 } |
117 | 130 |
118 @staticmethod | 131 @staticmethod |
119 def from_dict(change_dict): | 132 def from_dict(change_dict): |
120 files_to_add = set() | 133 files_to_add = set() |
121 files_to_delete = set() | 134 files_to_delete = set() |
122 lines_to_remove = {} | 135 lines_to_remove = {} |
123 if 'add' in change_dict: | 136 if 'add' in change_dict: |
124 files_to_add.update(change_dict['add']) | 137 files_to_add.update(change_dict['add']) |
125 if 'delete' in change_dict: | 138 if 'delete' in change_dict: |
126 files_to_delete.update(change_dict['delete']) | 139 files_to_delete.update(change_dict['delete']) |
127 if 'remove-lines' in change_dict: | 140 if 'remove-lines' in change_dict: |
128 for line_to_remove in change_dict['remove-lines']: | 141 for line_to_remove in change_dict['remove-lines']: |
129 test = line_to_remove['test'] | 142 test = line_to_remove['test'] |
130 builder = line_to_remove['builder'] | 143 builder = line_to_remove['builder'] |
131 if test not in lines_to_remove: | 144 if test not in lines_to_remove: |
132 lines_to_remove[test] = [] | 145 lines_to_remove[test] = [] |
133 lines_to_remove[test].append(builder) | 146 lines_to_remove[test].append(builder) |
134 return ChangeSet( | 147 return ChangeSet( |
135 files_to_add=list(files_to_add), | 148 files_to_add=files_to_add, |
136 files_to_delete=list(files_to_delete), | 149 files_to_delete=files_to_delete, |
137 lines_to_remove=lines_to_remove) | 150 lines_to_remove=lines_to_remove) |
138 | 151 |
139 def update(self, other): | 152 def update(self, other): |
140 assert isinstance(other, ChangeSet) | 153 assert isinstance(other, ChangeSet) |
141 assert type(other.lines_to_remove) is dict | 154 self._files_to_add.update(other.files_to_add) |
142 self.files_to_add.extend(other.files_to_add) | 155 self._files_to_delete.update(other.files_to_delete) |
143 self.files_to_delete.extend(other.files_to_delete) | |
144 for test in other.lines_to_remove: | 156 for test in other.lines_to_remove: |
145 if test not in self.lines_to_remove: | 157 if test not in self._lines_to_remove: |
146 self.lines_to_remove[test] = [] | 158 self._lines_to_remove[test] = set() |
147 self.lines_to_remove[test].extend(other.lines_to_remove[test]) | 159 self._lines_to_remove[test].update(other.lines_to_remove[test]) |
148 | 160 |
149 | 161 |
150 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand): | 162 class BaseInternalRebaselineCommand(AbstractRebaseliningCommand): |
151 """Base class for rebaseline-related commands that are intended to be used b
y other commands.""" | 163 """Base class for rebaseline-related commands that are intended to be used b
y other commands.""" |
152 # Not overriding execute() - pylint: disable=abstract-method | 164 # Not overriding execute() - pylint: disable=abstract-method |
153 | 165 |
154 def __init__(self): | 166 def __init__(self): |
155 super(BaseInternalRebaselineCommand, self).__init__(options=[ | 167 super(BaseInternalRebaselineCommand, self).__init__(options=[ |
156 self.results_directory_option, | 168 self.results_directory_option, |
157 self.suffixes_option, | 169 self.suffixes_option, |
(...skipping 329 matching lines...) |
487 def _port_skips_test(port, test, generic_expectations, full_expectations): | 499 def _port_skips_test(port, test, generic_expectations, full_expectations): |
488 fs = port.host.filesystem | 500 fs = port.host.filesystem |
489 if port.default_smoke_test_only(): | 501 if port.default_smoke_test_only(): |
490 smoke_test_filename = fs.join(port.layout_tests_dir(), 'SmokeTests') | 502 smoke_test_filename = fs.join(port.layout_tests_dir(), 'SmokeTests') |
491 if fs.exists(smoke_test_filename) and test not in fs.read_text_file(smoke_test_filename): | 503 if fs.exists(smoke_test_filename) and test not in fs.read_text_file(smoke_test_filename): |
492 return True | 504 return True |
493 | 505 |
494 return (SKIP in full_expectations.get_expectations(test) and | 506 return (SKIP in full_expectations.get_expectations(test) and |
495 SKIP not in generic_expectations.get_expectations(test)) | 507 SKIP not in generic_expectations.get_expectations(test)) |
496 | 508 |
497 def _run_in_parallel(self, commands, update_scm=True): | 509 def _run_in_parallel(self, commands): |
498 if not commands: | 510 if not commands: |
499 return {} | 511 return ChangeSet() |
500 | 512 |
501 # TODO(qyearsley): Refactor this block. | 513 # TODO(qyearsley): Refactor this block. |
502 command_results = self._tool.executive.run_in_parallel(commands) | 514 command_results = self._tool.executive.run_in_parallel(commands) |
503 log_output = '\n'.join(result[2] for result in command_results).replace('\n\n', '\n') | 515 log_output = '\n'.join(result[2] for result in command_results).replace('\n\n', '\n') |
504 for line in log_output.split('\n'): | 516 for line in log_output.split('\n'): |
505 if line: | 517 if line: |
506 _log.error(line) | 518 _log.error(line) |
507 | 519 |
508 change_set = self._serial_commands(command_results) | 520 return self._serial_commands(command_results) |
509 | 521 |
510 # TODO(qyearsley): Instead of updating the SCM state here, aggregate changes | 522 def _rebaseline(self, options, test_prefix_list): |
511 # and update once in _rebaseline. See http://crbug.com/639410. | |
512 if update_scm: | |
513 if change_set.files_to_delete: | |
514 self._tool.scm().delete_list(change_set.files_to_delete) | |
515 if change_set.files_to_add: | |
516 self._tool.scm().add_list(change_set.files_to_add) | |
517 | |
518 return change_set.lines_to_remove | |
519 | |
520 def _rebaseline(self, options, test_prefix_list, update_scm=True): | |
521 """Downloads new baselines in parallel, then updates expectations files | 523 """Downloads new baselines in parallel, then updates expectations files |
522 and optimizes baselines. | 524 and optimizes baselines. |
523 | 525 |
524 Args: | 526 Args: |
525 options: An object with the options passed to the current command. | 527 options: An object with the options passed to the current command. |
525 test_prefix_list: A map of test names to Build objects to file suffixes | 528 test_prefix_list: A map of test names to Build objects to file suffixes |
527 for new baselines. For example: | 529 for new baselines. For example: |
528 { | 530 { |
529 "some/test.html": {Build("builder-1", 412): ["txt"], Build("
builder-2", 100): ["txt"]}, | 531 "some/test.html": {Build("builder-1", 412): ["txt"], Build("
builder-2", 100): ["txt"]}, |
530 "some/other.html": {Build("builder-1", 412): ["txt"]} | 532 "some/other.html": {Build("builder-1", 412): ["txt"]} |
531 } | 533 } |
532 This would mean that new text baselines should be downloaded for | 534 This would mean that new text baselines should be downloaded for |
533 "some/test.html" on both builder-1 (build 412) and builder-2 | 535 "some/test.html" on both builder-1 (build 412) and builder-2 |
534 (build 100), and new text baselines should be downloaded for | 536 (build 100), and new text baselines should be downloaded for |
535 "some/other.html" but only from builder-1. | 537 "some/other.html" but only from builder-1. |
536 TODO(qyearsley): Replace test_prefix_list everywhere with some | 538 TODO(qyearsley): Replace test_prefix_list everywhere with some |
537 sort of class that contains the same data. | 539 sort of class that contains the same data. |
538 update_scm: If True, commands like `git add` and `git rm` will be run. | |
539 """ | 540 """ |
540 for test, builds_to_check in sorted(test_prefix_list.items()): | 541 for test, builds_to_check in sorted(test_prefix_list.items()): |
541 _log.info("Rebaselining %s", test) | 542 _log.info("Rebaselining %s", test) |
542 for build, suffixes in sorted(builds_to_check.items()): | 543 for build, suffixes in sorted(builds_to_check.items()): |
543 _log.debug(" %s: %s", build, ",".join(suffixes)) | 544 _log.debug(" %s: %s", build, ",".join(suffixes)) |
544 | 545 |
545 copy_baseline_commands, rebaseline_commands, extra_lines_to_remove = self._rebaseline_commands( | 546 copy_baseline_commands, rebaseline_commands, extra_lines_to_remove = self._rebaseline_commands( |
546 test_prefix_list, options) | 547 test_prefix_list, options) |
547 lines_to_remove = {} | |
548 | 548 |
549 self._run_in_parallel(copy_baseline_commands, update_scm=update_scm) | 549 change_set = ChangeSet(lines_to_remove=extra_lines_to_remove) |
550 lines_to_remove = self._run_in_parallel(rebaseline_commands, update_scm=update_scm) | |
551 | 550 |
552 for test in extra_lines_to_remove: | 551 change_set.update(self._run_in_parallel(copy_baseline_commands)) |
553 if test in lines_to_remove: | 552 change_set.update(self._run_in_parallel(rebaseline_commands)) |
554 lines_to_remove[test] = lines_to_remove[test] + extra_lines_to_remove[test] | |
555 else: | |
556 lines_to_remove[test] = extra_lines_to_remove[test] | |
557 | 553 |
558 if lines_to_remove: | 554 if change_set.lines_to_remove: |
559 self._update_expectations_files(lines_to_remove) | 555 self._update_expectations_files(change_set.lines_to_remove) |
560 | 556 |
561 if options.optimize: | 557 if options.optimize: |
562 # TODO(wkorman): Consider changing temporary branch to base off of HEAD rather than | 558 # TODO(wkorman): Consider changing temporary branch to base off of HEAD rather than |
563 # origin/master to ensure we run baseline optimization processes with the same code as | 559 # origin/master to ensure we run baseline optimization processes with the same code as |
564 # auto-rebaseline itself. | 560 # auto-rebaseline itself. |
565 self._run_in_parallel(self._optimize_baselines(test_prefix_list, options.verbose), update_scm=update_scm) | 561 change_set.update(self._run_in_parallel(self._optimize_baselines(test_prefix_list, options.verbose))) |
| 562 |
| 563 if change_set.files_to_delete: |
| 564 self._tool.scm().delete_list(change_set.files_to_delete) |
| 565 if change_set.files_to_add: |
| 566 self._tool.scm().add_list(change_set.files_to_add) |
566 | 567 |
567 def _suffixes_for_actual_failures(self, test, build, existing_suffixes): | 568 def _suffixes_for_actual_failures(self, test, build, existing_suffixes): |
568 """Gets the baseline suffixes for actual mismatch failures in some resul
ts. | 569 """Gets the baseline suffixes for actual mismatch failures in some resul
ts. |
569 | 570 |
570 Args: | 571 Args: |
571 test: A full test path string. | 572 test: A full test path string. |
572 build: A Build object. | 573 build: A Build object. |
573 existing_suffixes: A collection of all suffixes to consider. | 574 existing_suffixes: A collection of all suffixes to consider. |
574 | 575 |
575 Returns: | 576 Returns: |
(...skipping 116 matching lines...) |
692 for test in args: | 693 for test in args: |
693 if test not in test_prefix_list: | 694 if test not in test_prefix_list: |
694 test_prefix_list[test] = {} | 695 test_prefix_list[test] = {} |
695 build = Build(builder) | 696 build = Build(builder) |
696 test_prefix_list[test][build] = suffixes_to_update | 697 test_prefix_list[test][build] = suffixes_to_update |
697 | 698 |
698 if options.verbose: | 699 if options.verbose: |
699 _log.debug("rebaseline-json: " + str(test_prefix_list)) | 700 _log.debug("rebaseline-json: " + str(test_prefix_list)) |
700 | 701 |
701 self._rebaseline(options, test_prefix_list) | 702 self._rebaseline(options, test_prefix_list) |
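
Note on the ChangeSet aggregation described at lines 84-85 of the new file ("Each subprocess can communicate some set of changes by outputting JSON lines, and another process can aggregate the changes"): a minimal sketch of that round trip, using only the to_dict(), from_dict() and update() methods shown in this diff. The helper names emit_change_set and aggregate_change_sets and the stdout convention are illustrative only, not part of this change, and the sketch assumes ChangeSet is already in scope.

import json
import sys

def emit_change_set(change_set):
    # Subprocess side (hypothetical helper): write the ChangeSet as one JSON
    # object per line so the parent can pick it out of the command output.
    sys.stdout.write(json.dumps(change_set.to_dict()) + '\n')

def aggregate_change_sets(json_lines):
    # Parent side (hypothetical helper): rebuild each per-subprocess ChangeSet
    # and merge it into a single combined set via ChangeSet.update().
    combined = ChangeSet()
    for line in json_lines:
        combined.update(ChangeSet.from_dict(json.loads(line)))
    return combined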