OLD | NEW |
1 # Copyright (c) 2010 Google Inc. All rights reserved. | 1 # Copyright (c) 2010 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 131 matching lines...)
142 for baseline in self._immediate_predecessors_in_fallback(baseline_directory)] | 142 for baseline in self._immediate_predecessors_in_fallback(baseline_directory)] |
143 | 143 |
144 old_baselines = [] | 144 old_baselines = [] |
145 new_baselines = [] | 145 new_baselines = [] |
146 | 146 |
147 # Need to gather all the baseline paths before modifying the filesystem since | 147 # Need to gather all the baseline paths before modifying the filesystem since |
148 # the modifications can affect the results of port.expected_filename. | 148 # the modifications can affect the results of port.expected_filename. |
149 for port in ports: | 149 for port in ports: |
150 old_baseline = port.expected_filename(test_name, "." + suffix) | 150 old_baseline = port.expected_filename(test_name, "." + suffix) |
151 if not self._tool.filesystem.exists(old_baseline): | 151 if not self._tool.filesystem.exists(old_baseline): |
152 _log.debug("No existing baseline for %s." % test_name) | 152 _log.debug("No existing baseline for %s.", test_name) |
153 continue | 153 continue |
154 | 154 |
155 new_baseline = self._tool.filesystem.join(port.baseline_path(), self._file_name_for_expected_result(test_name, suffix)) | 155 new_baseline = self._tool.filesystem.join(port.baseline_path(), self._file_name_for_expected_result(test_name, suffix)) |
156 if self._tool.filesystem.exists(new_baseline): | 156 if self._tool.filesystem.exists(new_baseline): |
157 _log.debug("Existing baseline at %s, not copying over it." % new_baseline) | 157 _log.debug("Existing baseline at %s, not copying over it.", new_baseline) |
158 continue | 158 continue |
159 | 159 |
160 expectations = TestExpectations(port, [test_name]) | 160 expectations = TestExpectations(port, [test_name]) |
161 if SKIP in expectations.get_expectations(test_name): | 161 if SKIP in expectations.get_expectations(test_name): |
162 _log.debug("%s is skipped on %s." % (test_name, port.name())) | 162 _log.debug("%s is skipped on %s.", test_name, port.name()) |
163 continue | 163 continue |
164 | 164 |
165 old_baselines.append(old_baseline) | 165 old_baselines.append(old_baseline) |
166 new_baselines.append(new_baseline) | 166 new_baselines.append(new_baseline) |
167 | 167 |
168 for i in range(len(old_baselines)): | 168 for i in range(len(old_baselines)): |
169 old_baseline = old_baselines[i] | 169 old_baseline = old_baselines[i] |
170 new_baseline = new_baselines[i] | 170 new_baseline = new_baselines[i] |
171 | 171 |
172 _log.debug("Copying baseline from %s to %s." % (old_baseline, new_baseline)) | 172 _log.debug("Copying baseline from %s to %s.", old_baseline, new_baseline) |
173 self._tool.filesystem.maybe_make_directory(self._tool.filesystem.dirname(new_baseline)) | 173 self._tool.filesystem.maybe_make_directory(self._tool.filesystem.dirname(new_baseline)) |
174 self._tool.filesystem.copyfile(old_baseline, new_baseline) | 174 self._tool.filesystem.copyfile(old_baseline, new_baseline) |
175 if not self._tool.scm().exists(new_baseline): | 175 if not self._tool.scm().exists(new_baseline): |
176 self._add_to_scm_later(new_baseline) | 176 self._add_to_scm_later(new_baseline) |
177 | 177 |
178 def execute(self, options, args, tool): | 178 def execute(self, options, args, tool): |
179 for suffix in options.suffixes.split(','): | 179 for suffix in options.suffixes.split(','): |
180 self._copy_existing_baseline(options.builder, options.test, suffix) | 180 self._copy_existing_baseline(options.builder, options.test, suffix) |
181 self._print_scm_changes() | 181 self._print_scm_changes() |
182 | 182 |
(...skipping 12 matching lines...)
195 filesystem.write_binary_file(target_baseline, data) | 195 filesystem.write_binary_file(target_baseline, data) |
196 if not self._tool.scm().exists(target_baseline): | 196 if not self._tool.scm().exists(target_baseline): |
197 self._add_to_scm_later(target_baseline) | 197 self._add_to_scm_later(target_baseline) |
198 | 198 |
199 def _rebaseline_test(self, builder_name, test_name, suffix, results_url): | 199 def _rebaseline_test(self, builder_name, test_name, suffix, results_url): |
200 baseline_directory = self._baseline_directory(builder_name) | 200 baseline_directory = self._baseline_directory(builder_name) |
201 | 201 |
202 source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix)) | 202 source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix)) |
203 target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix)) | 203 target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix)) |
204 | 204 |
205 _log.debug("Retrieving source %s for target %s." % (source_baseline, target_baseline)) | 205 _log.debug("Retrieving source %s for target %s.", source_baseline, target_baseline) |
206 self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), | 206 self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), |
207 target_baseline) | 207 target_baseline) |
208 | 208 |
209 def _rebaseline_test_and_update_expectations(self, options): | 209 def _rebaseline_test_and_update_expectations(self, options): |
210 self._baseline_suffix_list = options.suffixes.split(',') | 210 self._baseline_suffix_list = options.suffixes.split(',') |
211 | 211 |
212 port = self._tool.port_factory.get_from_builder_name(options.builder) | 212 port = self._tool.port_factory.get_from_builder_name(options.builder) |
213 if port.reference_files(options.test): | 213 if port.reference_files(options.test): |
214 if 'png' in self._baseline_suffix_list: | 214 if 'png' in self._baseline_suffix_list: |
215 _log.warning("Cannot rebaseline image result for reftest: %s", options.test) | 215 _log.warning("Cannot rebaseline image result for reftest: %s", options.test) |
(...skipping 196 matching lines...)
412 files_to_delete.update(parsed_line['delete']) | 412 files_to_delete.update(parsed_line['delete']) |
413 if 'remove-lines' in parsed_line: | 413 if 'remove-lines' in parsed_line: |
414 for line_to_remove in parsed_line['remove-lines']: | 414 for line_to_remove in parsed_line['remove-lines']: |
415 test = line_to_remove['test'] | 415 test = line_to_remove['test'] |
416 builder = line_to_remove['builder'] | 416 builder = line_to_remove['builder'] |
417 if test not in lines_to_remove: | 417 if test not in lines_to_remove: |
418 lines_to_remove[test] = [] | 418 lines_to_remove[test] = [] |
419 lines_to_remove[test].append(builder) | 419 lines_to_remove[test].append(builder) |
420 file_added = True | 420 file_added = True |
421 except ValueError: | 421 except ValueError: |
422 _log.debug('"%s" is not a JSON object, ignoring' % line) | 422 _log.debug('"%s" is not a JSON object, ignoring', line) |
423 | 423 |
424 if not file_added: | 424 if not file_added: |
425 _log.debug('Could not add file based off output "%s"' % output) | 425 _log.debug('Could not add file based off output "%s"', output) |
426 | 426 |
427 return list(files_to_add), list(files_to_delete), lines_to_remove | 427 return list(files_to_add), list(files_to_delete), lines_to_remove |
428 | 428 |
429 def _optimize_baselines(self, test_prefix_list, verbose=False): | 429 def _optimize_baselines(self, test_prefix_list, verbose=False): |
430 optimize_commands = [] | 430 optimize_commands = [] |
431 for test in test_prefix_list: | 431 for test in test_prefix_list: |
432 all_suffixes = set() | 432 all_suffixes = set() |
433 builders_to_fetch_from = self._builders_to_fetch_from(self._builder_names(test_prefix_list[test])) | 433 builders_to_fetch_from = self._builders_to_fetch_from(self._builder_names(test_prefix_list[test])) |
434 for build in sorted(test_prefix_list[test]): | 434 for build in sorted(test_prefix_list[test]): |
435 if build.builder_name not in builders_to_fetch_from: | 435 if build.builder_name not in builders_to_fetch_from: |
(...skipping 92 matching lines...)
528 (build 100), and new text baselines should be downloaded for | 528 (build 100), and new text baselines should be downloaded for |
529 "some/other.html" but only from builder-1. | 529 "some/other.html" but only from builder-1. |
530 TODO(qyearsley): Replace test_prefix_list everywhere with some | 530 TODO(qyearsley): Replace test_prefix_list everywhere with some |
531 sort of class that contains the same data. | 531 sort of class that contains the same data. |
532 skip_checking_actual_results: If True, then the lists of suffixes | 532 skip_checking_actual_results: If True, then the lists of suffixes |
533 to rebaseline from |test_prefix_list| will be used directly; | 533 to rebaseline from |test_prefix_list| will be used directly; |
534 if False, then the list of suffixes will be filtered to include | 534 if False, then the list of suffixes will be filtered to include |
535 suffixes with mismatches in actual results. | 535 suffixes with mismatches in actual results. |
536 """ | 536 """ |
537 for test, builds_to_check in sorted(test_prefix_list.items()): | 537 for test, builds_to_check in sorted(test_prefix_list.items()): |
538 _log.info("Rebaselining %s" % test) | 538 _log.info("Rebaselining %s", test) |
539 for build, suffixes in sorted(builds_to_check.items()): | 539 for build, suffixes in sorted(builds_to_check.items()): |
540 _log.debug(" %s: %s" % (build, ",".join(suffixes))) | 540 _log.debug(" %s: %s", build, ",".join(suffixes)) |
541 | 541 |
542 copy_baseline_commands, rebaseline_commands, extra_lines_to_remove = self._rebaseline_commands( | 542 copy_baseline_commands, rebaseline_commands, extra_lines_to_remove = self._rebaseline_commands( |
543 test_prefix_list, options, skip_checking_actual_results) | 543 test_prefix_list, options, skip_checking_actual_results) |
544 lines_to_remove = {} | 544 lines_to_remove = {} |
545 | 545 |
546 self._run_in_parallel_and_update_scm(copy_baseline_commands) | 546 self._run_in_parallel_and_update_scm(copy_baseline_commands) |
547 lines_to_remove = self._run_in_parallel_and_update_scm(rebaseline_commands) | 547 lines_to_remove = self._run_in_parallel_and_update_scm(rebaseline_commands) |
548 | 548 |
549 for test in extra_lines_to_remove: | 549 for test in extra_lines_to_remove: |
550 if test in lines_to_remove: | 550 if test in lines_to_remove: |
(...skipping 63 matching lines...)
614 tests_to_rebaseline[test] = suffixes or BASELINE_SUFFIX_LIST | 614 tests_to_rebaseline[test] = suffixes or BASELINE_SUFFIX_LIST |
615 return tests_to_rebaseline | 615 return tests_to_rebaseline |
616 | 616 |
617 def _add_tests_to_rebaseline_for_port(self, port_name): | 617 def _add_tests_to_rebaseline_for_port(self, port_name): |
618 builder_name = self._tool.builders.builder_name_for_port_name(port_name) | 618 builder_name = self._tool.builders.builder_name_for_port_name(port_name) |
619 if not builder_name: | 619 if not builder_name: |
620 return | 620 return |
621 tests = self._tests_to_rebaseline(self._tool.port_factory.get(port_name)).items() | 621 tests = self._tests_to_rebaseline(self._tool.port_factory.get(port_name)).items() |
622 | 622 |
623 if tests: | 623 if tests: |
624 _log.info("Retrieving results for %s from %s." % (port_name, builder_name)) | 624 _log.info("Retrieving results for %s from %s.", port_name, builder_name) |
625 | 625 |
626 for test_name, suffixes in tests: | 626 for test_name, suffixes in tests: |
627 _log.info(" %s (%s)" % (test_name, ','.join(suffixes))) | 627 _log.info(" %s (%s)", test_name, ','.join(suffixes)) |
628 if test_name not in self._test_prefix_list: | 628 if test_name not in self._test_prefix_list: |
629 self._test_prefix_list[test_name] = {} | 629 self._test_prefix_list[test_name] = {} |
630 self._test_prefix_list[test_name][Build(builder_name)] = suffixes | 630 self._test_prefix_list[test_name][Build(builder_name)] = suffixes |
631 | 631 |
632 def execute(self, options, args, tool): | 632 def execute(self, options, args, tool): |
633 options.results_directory = None | 633 options.results_directory = None |
634 self._test_prefix_list = {} | 634 self._test_prefix_list = {} |
635 port_names = tool.port_factory.all_port_names(options.platform) | 635 port_names = tool.port_factory.all_port_names(options.platform) |
636 for port_name in port_names: | 636 for port_name in port_names: |
637 self._add_tests_to_rebaseline_for_port(port_name) | 637 self._add_tests_to_rebaseline_for_port(port_name) |
(...skipping 86 matching lines...)
724 ) | 724 ) |
725 .*?([^ ]*) # Test file name | 725 .*?([^ ]*) # Test file name |
726 \ \[ # Single space followed by opening [ for expectation specifier | 726 \ \[ # Single space followed by opening [ for expectation specifier |
727 [^[]*$ # Prevents matching previous [ for version specifiers instead of expectation specifiers | 727 [^[]*$ # Prevents matching previous [ for version specifiers instead of expectation specifiers |
728 """, re.VERBOSE) | 728 """, re.VERBOSE) |
729 | 729 |
730 def bot_revision_data(self, scm): | 730 def bot_revision_data(self, scm): |
731 revisions = [] | 731 revisions = [] |
732 for result in self.build_data().values(): | 732 for result in self.build_data().values(): |
733 if result.run_was_interrupted(): | 733 if result.run_was_interrupted(): |
734 _log.error("Can't rebaseline because the latest run on %s exited early." % result.builder_name()) | 734 _log.error("Can't rebaseline because the latest run on %s exited early.", result.builder_name()) |
735 return [] | 735 return [] |
736 revisions.append({ | 736 revisions.append({ |
737 "builder": result.builder_name(), | 737 "builder": result.builder_name(), |
738 "revision": result.chromium_revision(scm), | 738 "revision": result.chromium_revision(scm), |
739 }) | 739 }) |
740 return revisions | 740 return revisions |
741 | 741 |
742 def _strip_comments(self, line): | 742 def _strip_comments(self, line): |
743 comment_index = line.find("#") | 743 comment_index = line.find("#") |
744 if comment_index == -1: | 744 if comment_index == -1: |
(...skipping 16 matching lines...)
761 if "NeedsRebaseline" not in line: | 761 if "NeedsRebaseline" not in line: |
762 continue | 762 continue |
763 | 763 |
764 has_any_needs_rebaseline_lines = True | 764 has_any_needs_rebaseline_lines = True |
765 | 765 |
766 parsed_line = self._blame_regex.match(line) | 766 parsed_line = self._blame_regex.match(line) |
767 if not parsed_line: | 767 if not parsed_line: |
768 # Deal gracefully with inability to parse blame info for a line in TestExpectations. | 768 # Deal gracefully with inability to parse blame info for a line in TestExpectations. |
769 # Parsing could fail if for example during local debugging the developer modifies | 769 # Parsing could fail if for example during local debugging the developer modifies |
770 # TestExpectations and does not commit. | 770 # TestExpectations and does not commit. |
771 _log.info("Couldn't find blame info for expectations line, skipping [line=%s]." % line) | 771 _log.info("Couldn't find blame info for expectations line, skipping [line=%s].", line) |
772 continue | 772 continue |
773 | 773 |
774 commit_hash = parsed_line.group(1) | 774 commit_hash = parsed_line.group(1) |
775 commit_position = tool.scm().commit_position_from_git_commit(commit_hash) | 775 commit_position = tool.scm().commit_position_from_git_commit(commit_hash) |
776 | 776 |
777 test = parsed_line.group(3) | 777 test = parsed_line.group(3) |
778 if print_revisions: | 778 if print_revisions: |
779 _log.info("%s is waiting for r%s" % (test, commit_position)) | 779 _log.info("%s is waiting for r%s", test, commit_position) |
780 | 780 |
781 if not commit_position or commit_position > min_revision: | 781 if not commit_position or commit_position > min_revision: |
782 continue | 782 continue |
783 | 783 |
784 if revision and commit_position != revision: | 784 if revision and commit_position != revision: |
785 continue | 785 continue |
786 | 786 |
787 if not revision: | 787 if not revision: |
788 revision = commit_position | 788 revision = commit_position |
789 commit = commit_hash | 789 commit = commit_hash |
790 author = parsed_line.group(2) | 790 author = parsed_line.group(2) |
791 | 791 |
792 bugs.update(re.findall(r"crbug\.com\/(\d+)", line)) | 792 bugs.update(re.findall(r"crbug\.com\/(\d+)", line)) |
793 tests.add(test) | 793 tests.add(test) |
794 | 794 |
795 if len(tests) >= self.MAX_LINES_TO_REBASELINE: | 795 if len(tests) >= self.MAX_LINES_TO_REBASELINE: |
796 _log.info("Too many tests to rebaseline in one patch. Doing the first %d." % self.MAX_LINES_TO_REBASELINE) | 796 _log.info("Too many tests to rebaseline in one patch. Doing the first %d.", self.MAX_LINES_TO_REBASELINE) |
797 break | 797 break |
798 | 798 |
799 return tests, revision, commit, author, bugs, has_any_needs_rebaseline_lines | 799 return tests, revision, commit, author, bugs, has_any_needs_rebaseline_lines |
800 | 800 |
801 def link_to_patch(self, commit): | 801 def link_to_patch(self, commit): |
802 return "https://chromium.googlesource.com/chromium/src/+/" + commit | 802 return "https://chromium.googlesource.com/chromium/src/+/" + commit |
803 | 803 |
804 def commit_message(self, author, revision, commit, bugs): | 804 def commit_message(self, author, revision, commit, bugs): |
805 bug_string = "" | 805 bug_string = "" |
806 if bugs: | 806 if bugs: |
(...skipping 42 matching lines...)
849 while process.poll() is None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP: | 849 while process.poll() is None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP: |
850 # FIXME: This doesn't make any sense. readline blocks, so all this code to | 850 # FIXME: This doesn't make any sense. readline blocks, so all this code to |
851 # try and bail is useless. Instead, we should do the readline calls on a | 851 # try and bail is useless. Instead, we should do the readline calls on a |
852 # subthread. Then the rest of this code would make sense. | 852 # subthread. Then the rest of this code would make sense. |
853 out = process.stdout.readline().rstrip('\n') | 853 out = process.stdout.readline().rstrip('\n') |
854 if out: | 854 if out: |
855 last_output_time = time.time() | 855 last_output_time = time.time() |
856 _log.info(out) | 856 _log.info(out) |
857 | 857 |
858 if process.poll() is None: | 858 if process.poll() is None: |
859 _log.error('Command hung: %s' % subprocess_command) | 859 _log.error('Command hung: %s', subprocess_command) |
860 return False | 860 return False |
861 return True | 861 return True |
862 | 862 |
863 # FIXME: Move this somewhere more general. | 863 # FIXME: Move this somewhere more general. |
864 def tree_status(self): | 864 def tree_status(self): |
865 blink_tree_status_url = "http://chromium-status.appspot.com/status" | 865 blink_tree_status_url = "http://chromium-status.appspot.com/status" |
866 status = urllib2.urlopen(blink_tree_status_url).read().lower() | 866 status = urllib2.urlopen(blink_tree_status_url).read().lower() |
867 if 'closed' in status or status == "0": | 867 if 'closed' in status or status == "0": |
868 return 'closed' | 868 return 'closed' |
869 elif 'open' in status or status == "1": | 869 elif 'open' in status or status == "1": |
(...skipping 11 matching lines...)
881 | 881 |
882 revision_data = self.bot_revision_data(tool.scm()) | 882 revision_data = self.bot_revision_data(tool.scm()) |
883 if not revision_data: | 883 if not revision_data: |
884 return | 884 return |
885 | 885 |
886 min_revision = int(min([item["revision"] for item in revision_data])) | 886 min_revision = int(min([item["revision"] for item in revision_data])) |
887 tests, revision, commit, author, bugs, _ = self.tests_to_rebaseline( | 887 tests, revision, commit, author, bugs, _ = self.tests_to_rebaseline( |
888 tool, min_revision, print_revisions=options.verbose) | 888 tool, min_revision, print_revisions=options.verbose) |
889 | 889 |
890 if options.verbose: | 890 if options.verbose: |
891 _log.info("Min revision across all bots is %s." % min_revision) | 891 _log.info("Min revision across all bots is %s.", min_revision) |
892 for item in revision_data: | 892 for item in revision_data: |
893 _log.info("%s: r%s" % (item["builder"], item["revision"])) | 893 _log.info("%s: r%s", item["builder"], item["revision"]) |
894 | 894 |
895 if not tests: | 895 if not tests: |
896 _log.debug('No tests to rebaseline.') | 896 _log.debug('No tests to rebaseline.') |
897 return | 897 return |
898 | 898 |
899 if self.tree_status() == 'closed': | 899 if self.tree_status() == 'closed': |
900 _log.info('Cannot proceed. Tree is closed.') | 900 _log.info('Cannot proceed. Tree is closed.') |
901 return | 901 return |
902 | 902 |
903 _log.info('Rebaselining %s for r%s by %s.' % (list(tests), revision, author)) | 903 _log.info('Rebaselining %s for r%s by %s.', list(tests), revision, author) |
904 | 904 |
905 test_prefix_list, _ = self.get_test_prefix_list(tests) | 905 test_prefix_list, _ = self.get_test_prefix_list(tests) |
906 | 906 |
907 did_switch_branches = False | 907 did_switch_branches = False |
908 did_finish = False | 908 did_finish = False |
909 old_branch_name_or_ref = '' | 909 old_branch_name_or_ref = '' |
910 rebaseline_branch_name = self.AUTO_REBASELINE_BRANCH_NAME | 910 rebaseline_branch_name = self.AUTO_REBASELINE_BRANCH_NAME |
911 try: | 911 try: |
912 # Save the current branch name and check out a clean branch for the patch. | 912 # Save the current branch name and check out a clean branch for the patch. |
913 old_branch_name_or_ref = tool.scm().current_branch_or_ref() | 913 old_branch_name_or_ref = tool.scm().current_branch_or_ref() |
(...skipping 35 matching lines...)
949 issue_already_closed = tool.executive.run_command( | 949 issue_already_closed = tool.executive.run_command( |
950 ['git', 'config', 'branch.%s.rietveldissue' % rebaseline_branch_name], | 950 ['git', 'config', 'branch.%s.rietveldissue' % rebaseline_branch_name], |
951 return_exit_code=True) | 951 return_exit_code=True) |
952 if not issue_already_closed: | 952 if not issue_already_closed: |
953 self._run_git_cl_command(options, ['set_close']) | 953 self._run_git_cl_command(options, ['set_close']) |
954 | 954 |
955 tool.scm().ensure_cleanly_tracking_remote_master() | 955 tool.scm().ensure_cleanly_tracking_remote_master() |
956 if old_branch_name_or_ref: | 956 if old_branch_name_or_ref: |
957 tool.scm().checkout_branch(old_branch_name_or_ref) | 957 tool.scm().checkout_branch(old_branch_name_or_ref) |
958 tool.scm().delete_branch(rebaseline_branch_name) | 958 tool.scm().delete_branch(rebaseline_branch_name) |
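Reviewer note: the recurring change throughout this patch replaces eager `%` interpolation inside logging calls with logging's argument-deferred form. A minimal sketch of the difference, using an invented message and argument:

```python
import logging

logging.basicConfig(level=logging.INFO)
_log = logging.getLogger(__name__)

test_name = "fast/dom/some-test.html"

# Old style: the message is formatted with % before debug() runs,
# even though DEBUG records are suppressed at this level.
_log.debug("No existing baseline for %s." % test_name)

# New style: the format string and arguments are passed separately,
# and logging only interpolates if the record is actually emitted.
_log.debug("No existing baseline for %s.", test_name)
```

Besides avoiding wasted formatting work for suppressed records, the deferred form hands the raw format string and arguments to logging filters and handlers, which is presumably why the style check prefers it.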