OLD | NEW |
1 # Copyright (c) 2010 Google Inc. All rights reserved. | 1 # Copyright (c) 2010 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 640 matching lines...)
651 MAX_LINES_TO_REBASELINE = 200 | 651 MAX_LINES_TO_REBASELINE = 200 |
652 | 652 |
653 SECONDS_BEFORE_GIVING_UP = 300 | 653 SECONDS_BEFORE_GIVING_UP = 300 |
654 | 654 |
655 def __init__(self): | 655 def __init__(self): |
656 super(AutoRebaseline, self).__init__(options=[ | 656 super(AutoRebaseline, self).__init__(options=[ |
657 # FIXME: Remove this option. | 657 # FIXME: Remove this option. |
658 self.no_optimize_option, | 658 self.no_optimize_option, |
659 # FIXME: Remove this option. | 659 # FIXME: Remove this option. |
660 self.results_directory_option, | 660 self.results_directory_option, |
661 optparse.make_option("--log-server", help="Server to send logs to.") | |
662 ]) | 661 ]) |
663 | 662 |
664 def _log_to_server(self, log_server, query): | 663 def bot_revision_data(self): |
665 if not log_server: | |
666 return | |
667 urllib2.urlopen("http://" + log_server + "/updatelog", data=urllib.urlencode(query)) |
668 | |
669 # Logs when there are no NeedsRebaseline lines in TestExpectations. | |
670 # These entries overwrite the existing log entry if the existing | |
671 # entry is also a noneedsrebaseline entry. This is special cased | |
672 # so that the log doesn't get bloated with entries like this | |
673 # when there are no tests that need rebaselining. |
674 def _log_no_needs_rebaseline_lines(self, log_server): | |
675 self._log_to_server(log_server, { | |
676 "noneedsrebaseline": "on", | |
677 }) | |
678 | |
679 # Uploaded log entries append to the existing entry unless the | |
680 # newentry flag is set. In that case it starts a new entry to | |
681 # start appending to. So, we need to call this on any fresh run | |
682 # that is going to end up logging stuff (i.e. any run that isn't | |
683 # a noneedsrebaseline run). | |
684 def _start_new_log_entry(self, log_server): | |
685 self._log_to_server(log_server, { | |
686 "log": "", | |
687 "newentry": "on", | |
688 }) | |
689 | |
690 def _configure_logging(self, log_server): | |
691 if not log_server: | |
692 return | |
693 | |
694 def _log_alias(query): | |
695 self._log_to_server(log_server, query) | |
696 | |
697 class LogHandler(logging.Handler): | |
698 def __init__(self): | |
699 logging.Handler.__init__(self) | |
700 self._records = [] | |
701 | |
702 # Since this does not have the newentry flag, it will append | |
703 # to the most recent log entry (i.e. the one created by | |
704 # _start_new_log_entry. | |
705 def emit(self, record): | |
706 _log_alias({ | |
707 "log": record.getMessage(), | |
708 }) | |
709 | |
710 handler = LogHandler() | |
711 _log.setLevel(logging.DEBUG) | |
712 handler.setLevel(logging.DEBUG) | |
713 _log.addHandler(handler) | |
714 | |
715 def bot_revision_data(self, log_server): | |
716 revisions = [] | 664 revisions = [] |
717 for result in self.builder_data().values(): | 665 for result in self.builder_data().values(): |
718 if result.run_was_interrupted(): | 666 if result.run_was_interrupted(): |
719 self._start_new_log_entry(log_server) | |
720 _log.error("Can't rebaseline because the latest run on %s exited early." % result.builder_name()) | 667 _log.error("Can't rebaseline because the latest run on %s exited early." % result.builder_name()) |
721 return [] | 668 return [] |
722 revisions.append({ | 669 revisions.append({ |
723 "builder": result.builder_name(), | 670 "builder": result.builder_name(), |
724 "revision": result.blink_revision(), | 671 "revision": result.blink_revision(), |
725 }) | 672 }) |
726 return revisions | 673 return revisions |
727 | 674 |
728 def tests_to_rebaseline(self, tool, min_revision, print_revisions, log_server): | 675 def tests_to_rebaseline(self, tool, min_revision, print_revisions): |
729 port = tool.port_factory.get() | 676 port = tool.port_factory.get() |
730 expectations_file_path = port.path_to_generic_test_expectations_file() | 677 expectations_file_path = port.path_to_generic_test_expectations_file() |
731 | 678 |
732 tests = set() | 679 tests = set() |
733 revision = None | 680 revision = None |
734 author = None | 681 author = None |
735 bugs = set() | 682 bugs = set() |
736 has_any_needs_rebaseline_lines = False | 683 has_any_needs_rebaseline_lines = False |
737 | 684 |
738 for line in tool.scm().blame(expectations_file_path).split("\n"): | 685 for line in tool.scm().blame(expectations_file_path).split("\n"): |
739 comment_index = line.find("#") | 686 comment_index = line.find("#") |
740 if comment_index == -1: | 687 if comment_index == -1: |
741 comment_index = len(line) | 688 comment_index = len(line) |
742 line_without_comments = re.sub(r"\s+", " ", line[:comment_index].strip()) | 689 line_without_comments = re.sub(r"\s+", " ", line[:comment_index].strip()) |
743 | 690 |
744 if "NeedsRebaseline" not in line_without_comments: | 691 if "NeedsRebaseline" not in line_without_comments: |
745 continue | 692 continue |
746 | 693 |
747 if not has_any_needs_rebaseline_lines: | |
748 self._start_new_log_entry(log_server) | |
749 has_any_needs_rebaseline_lines = True | 694 has_any_needs_rebaseline_lines = True |
750 | 695 |
751 parsed_line = re.match("^(\S*)[^(]*\((\S*).*?([^ ]*)\ \[[^[]*$", line_without_comments) | 696 parsed_line = re.match("^(\S*)[^(]*\((\S*).*?([^ ]*)\ \[[^[]*$", line_without_comments) |
752 | 697 |
753 commit_hash = parsed_line.group(1) | 698 commit_hash = parsed_line.group(1) |
754 svn_revision = tool.scm().svn_revision_from_git_commit(commit_hash) | 699 svn_revision = tool.scm().svn_revision_from_git_commit(commit_hash) |
755 | 700 |
756 test = parsed_line.group(3) | 701 test = parsed_line.group(3) |
757 if print_revisions: | 702 if print_revisions: |
758 _log.info("%s is waiting for r%s" % (test, svn_revision)) | 703 _log.info("%s is waiting for r%s" % (test, svn_revision)) |
(...skipping 50 matching lines...)
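
The re.match call above pulls three pieces out of each blamed NeedsRebaseline line: the commit hash, the author, and the test path that precedes the final "[ ... ]" modifier block. A minimal sketch of how the groups fall out against a made-up blame line (the blame format shown here is an assumption for illustration, not taken from the patch):

    import re

    # Hypothetical blame output for one NeedsRebaseline expectation line.
    blame_line = ('624caaa0 (foo@chromium.org 2014-04-28 04:52:41 1234) '
                  'crbug.com/24182 fast/dom/prototype-taco.html [ NeedsRebaseline ]')

    # Same pattern the command applies to line_without_comments.
    parsed = re.match(r"^(\S*)[^(]*\((\S*).*?([^ ]*)\ \[[^[]*$", blame_line)
    print(parsed.group(1))  # commit hash: 624caaa0
    print(parsed.group(2))  # author: foo@chromium.org
    print(parsed.group(3))  # test path: fast/dom/prototype-taco.html
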
809 lines_to_remove[test].append(builder_name) | 754 lines_to_remove[test].append(builder_name) |
810 test_prefix_list[test][builder_name] = BASELINE_SUFFIX_LIST | 755 test_prefix_list[test][builder_name] = BASELINE_SUFFIX_LIST |
811 | 756 |
812 return test_prefix_list, lines_to_remove | 757 return test_prefix_list, lines_to_remove |
813 | 758 |
814 def _run_git_cl_command(self, options, command): | 759 def _run_git_cl_command(self, options, command): |
815 subprocess_command = ['git', 'cl'] + command | 760 subprocess_command = ['git', 'cl'] + command |
816 if options.verbose: | 761 if options.verbose: |
817 subprocess_command.append('--verbose') | 762 subprocess_command.append('--verbose') |
818 | 763 |
819 process = self._tool.executive.popen(subprocess_command, stdout=self._tool.executive.PIPE) | 764 process = self._tool.executive.popen(subprocess_command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.PIPE) |
820 last_output_time = time.time() | 765 last_output_time = time.time() |
821 | 766 |
822 # git cl sometimes completely hangs. Bail if we haven't gotten any output to stdout/stderr in a while. | 767 # git cl sometimes completely hangs. Bail if we haven't gotten any output to stdout/stderr in a while. |
823 while process.poll() == None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP: | 768 while process.poll() == None and time.time() < last_output_time + self.SECONDS_BEFORE_GIVING_UP: |
824 # FIXME: Also log stderr. | 769 # FIXME: This isn't awesome. It may improperly interleave stdout and stderr? |
825 out = process.stdout.readline().rstrip('\n') | 770 out = process.stdout.readline().rstrip('\n') |
826 if out: | 771 if out: |
827 last_output_time = time.time() | 772 last_output_time = time.time() |
828 _log.info(out) | 773 _log.info(out) |
829 | 774 |
| 775 err = process.stderr.readline().rstrip('\n') |
| 776 if err: |
| 777 last_output_time = time.time() |
| 778 _log.error(err) |
| 779 |
830 if process.poll() == None: | 780 if process.poll() == None: |
831 _log.error('Command hung: %s' % subprocess_command) | 781 _log.error('Command hung: %s' % subprocess_command) |
832 return False | 782 return False |
833 return True | 783 return True |
834 | 784 |
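
The loop above keeps a rolling deadline: every line of output pushes last_output_time forward, and the command gives up once the child has been silent for SECONDS_BEFORE_GIVING_UP. A self-contained sketch of the same bail-if-silent pattern, using plain subprocess rather than the tool's executive wrapper (the helper name and the kill() at the end are additions for the sketch, not part of the patch):

    import subprocess
    import time

    SECONDS_BEFORE_GIVING_UP = 300

    def run_with_output_watchdog(command):
        # Echo each line the child prints and reset the deadline whenever
        # output arrives. readline() blocks, so the deadline is only
        # re-checked between lines -- the same caveat the loop above has.
        process = subprocess.Popen(command, stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   universal_newlines=True)
        last_output_time = time.time()
        while process.poll() is None and time.time() < last_output_time + SECONDS_BEFORE_GIVING_UP:
            line = process.stdout.readline().rstrip('\n')
            if line:
                last_output_time = time.time()
                print(line)
        if process.poll() is None:
            # Unlike the command above, the sketch also kills the stuck child.
            process.kill()
            return False
        return True
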
835 # FIXME: Move this somewhere more general. | 785 # FIXME: Move this somewhere more general. |
836 def tree_status(self): | 786 def tree_status(self): |
837 blink_tree_status_url = "http://blink-status.appspot.com/status" | 787 blink_tree_status_url = "http://blink-status.appspot.com/status" |
838 status = urllib2.urlopen(blink_tree_status_url).read().lower() | 788 status = urllib2.urlopen(blink_tree_status_url).read().lower() |
839 if status.find('closed') != -1 or status == "0": | 789 if status.find('closed') != -1 or status == "0": |
840 return 'closed' | 790 return 'closed' |
841 elif status.find('open') != -1 or status == "1": | 791 elif status.find('open') != -1 or status == "1": |
842 return 'open' | 792 return 'open' |
843 return 'unknown' | 793 return 'unknown' |
844 | 794 |
845 def execute(self, options, args, tool): | 795 def execute(self, options, args, tool): |
846 if tool.scm().executable_name == "svn": | 796 if tool.scm().executable_name == "svn": |
847 _log.error("Auto rebaseline only works with a git checkout.") | 797 _log.error("Auto rebaseline only works with a git checkout.") |
848 return | 798 return |
849 | 799 |
850 if tool.scm().has_working_directory_changes(): | 800 if tool.scm().has_working_directory_changes(): |
851 _log.error("Cannot proceed with working directory changes. Clean working directory first.") | 801 _log.error("Cannot proceed with working directory changes. Clean working directory first.") |
852 return | 802 return |
853 | 803 |
854 self._configure_logging(options.log_server) | 804 revision_data = self.bot_revision_data() |
855 | |
856 revision_data = self.bot_revision_data(options.log_server) | |
857 if not revision_data: | 805 if not revision_data: |
858 return | 806 return |
859 | 807 |
860 min_revision = int(min([item["revision"] for item in revision_data])) | 808 min_revision = int(min([item["revision"] for item in revision_data])) |
861 tests, revision, author, bugs, has_any_needs_rebaseline_lines = self.tests_to_rebaseline(tool, min_revision, print_revisions=options.verbose, log_server=options.log_server) | 809 tests, revision, author, bugs, has_any_needs_rebaseline_lines = self.tests_to_rebaseline(tool, min_revision, print_revisions=options.verbose) |
862 | |
863 if not has_any_needs_rebaseline_lines: | |
864 self._log_no_needs_rebaseline_lines(options.log_server) | |
865 return | |
866 | 810 |
867 if options.verbose: | 811 if options.verbose: |
868 _log.info("Min revision across all bots is %s." % min_revision) | 812 _log.info("Min revision across all bots is %s." % min_revision) |
869 for item in revision_data: | 813 for item in revision_data: |
870 _log.info("%s: r%s" % (item["builder"], item["revision"])) | 814 _log.info("%s: r%s" % (item["builder"], item["revision"])) |
871 | 815 |
872 if not tests: | 816 if not tests: |
873 _log.debug('No tests to rebaseline.') | 817 _log.debug('No tests to rebaseline.') |
874 return | 818 return |
875 | 819 |
(...skipping 36 matching lines...)
912 tool.scm().checkout_branch(old_branch_name) | 856 tool.scm().checkout_branch(old_branch_name) |
913 tool.scm().delete_branch(self.AUTO_REBASELINE_BRANCH_NAME) | 857 tool.scm().delete_branch(self.AUTO_REBASELINE_BRANCH_NAME) |
914 | 858 |
915 | 859 |
916 class RebaselineOMatic(AbstractDeclarativeCommand): | 860 class RebaselineOMatic(AbstractDeclarativeCommand): |
917 name = "rebaseline-o-matic" | 861 name = "rebaseline-o-matic" |
918 help_text = "Calls webkit-patch auto-rebaseline in a loop." | 862 help_text = "Calls webkit-patch auto-rebaseline in a loop." |
919 show_in_main_help = True | 863 show_in_main_help = True |
920 | 864 |
921 SLEEP_TIME_IN_SECONDS = 30 | 865 SLEEP_TIME_IN_SECONDS = 30 |
| 866 LOG_SERVER = 'blinkrebaseline.appspot.com' |
| 867 |
| 868 # Uploaded log entries append to the existing entry unless the |
| 869 # newentry flag is set. In that case it starts a new entry to |
| 870 # start appending to. |
| 871 def _log_to_server(self, log='', is_new_entry=False): |
| 872 query = { |
| 873 'log': log, |
| 874 } |
| 875 if is_new_entry: |
| 876 query['newentry'] = 'on' |
| 877 urllib2.urlopen("http://" + self.LOG_SERVER + "/updatelog", data=urllib.urlencode(query)) |
| 878 |
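
Per the comment above, the log server treats a POST without the newentry flag as an append to the most recent entry, and one with newentry=on as the start of a fresh entry. A short usage sketch of that sequence, assuming the same /updatelog endpoint and parameters the patch uses (the update_log helper is hypothetical):

    import urllib
    import urllib2

    LOG_SERVER = 'blinkrebaseline.appspot.com'

    def update_log(log='', new_entry=False):
        # 'newentry=on' opens a new log entry; otherwise 'log' is appended
        # to the most recent one.
        query = {'log': log}
        if new_entry:
            query['newentry'] = 'on'
        urllib2.urlopen('http://' + LOG_SERVER + '/updatelog',
                        data=urllib.urlencode(query))

    update_log(new_entry=True)                   # start a fresh entry for this run
    update_log('git pull')                       # subsequent calls append to it
    update_log('webkit-patch auto-rebaseline')
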
| 879 def _run_logged_command(self, command): |
| 880 process = self._tool.executive.popen(command, stdout=self._tool.executive.PIPE, stderr=self._tool.executive.PIPE) |
| 881 while process.poll() == None: |
| 882 # FIXME: This should probably batch up lines if they're available and log to the server once. |
| 883 out = process.stdout.readline() |
| 884 if out: |
| 885 self._log_to_server(out) |
| 886 |
| 887 err = process.stderr.readline() |
| 888 if err: |
| 889 self._log_to_server(err) |
| 890 |
| 891 def _do_one_rebaseline(self, verbose): |
| 892 try: |
| 893 old_branch_name = self._tool.scm().current_branch() |
| 894 self._log_to_server(is_new_entry=True) |
| 895 self._run_logged_command(['git', 'pull']) |
| 896 rebaseline_command = [self._tool.filesystem.join(self._tool.scm().checkout_root, 'Tools', 'Scripts', 'webkit-patch'), 'auto-rebaseline'] |
| 897 if verbose: |
| 898 rebaseline_command.append('--verbose') |
| 899 self._run_logged_command(rebaseline_command) |
| 900 except: |
| 901 traceback.print_exc(file=sys.stderr) |
| 902 # Sometimes git crashes and leaves us on a detached head. |
| 903 self._tool.scm().checkout_branch(old_branch_name) |
922 | 904 |
923 def execute(self, options, args, tool): | 905 def execute(self, options, args, tool): |
924 while True: | 906 while True: |
925 try: | 907 self._do_one_rebaseline(options.verbose) |
926 old_branch_name = tool.scm().current_branch() | |
927 tool.executive.run_command(['git', 'pull']) | |
928 rebaseline_command = [tool.filesystem.join(tool.scm().checkout_root, 'Tools', 'Scripts', 'webkit-patch'), 'auto-rebaseline', '--log-server', 'blinkrebaseline.appspot.com'] | |
929 if options.verbose: | |
930 rebaseline_command.append('--verbose') | |
931 # Use call instead of run_command so that stdout doesn't get swallowed. | |
932 tool.executive.call(rebaseline_command) | |
933 except: | |
934 traceback.print_exc(file=sys.stderr) | |
935 # Sometimes git crashes and leaves us on a detached head. | |
936 tool.scm().checkout_branch(old_branch_name) | |
937 | |
938 time.sleep(self.SLEEP_TIME_IN_SECONDS) | 908 time.sleep(self.SLEEP_TIME_IN_SECONDS) |