| OLD | NEW |
| 1 # Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2011 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Set of utilities to add commands to a buildbot factory. | 5 """Set of utilities to add commands to a buildbot factory. |
| 6 | 6 |
| 7 This is based on commands.py and adds chromium-specific commands.""" | 7 This is based on commands.py and adds chromium-specific commands.""" |
| 8 | 8 |
| 9 import logging | 9 import logging |
| 10 import os | 10 import os |
| (...skipping 557 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 568 heapcheck_tool = os.path.join('..', '..', '..', self._heapcheck_tool) | 568 heapcheck_tool = os.path.join('..', '..', '..', self._heapcheck_tool) |
| 569 cmd.extend([heapcheck_tool, | 569 cmd.extend([heapcheck_tool, |
| 570 '--build_dir', build_dir, | 570 '--build_dir', build_dir, |
| 571 '--test', test_name]) | 571 '--test', test_name]) |
| 572 | 572 |
| 573 test_name = 'heapcheck test: %s' % test_name | 573 test_name = 'heapcheck test: %s' % test_name |
| 574 self.AddTestStep(gtest_command.GTestFullCommand, test_name, cmd, | 574 self.AddTestStep(gtest_command.GTestFullCommand, test_name, cmd, |
| 575 timeout=timeout, | 575 timeout=timeout, |
| 576 do_step_if=self.TestStepFilter) | 576 do_step_if=self.TestStepFilter) |
| 577 | 577 |
| 578 def _AddBasicPythonTest(self, test_name, script, args=None, timeout=1200): | 578 def _AddBasicPythonTest(self, test_name, script, args=None, |
| 579 command_class=retcode_command.ReturnCodeCommand, |
| 580 **kwargs): |
| 581 """Adds a Python based test step. |
| 582 |
| 583 Args: |
| 584 test_name: A string describing the test, used to build its logfile |
| 585 name and its descriptions in the waterfall display. |
| 586 script: Path to the Python test to execute. |
| 587 args: Arguments to pass on to the Python test command line. |
| 588 command_class: The command type to run, such as shell.ShellCommand or |
| 589 gtest_command.GTestCommand. |
| 590 kwargs: Additional arguments to pass on to the AddTestStep function. |
| 591 """ |
| 579 args = args or [] | 592 args = args or [] |
| 580 J = self.PathJoin | 593 J = self.PathJoin |
| 581 if self._target_platform == 'win32': | 594 if self._target_platform == 'win32': |
| 582 py26 = J('src', 'third_party', 'python_26', 'python_slave.exe') | 595 py26 = J('src', 'third_party', 'python_26', 'python_slave.exe') |
| 583 test_cmd = ['cmd', '/C'] + [py26, script] + args | 596 test_cmd = ['cmd', '/C'] + [py26, script] + args |
| 584 elif self._target_platform == 'darwin': | 597 elif self._target_platform == 'darwin': |
| 585 test_cmd = ['python2.6', script] + args | 598 test_cmd = ['python2.6', script] + args |
| 586 elif self._target_platform == 'linux2': | 599 elif self._target_platform == 'linux2': |
| 587 # Run thru runtest.py on linux to launch virtual x server | 600 # Run thru runtest.py on linux to launch virtual x server |
| 588 test_cmd = self.GetTestCommand('/usr/local/bin/python2.6', | 601 test_cmd = self.GetTestCommand('/usr/local/bin/python2.6', |
| 589 [script] + args) | 602 [script] + args) |
| 590 | 603 |
| 591 self.AddTestStep(retcode_command.ReturnCodeCommand, | 604 self.AddTestStep(command_class, |
| 592 test_name, | 605 test_name, |
| 593 test_cmd, | 606 test_cmd, |
| 594 timeout=timeout, | 607 do_step_if=self.TestStepFilter, |
| 595 do_step_if=self.TestStepFilter) | 608 **kwargs) |
| 596 | 609 |
| 597 def AddChromeDriverTest(self, timeout=1200): | 610 def AddChromeDriverTest(self, timeout=1200): |
| 598 J = self.PathJoin | 611 J = self.PathJoin |
| 599 script = J('src', 'chrome', 'test', 'webdriver', 'test', | 612 script = J('src', 'chrome', 'test', 'webdriver', 'test', |
| 600 'run_chromedriver_tests.py') | 613 'run_chromedriver_tests.py') |
| 601 self._AddBasicPythonTest('chromedriver_tests', script, timeout=timeout) | 614 self._AddBasicPythonTest('chromedriver_tests', script, timeout=timeout) |
| 602 | 615 |
| 603 def AddWebDriverTest(self, timeout=1200): | 616 def AddWebDriverTest(self, timeout=1200): |
| 604 J = self.PathJoin | 617 J = self.PathJoin |
| 605 script = J('src', 'chrome', 'test', 'webdriver', 'test', | 618 script = J('src', 'chrome', 'test', 'webdriver', 'test', |
| 606 'run_webdriver_tests.py') | 619 'run_webdriver_tests.py') |
| 607 self._AddBasicPythonTest('webdriver_tests', script, timeout=timeout) | 620 self._AddBasicPythonTest('webdriver_tests', script, timeout=timeout) |
| 608 | 621 |
| 609 def AddPyAutoFunctionalTest(self, test_name, timeout=1200, | 622 def AddPyAutoFunctionalTest(self, test_name, timeout=1200, |
| 610 workdir=None, | 623 workdir=None, |
| 611 src_base=None, | 624 src_base=None, |
| 612 suite=None, | 625 suite=None, |
| 613 factory_properties=None): | 626 factory_properties=None, |
| 627 perf=False): |
| 614 """Adds a step to run PyAuto functional tests. | 628 """Adds a step to run PyAuto functional tests. |
| 615 | 629 |
| 616 Args: | 630 Args: |
| 617 workdir: the working dir for this step | 631 workdir: the working dir for this step |
| 618 src_base: relative path (from workdir) to src. Not needed if workdir is | 632 src_base: relative path (from workdir) to src. Not needed if workdir is |
| 619 'build' (the default) | 633 'build' (the default) |
| 620 | 634 suite: PyAuto suite to execute. |
| 635 perf: Is this a perf test or not? Requires suite to be set. |
| 621 """ | 636 """ |
| 622 factory_properties = factory_properties or {} | 637 factory_properties = factory_properties or {} |
| 623 J = self.PathJoin | |
| 624 pyauto_script = J('src', 'chrome', 'test', 'functional', | |
| 625 'pyauto_functional.py') | |
| 626 # in case a '..' prefix is needed | |
| 627 if src_base: | |
| 628 pyauto_script = J(src_base, pyauto_script) | |
| 629 | 638 |
| 630 pyauto_functional_cmd = ['python', pyauto_script, '-v'] | 639 src_base = src_base or '' |
| 631 if self._target_platform == 'win32': | 640 pyauto_script = self.PathJoin( |
| 632 pyauto_functional_cmd = self.GetPythonTestCommand(pyauto_script, ['-v']) | 641 src_base, 'src', 'chrome', 'test', 'functional', 'pyauto_functional.py') |
| 633 if src_base: # Adjust runtest.py path if needed. | |
| 634 pyauto_functional_cmd[1] = J(src_base, pyauto_functional_cmd[1]) | |
| 635 elif self._target_platform == 'darwin': | |
| 636 pyauto_functional_cmd = self.GetTestCommand('/usr/bin/python2.5', | |
| 637 [pyauto_script, '-v']) | |
| 638 if src_base: # Adjust runtest.py path if needed. | |
| 639 pyauto_functional_cmd[1] = J(src_base, pyauto_functional_cmd[1]) | |
| 640 elif (self._target_platform.startswith('linux') and | |
| 641 factory_properties.get('use_xvfb_on_linux')): | |
| 642 # Run thru runtest.py on linux to launch virtual x server | |
| 643 pyauto_functional_cmd = self.GetTestCommand('/usr/bin/python', | |
| 644 [pyauto_script, '-v']) | |
| 645 | 642 |
| 643 args = ['-v'] |
| 646 if suite: | 644 if suite: |
| 647 pyauto_functional_cmd.append('--suite=%s' % suite) | 645 args.append('--suite=%s' % suite) |
| 648 self.AddTestStep(retcode_command.ReturnCodeCommand, | 646 |
| 649 test_name, | 647 # Python test step args applicable for all types of tests. |
| 650 pyauto_functional_cmd, | 648 step_args = {'test_name': test_name, 'script': pyauto_script, 'args': args, |
| 651 env={'PYTHONPATH': '.'}, | 649 'timeout': timeout, 'env': {'PYTHONPATH': '.'}, |
| 652 workdir=workdir, | 650 'workdir': workdir} |
| 653 timeout=timeout, | 651 |
| 654 do_step_if=self.GetTestStepFilter(factory_properties)) | 652 # Use special command class for parsing perf values from output. |
| 653 if perf and suite: |
| 654 step_args['command_class'] = self.GetPerfStepClass( |
| 655 factory_properties, suite.lower(), |
| 656 process_log.GraphingLogProcessor) |
| 657 |
| 658 self._AddBasicPythonTest(**step_args) |
| 655 | 659 |
| 656 def AddDevToolsTests(self, factory_properties=None): | 660 def AddDevToolsTests(self, factory_properties=None): |
| 657 factory_properties = factory_properties or {} | 661 factory_properties = factory_properties or {} |
| 658 c = self.GetPerfStepClass(factory_properties, 'devtools_perf', | 662 c = self.GetPerfStepClass(factory_properties, 'devtools_perf', |
| 659 process_log.GraphingLogProcessor) | 663 process_log.GraphingLogProcessor) |
| 660 | 664 |
| 661 cmd = [self._python, self._devtools_perf_test_tool, | 665 cmd = [self._python, self._devtools_perf_test_tool, |
| 662 '--target', self._target, | 666 '--target', self._target, |
| 663 '--build-dir', self._build_dir, | 667 '--build-dir', self._build_dir, |
| 664 'inspector' | 668 'inspector' |
| (...skipping 231 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 896 TODO(jrg): resolve this inconsistency with the | 900 TODO(jrg): resolve this inconsistency with the |
| 897 chrome-infrastructure team; we shouldn't need two functions.""" | 901 chrome-infrastructure team; we shouldn't need two functions.""" |
| 898 self._factory.addStep(chromium_step.AnnotatedCommand, | 902 self._factory.addStep(chromium_step.AnnotatedCommand, |
| 899 name=name, | 903 name=name, |
| 900 description=name, | 904 description=name, |
| 901 timeout=timeout, | 905 timeout=timeout, |
| 902 haltOnFailure=True, | 906 haltOnFailure=True, |
| 903 workdir=self._build_dir, | 907 workdir=self._build_dir, |
| 904 command=cmd) | 908 command=cmd) |
| 905 | 909 |
| 906 | 910 def AddMediaTests(self, test_groups, factory_properties=None, timeout=1200): |
| 907 def _GetPyAutoCmd(self, src_base=None, script=None, factory_properties=None, | 911 """Adds media test steps according to the specified test_groups. |
| 908 dataset=None, matrix=True, media_home=None, test_name=None, | |
| 909 http=False, nocache=False, verbose=False, suite_name=None, | |
| 910 reference_build_dir=None, track=False): | |
| 911 """Get PyAuto command line based on arguments. | |
| 912 | 912 |
| 913 Args: | 913 Args: |
| 914 src_base: relative path (from workdir) to src. Not needed if workdir is | 914 test_groups: List of (str:Name, bool:Perf?) tuples which should be |
| 915 'build' (the default). Needed when workdir is not 'build' (like | 915 translated into test steps. |
| 916 parent of 'build'). | 916 """ |
| 917 script: script name that contain main function for test. | 917 for group, is_perf in test_groups: |
| 918 factory_properties: factory properties. | 918 self.AddPyAutoFunctionalTest( |
| 919 dataset: data set file name for data. | 919 'media_tests_' + group.lower(), suite=group, timeout=timeout, |
| 920 matrix: if True, use matrix form test data. If False, use list form | 920 perf=is_perf, factory_properties=factory_properties) |
| 921 test data. | |
| 922 media_home: media files home location. | |
| 923 test_name: test name to be used for testing. | |
| 924 http: if True, run test with http mode (media is served by http server). | |
| 925 nocache: if True, run test with media cache disabled. | |
| 926 verbose: if True, run test with verbose mode. | |
| 927 suite_name: the PYAUTO suite name (it is in PYAUTO_TESTS). | |
| 928 reference_build_dir: a directory with reference build binary. | |
| 929 track: if True, run test with track functionality. | |
| 930 | 921 |
| 931 Returns: | |
| 932 a commandline list that can be executed | |
| 933 """ | |
| 934 factory_properties = factory_properties or {} | |
| 935 J = self.PathJoin | |
| 936 pyauto_script = J('src', 'chrome', 'test', 'functional', 'media', script) | |
| 937 # In case a '..' prefix is needed. | |
| 938 if src_base: | |
| 939 pyauto_script = J(src_base, pyauto_script) | |
| 940 if verbose: | |
| 941 argv = ['-v'] | |
| 942 else: | |
| 943 argv = [] | |
| 944 if suite_name: | |
| 945 argv.append('-s' + suite_name) | |
| 946 if matrix: | |
| 947 argv.append('-x' + dataset) | |
| 948 else: | |
| 949 argv.append('-i' + dataset) | |
| 950 if media_home and matrix: | |
| 951 argv.append('-r' + media_home) | |
| 952 if test_name: | |
| 953 argv.append('-t' + test_name) | |
| 954 if http: | |
| 955 # TODO (imasaki@): adding network test here. Currently, not used. | |
| 956 pass | |
| 957 if nocache: | |
| 958 # Disable cache. | |
| 959 argv.append('-c') | |
| 960 if reference_build_dir: | |
| 961 # Run the test with reference build | |
| 962 argv.append('-a') | |
| 963 # Set reference build directory. | |
| 964 argv.append('-k' + reference_build_dir) | |
| 965 if track: | |
| 966 argv.append('-j') | |
| 967 pyauto_functional_cmd = ['python', pyauto_script] + argv | |
| 968 if self._target_platform == 'win32': # win needs python26 | |
| 969 py26 = J('src', 'third_party', 'python_26', 'python_slave.exe') | |
| 970 if src_base: | |
| 971 py26 = J(src_base, py26) | |
| 972 pyauto_functional_cmd = ['cmd', '/C'] + [py26, pyauto_script, '-v'] | |
| 973 elif self._target_platform == 'darwin': | |
| 974 pyauto_functional_cmd = ['python2.5', pyauto_script, '-v'] | |
| 975 elif (self._target_platform == 'linux2' and | |
| 976 factory_properties.get('use_xvfb_on_linux')): | |
| 977 # Run through runtest.py on linux to launch virtual X server. | |
| 978 pyauto_functional_cmd = self.GetTestCommand('/usr/bin/python', | |
| 979 [pyauto_script] + argv) | |
| 980 # Adjust runtest.py location. | |
| 981 pyauto_functional_cmd[1] = os.path.join(os.path.pardir, | |
| 982 pyauto_functional_cmd[1]) | |
| 983 return pyauto_functional_cmd | |
| 984 | |
| 985 # pylint: disable=R0201 | |
| 986 def _GetTestAvPerfFileNames(self, matrix=True): | |
| 987 """Generates AVPerf file names needed for testing. | |
| 988 | |
| 989 Args: | |
| 990 matrix: if True, use matrix form test data. If False, use list form | |
| 991 test data. | |
| 992 | |
| 993 Returns: | |
| 994 a list of file names used for AVPerf tests. | |
| 995 """ | |
| 996 file_names = [] | |
| 997 if matrix: | |
| 998 # Matrix form input file. The corresponding data is in | |
| 999 # chrome/test/data/media/csv/media_matrix_data_public.csv. | |
| 1000 video_exts = ['webm', 'ogv', 'mp4'] | |
| 1001 audio_exts = ['wav', 'ogg', 'mp3'] | |
| 1002 av_titles = ['bear', 'tulip'] | |
| 1003 video_channels = [0, 1, 2] | |
| 1004 audio_channels = [1, 2] | |
| 1005 for av_title in av_titles: | |
| 1006 for video_ext in video_exts: | |
| 1007 for video_channel in video_channels: | |
| 1008 file_names.append(''.join([av_title, str(video_channel), '.', | |
| 1009 video_ext])) | |
| 1010 for audio_ext in audio_exts: | |
| 1011 for audio_channel in audio_channels: | |
| 1012 file_names.append(''.join([av_title, str(audio_channel), '.', | |
| 1013 audio_ext])) | |
| 1014 else: | |
| 1015 # List form input file. The corresponding data is in | |
| 1016 # chrome/test/data/media/csv/media_list_data.csv. | |
| 1017 file_names = ['bear_silent.ogv', 'bear.ogv', 'bear.wav', 'bear.webm'] | |
| 1018 return file_names | |
| 1019 | |
| 1020 def _GenerateAvPerfTests(self, matrix=True, | |
| 1021 number_of_media_files=None, | |
| 1022 number_of_tests_per_media=None, | |
| 1023 suite_name=None): | |
| 1024 """Generate tests based on AVPerf files and parameter configuration. | |
| 1025 | |
| 1026 Args: | |
| 1027 matrix: if True, use matrix form test data. If False, use list form | |
| 1028 test data. | |
| 1029 number_of_media_files: an integer to limit the number of media files | |
| 1030 used. If None, use all media files available. | |
| 1031 number_of_tests_per_media: an integer to limit the number of test cases | |
| 1032 per media file used. If None, use all tests available. | |
| 1033 suite_name: the PYAUTO suite name (it is in PYAUTO_TESTS). | |
| 1034 | |
| 1035 Returns: | |
| 1036 a list of dictionaries, each dictionary containing the following keys: | |
| 1037 perf_name, file_name, http, nocache, track. The value of | |
| 1038 perf_name is a string for the name of the performance | |
| 1039 test. The value of file_name is a string for the name of media | |
| 1040 file used for testing. The value of http is a boolean to indicate | |
| 1041 whether the media file is retrieved via http (rather than local | |
| 1042 file). The value of nocache is a boolean to indicate whether | |
| 1043 the test disables media cache. The value of track is a boolean to | |
| 1044 indicate if the test uses track (caption). | |
| 1045 """ | |
| 1046 suite_name_map = {'AV_PERF': 'avperf', | |
| 1047 'AV_FUNC': 'avfunc'} | |
| 1048 # TODO (imasaki@chromium.org): add more parameters. | |
| 1049 parameter_configration = [ | |
| 1050 {'name': suite_name_map[suite_name]}, | |
| 1051 {'suite': 'AV_PERF', 'name': suite_name_map[suite_name], 'nocache': True}, | |
| 1052 {'name': suite_name_map[suite_name], 'track': True}, | |
| 1053 ] | |
| 1054 tests = [] | |
| 1055 for media_counter, file_name in ( | |
| 1056 enumerate(self._GetTestAvPerfFileNames(matrix))): | |
| 1057 for test_counter, parameter in enumerate(parameter_configration): | |
| 1058 if not parameter.get('suite', False) or ( | |
| 1059 (parameter.get('suite', False) and ( | |
| 1060 suite_name == parameter.get('suite')))): | |
| 1061 simple_file_name = file_name.replace('.', '') | |
| 1062 simple_file_name = simple_file_name.replace('_','') | |
| 1063 name = '%s-%s' % (parameter.get('name',''), simple_file_name) | |
| 1064 if parameter.get('http', False): | |
| 1065 name += '-http' | |
| 1066 if parameter.get('nocache', False): | |
| 1067 name += '-nocache' | |
| 1068 if parameter.get('track', False): | |
| 1069 name += '-track' | |
| 1070 tests.append({'perf_name': name, | |
| 1071 'file_name': file_name, | |
| 1072 'http': parameter.get('http', False), | |
| 1073 'nocache': parameter.get('nocache', False), | |
| 1074 'track': parameter.get('track', False)}) | |
| 1075 if number_of_tests_per_media and ( | |
| 1076 test_counter + 1 == number_of_tests_per_media): | |
| 1077 break | |
| 1078 if number_of_media_files and media_counter + 1 == number_of_media_files: | |
| 1079 break | |
| 1080 return tests | |
| 1081 | |
| 1082 def AddAvPerfTests(self, timeout=1200, workdir=None, src_base=None, | |
| 1083 factory_properties=None, matrix=True, | |
| 1084 number_of_media_files=None, | |
| 1085 number_of_tests_per_media=None, | |
| 1086 suite_name=None): | |
| 1087 """Add media performance tests. | |
| 1088 | |
| 1089 This method calls media_test_runner.py which runs AVPerf tests (including | |
| 1090 performance tests) with appropriate parameters (such as input data file | |
| 1091 name). | |
| 1092 | |
| 1093 Args: | |
| 1094 timeout: timeout value. | |
| 1095 workdir: working directory. | |
| 1096 src_base: src base directory. | |
| 1097 factory_properties: | |
| 1098 matrix: if True, use matrix form test data. If False, use list form | |
| 1099 test data. | |
| 1100 number_of_media_files: an integer to limit the number of media files | |
| 1101 used. If None, use all media files available. | |
| 1102 number_of_tests_per_media: an integer to limit the number of test cases | |
| 1103 per media file used. If None, use all tests available. | |
| 1104 suite_name: the PYAUTO suite name (it is in PYAUTO_TESTS). | |
| 1105 """ | |
| 1106 reference_build_dir_name_mapping = { | |
| 1107 'win32': 'win', | |
| 1108 'darwin': 'mac', | |
| 1109 'linux': 'linux', | |
| 1110 'linux2': 'linux64', | |
| 1111 } | |
| 1112 reference_build_dir_name = 'chrome_%s' % ( | |
| 1113 reference_build_dir_name_mapping[self._target_platform]) | |
| 1114 J = self.PathJoin | |
| 1115 reference_build_dir = J('src', 'chrome', 'test', 'tools', | |
| 1116 'reference_build', reference_build_dir_name) | |
| 1117 matrix_dataset = J('src', 'chrome', 'test', 'data', 'media', 'csv', | |
| 1118 'media_matrix_data_public.csv') | |
| 1119 list_dataset = J('src', 'chrome', 'test', 'data', 'media', 'csv', | |
| 1120 'media_list_data.csv') | |
| 1121 media_home = J('avperf') | |
| 1122 if matrix: | |
| 1123 dataset = matrix_dataset | |
| 1124 else: | |
| 1125 dataset = list_dataset | |
| 1126 # in case a '..' prefix is needed. | |
| 1127 if src_base: | |
| 1128 dataset = J(src_base, dataset) | |
| 1129 media_home = J(src_base, media_home) | |
| 1130 reference_build_dir = J(src_base, reference_build_dir) | |
| 1131 for test in self._GenerateAvPerfTests(matrix, number_of_media_files, | |
| 1132 number_of_tests_per_media, | |
| 1133 suite_name): | |
| 1134 # Use verbose mode to see which binary to use. | |
| 1135 pyauto_functional_cmd = self._GetPyAutoCmd( | |
| 1136 src_base=src_base, script='media_test_runner.py', | |
| 1137 factory_properties=factory_properties, | |
| 1138 dataset=dataset, matrix=matrix, media_home=media_home, | |
| 1139 test_name=test['file_name'], http=test['http'], | |
| 1140 nocache=test['nocache'], suite_name=suite_name, | |
| 1141 reference_build_dir=reference_build_dir, | |
| 1142 track=test['track'], verbose=True) | |
| 1143 test['class'] = self.GetPerfStepClass(factory_properties, 'avperf', | |
| 1144 process_log.GraphingLogProcessor) | |
| 1145 test['step_name'] = test['perf_name'] | |
| 1146 self.AddTestStep(test['class'], test['step_name'], pyauto_functional_cmd, | |
| 1147 workdir=workdir, timeout=timeout) | |
| 1148 | 922 |
| 1149 def _GetArchiveUrl(archive_type, builder_name='%(build_name)s'): | 923 def _GetArchiveUrl(archive_type, builder_name='%(build_name)s'): |
| 1150 # The default builder name is dynamically filled in by | 924 # The default builder name is dynamically filled in by |
| 1151 # ArchiveCommand.createSummary. | 925 # ArchiveCommand.createSummary. |
| 1152 return '%s/%s/%s' % (config.Master.archive_url, archive_type, builder_name) | 926 return '%s/%s/%s' % (config.Master.archive_url, archive_type, builder_name) |
| 1153 | 927 |
| 1154 def _GetSnapshotUrl(factory_properties=None, builder_name='%(build_name)s'): | 928 def _GetSnapshotUrl(factory_properties=None, builder_name='%(build_name)s'): |
| 1155 if not factory_properties or 'gs_bucket' not in factory_properties: | 929 if not factory_properties or 'gs_bucket' not in factory_properties: |
| 1156 return (_GetArchiveUrl('snapshots', builder_name), None) | 930 return (_GetArchiveUrl('snapshots', builder_name), None) |
| 1157 gs_bucket = factory_properties['gs_bucket'] | 931 gs_bucket = factory_properties['gs_bucket'] |
| 1158 gs_bucket = re.sub(r'^gs://', 'http://commondatastorage.googleapis.com/', | 932 gs_bucket = re.sub(r'^gs://', 'http://commondatastorage.googleapis.com/', |
| 1159 gs_bucket) | 933 gs_bucket) |
| 1160 return ('%s/index.html?path=%s' % (gs_bucket, builder_name), '/') | 934 return ('%s/index.html?path=%s' % (gs_bucket, builder_name), '/') |
| OLD | NEW |