Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import re | 5 import re |
| 6 import manual_bisect_files | 6 import manual_bisect_files |
| 7 from recipe_engine import recipe_api | 7 from recipe_engine import recipe_api |
| 8 | 8 |
| 9 | 9 |
| 10 # TODO(machenbach): Chromium specific data should move out of the archive | 10 # TODO(machenbach): Chromium specific data should move out of the archive |
| (...skipping 74 matching lines...) | |
| 85 artifacts implemented as a wrapper around zip_build.py script. | 85 artifacts implemented as a wrapper around zip_build.py script. |
| 86 | 86 |
| 87 If you need to upload or download build artifacts (or any other files) for | 87 If you need to upload or download build artifacts (or any other files) for |
| 88 something other than Chromium flavor, consider using 'zip' + 'gsutil' or | 88 something other than Chromium flavor, consider using 'zip' + 'gsutil' or |
| 89 'isolate' modules instead. | 89 'isolate' modules instead. |
| 90 """ | 90 """ |
| 91 def zip_and_upload_build( | 91 def zip_and_upload_build( |
| 92 self, step_name, target, build_url=None, src_dir=None, | 92 self, step_name, target, build_url=None, src_dir=None, |
| 93 build_revision=None, cros_board=None, package_dsym_files=False, | 93 build_revision=None, cros_board=None, package_dsym_files=False, |
| 94 exclude_files=None, exclude_perf_test_files=False, | 94 exclude_files=None, exclude_perf_test_files=False, |
| 95 update_properties=None, store_by_hash=True, **kwargs): | 95 update_properties=None, store_by_hash=True, |
| | 96 include_bisect_file_list=None, include_bisect_strip_list=None, |
| | 97 include_bisect_whitelist=None, **kwargs): |
|
dtu
2016/08/30 02:40:22
This is a lot of plumbing/parameters in common cod
miimnk
2016/08/30 21:14:54
Yes. Instead, passed only platform name to get the
| |
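The thread above breaks off mid-sentence, but the concern is that threading three bisect-specific parameters through common code is heavy, and the reply suggests deriving them from a single platform name instead. A hedged sketch of that alternative; the helper and its lookup table are hypothetical, not part of manual_bisect_files:

```python
# Hypothetical helper: map a platform name to the bisect archive settings
# instead of passing three separate lists through zip_and_upload_build().
BISECT_ARCHIVE_CONFIG = {
    'linux': {
        'file_list': ['chrome', 'chrome_100_percent.pak'],  # illustrative
        'strip_list': ['chrome'],                           # illustrative
        'whitelist': None,
    },
}

def bisect_archive_settings(platform):
  # Returns (include_files, strip_files, whitelist) for the platform,
  # or empty settings if the platform has no bisect configuration.
  cfg = BISECT_ARCHIVE_CONFIG.get(platform, {})
  return (cfg.get('file_list'), cfg.get('strip_list'), cfg.get('whitelist'))
```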
| 96 """Returns a step invoking zip_build.py to zip up a Chromium build. | 98 """Returns a step invoking zip_build.py to zip up a Chromium build. |
| 97 If build_url is specified, also uploads the build.""" | 99 If build_url is specified, also uploads the build.""" |
| 98 if not src_dir: | 100 if not src_dir: |
| 99 src_dir = self.m.path['checkout'] | 101 src_dir = self.m.path['checkout'] |
| 100 args = [ | 102 args = [ |
| 101 '--show-path', | 103 '--show-path', |
| 102 'python', | 104 'python', |
| 103 self.package_repo_resource('scripts', 'slave', 'zip_build.py'), | 105 self.package_repo_resource('scripts', 'slave', 'zip_build.py'), |
| 104 '--target', target, | 106 '--target', target, |
| 105 '--gsutil-py-path', self.m.depot_tools.gsutil_py_path, | 107 '--gsutil-py-path', self.m.depot_tools.gsutil_py_path, |
| 106 '--staging-dir', self.m.path['cache'].join('chrome_staging'), | 108 '--staging-dir', self.m.path['cache'].join('chrome_staging'), |
| 107 '--src-dir', src_dir, | 109 '--src-dir', src_dir, |
| 108 ] | 110 ] |
| 109 if 'build_archive_url' in self.m.properties: | 111 if 'build_archive_url' in self.m.properties: |
| 110 args.extend(['--use-build-url-name', '--build-url', | 112 args.extend(['--use-build-url-name', '--build-url', |
| 111 self.m.properties['build_archive_url']]) | 113 self.m.properties['build_archive_url']]) |
| 112 elif build_url: | 114 elif build_url: |
| 113 args.extend(['--build-url', build_url]) | 115 args.extend(['--build-url', build_url]) |
| 114 if build_revision: | 116 if build_revision: |
| 115 args.extend(['--build_revision', build_revision]) | 117 args.extend(['--build_revision', build_revision]) |
| 116 if cros_board: | 118 if cros_board: |
| 117 args.extend(['--cros-board', cros_board]) | 119 args.extend(['--cros-board', cros_board]) |
| 118 if package_dsym_files: | 120 if package_dsym_files: |
| 119 args.append('--package-dsym-files') | 121 args.append('--package-dsym-files') |
| 120 if exclude_files: | 122 if exclude_files: |
| 121 args.extend(['--exclude-files', exclude_files]) | 123 args.extend(['--exclude-files', exclude_files]) |
| 122 if 'gs_acl' in self.m.properties: | 124 if 'gs_acl' in self.m.properties: |
| 123 args.extend(['--gs-acl', self.m.properties['gs_acl']]) | 125 args.extend(['--gs-acl', self.m.properties['gs_acl']]) |
| 124 if exclude_perf_test_files: | 126 if exclude_perf_test_files: |
| 125 inclusions = ','.join(manual_bisect_files.CHROME_REQUIRED_FILES) | 127 if include_bisect_file_list: |
| 126 strip_files = ','.join(manual_bisect_files.CHROME_STRIP_LIST) | 128 inclusions = ','.join(include_bisect_file_list) |
| 127 args.extend(['--include-files', inclusions]) | 129 args.extend(['--include-files', inclusions]) |
| 128 args.extend(['--ignore-regex']) | 130 if include_bisect_strip_list: |
| 129 args.extend(['--strip-files', strip_files]) | 131 strip_files = ','.join(include_bisect_strip_list) |
| | 132 args.extend(['--strip-files', strip_files]) |
| | 133 if include_bisect_whitelist: |
| | 134 args.extend(['--whitelist', include_bisect_whitelist]) |
| | 135 args.extend(['--not-include-extra']) |
| | 136 |
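With the new branch, zip_build.py only receives --include-files, --strip-files, and --whitelist when the caller supplies the corresponding bisect arguments alongside exclude_perf_test_files. An illustrative call site under that assumption (builder and file names are made up):

```python
# Illustrative caller: the bisect lists feed the new zip_build.py flags;
# omitting any of them simply skips the matching flag.
api.archive.zip_and_upload_build(
    'package build for bisect',
    target='Release',
    build_url=build_url,                      # destination for the upload
    exclude_perf_test_files=True,
    include_bisect_file_list=['chrome', 'chrome_100_percent.pak'],
    include_bisect_strip_list=['chrome'],
    include_bisect_whitelist=None)            # no --whitelist flag passed
```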
| 130 # If update_properties is passed in and store_by_hash is False, | 137 # If update_properties is passed in and store_by_hash is False, |
| 131 # we store it with commit position number instead of a hash | 138 # we store it with commit position number instead of a hash |
| 132 if update_properties and not store_by_hash: | 139 if update_properties and not store_by_hash: |
| 133 commit_position = self._get_commit_position( | 140 commit_position = self._get_commit_position( |
| 134 update_properties, None) | 141 update_properties, None) |
| 135 cp_branch, cp_number = self.m.commit_position.parse(commit_position) | 142 cp_branch, cp_number = self.m.commit_position.parse(commit_position) |
| 136 args.extend(['--build_revision', cp_number]) | 143 args.extend(['--build_revision', cp_number]) |
| 137 | 144 |
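For reference on the store_by_hash=False path above: Chromium commit positions follow the refs/heads/<branch>@{#<number>} convention, and the commit_position module splits that into a branch and a number. A self-contained sketch of the same parse; the regex is illustrative, not the module's actual implementation:

```python
import re

# Parses a commit position such as 'refs/heads/master@{#421003}'
# into ('refs/heads/master', 421003).
_COMMIT_POSITION_RE = re.compile(r'^(?P<branch>.+)@\{#(?P<number>\d+)\}$')

def parse_commit_position(value):
  match = _COMMIT_POSITION_RE.match(value)
  if not match:
    raise ValueError('invalid commit position: %r' % value)
  return match.group('branch'), int(match.group('number'))
```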
| 138 properties_json = self.m.json.dumps(self.m.properties.legacy()) | 145 properties_json = self.m.json.dumps(self.m.properties.legacy()) |
| 139 args.extend(['--factory-properties', properties_json, | 146 args.extend(['--factory-properties', properties_json, |
| (...skipping 285 matching lines...) | |
| 425 def legacy_download_url(self, gs_bucket_name, extra_url_components=None): | 432 def legacy_download_url(self, gs_bucket_name, extra_url_components=None): |
| 426 """Returns a url suitable for downloading a Chromium build from | 433 """Returns a url suitable for downloading a Chromium build from |
| 427 Google Storage. | 434 Google Storage. |
| 428 | 435 |
| 429 extra_url_components, if specified, should be a string without a | 436 extra_url_components, if specified, should be a string without a |
| 430 trailing '/' which is inserted in the middle of the URL. | 437 trailing '/' which is inserted in the middle of the URL. |
| 431 | 438 |
| 432 The builder_name, or parent_buildername, is always automatically | 439 The builder_name, or parent_buildername, is always automatically |
| 433 inserted into the URL.""" | 440 inserted into the URL.""" |
| 434 return self._legacy_url(True, gs_bucket_name, extra_url_components) | 441 return self._legacy_url(True, gs_bucket_name, extra_url_components) |
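A short usage note for legacy_download_url: the builder name (or parent_buildername) is inserted automatically, so callers only pass the bucket and, optionally, an extra path component. The bucket and component below are made-up examples:

```python
# Illustrative usage from another recipe; names are placeholders.
download_url = api.archive.legacy_download_url(
    'chromium-example-archive',
    extra_url_components='snapshots')   # no trailing '/', per the docstring
```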