OLD | NEW |
---|---|
1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import re | 5 import re |
6 | 6 |
7 from recipe_engine import recipe_api | 7 from recipe_engine import recipe_api |
| 8 from . import perf_test_files |
8 | 9 |
9 | 10 |
10 # TODO(machenbach): Chromium specific data should move out of the archive | 11 # TODO(machenbach): Chromium specific data should move out of the archive |
11 # module, into e.g. the chromium test configs. | 12 # module, into e.g. the chromium test configs. |
12 EXCLUDED_FILES_ALL_PLATFORMS = [ | 13 EXCLUDED_FILES_ALL_PLATFORMS = [ |
13 '.landmines', | 14 '.landmines', |
14 '.ninja_deps', | 15 '.ninja_deps', |
15 '.ninja_log', | 16 '.ninja_log', |
16 'gen', | 17 'gen', |
17 'obj', | 18 'obj', |
(...skipping 76 matching lines...)
94 build_revision=None, cros_board=None, package_dsym_files=False, | 95 build_revision=None, cros_board=None, package_dsym_files=False, |
95 exclude_files=None, **kwargs): | 96 exclude_files=None, **kwargs): |
96 """Returns a step invoking zip_build.py to zip up a Chromium build. | 97 """Returns a step invoking zip_build.py to zip up a Chromium build. |
97 If build_url is specified, also uploads the build.""" | 98 If build_url is specified, also uploads the build.""" |
98 args = [ | 99 args = [ |
99 '--show-path', | 100 '--show-path', |
100 'python', | 101 'python', |
101 self.package_repo_resource('scripts', 'slave', 'zip_build.py'), | 102 self.package_repo_resource('scripts', 'slave', 'zip_build.py'), |
102 '--target', target, | 103 '--target', target, |
103 ] | 104 ] |
| 105 |
dimu1 2016/07/11 17:47:09: nit: remove empty line
miimnk 2016/07/11 22:08:47: Done.
104 if build_url or 'build_archive_url' in self.m.properties: | 106 if build_url or 'build_archive_url' in self.m.properties: |
105 args.extend(['--build-url', | 107 args.extend(['--build-url', |
106 build_url or self.m.properties['build_archive_url']]) | 108 build_url or self.m.properties['build_archive_url']]) |
107 if build_revision: | 109 if build_revision: |
108 args.extend(['--build_revision', build_revision]) | 110 args.extend(['--build_revision', build_revision]) |
109 elif src_dir: | 111 elif src_dir: |
110 args.extend(['--src-dir', src_dir]) | 112 args.extend(['--src-dir', src_dir]) |
111 if cros_board: | 113 if cros_board: |
112 args.extend(['--cros-board', cros_board]) | 114 args.extend(['--cros-board', cros_board]) |
113 if package_dsym_files: | 115 if package_dsym_files: |
114 args.append('--package-dsym-files') | 116 args.append('--package-dsym-files') |
115 if exclude_files: | 117 if exclude_files: |
116 args.extend(['--exclude-files', exclude_files]) | 118 args.extend(['--exclude-files', exclude_files]) |
117 if 'gs_acl' in self.m.properties: | 119 if 'gs_acl' in self.m.properties: |
118 args.extend(['--gs-acl', self.m.properties['gs_acl']]) | 120 args.extend(['--gs-acl', self.m.properties['gs_acl']]) |
| 121 if not kwargs.pop("includePerfTestFiles", True): |
| 122 inclusions = ",".join(perf_test_files.FILES) |
stgao 2016/07/11 19:04:27: style nit: single quote instead of double quote.
miimnk 2016/07/11 22:08:47: Done.
| 123 args.extend(['--include-files', inclusions]) |
| 124 args.extend(['--exclusive_include']) |
| 125 args.extend(['--strip_symbol']) |
| 126 # If update_properties is passed in and store_by_hash is False, |
| 127 # we store it with commit position number instead of a hash |
| 128 update_properties = kwargs.pop("update_properties", None) |
| 129 if update_properties and not kwargs.pop("store_by_hash", True): |
| 130 commit_position = self._get_commit_position( |
| 131 update_properties, None) |
| 132 cp_branch, cp_number = self.m.commit_position.parse(commit_position) |
| 133 args.extend(['--build_revision', cp_number]) |
119 | 134 |
120 properties_json = self.m.json.dumps(self.m.properties.legacy()) | 135 properties_json = self.m.json.dumps(self.m.properties.legacy()) |
121 args.extend(['--factory-properties', properties_json, | 136 args.extend(['--factory-properties', properties_json, |
122 '--build-properties', properties_json]) | 137 '--build-properties', properties_json]) |
123 | 138 |
124 kwargs['allow_subannotations'] = True | 139 kwargs['allow_subannotations'] = True |
125 self.m.python( | 140 self.m.python( |
126 step_name, | 141 step_name, |
127 self.package_repo_resource('scripts', 'tools', 'runit.py'), | 142 self.package_repo_resource('scripts', 'tools', 'runit.py'), |
128 args, | 143 args, |
(...skipping 274 matching lines...)
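For context, here is a minimal sketch of how a builder recipe might drive the new keyword arguments added in the hunk above. The method name `zip_and_upload_build`, the `api.archive` module path, and the property sources are assumptions for illustration; the enclosing definition and its callers fall outside the lines shown in this diff.

```python
# Hypothetical caller (not part of this patch). Names marked "assumed" are not
# visible in this diff and are illustrative only.
def RunSteps(api):
  # Assumed source of commit-position data, e.g. 'refs/heads/master@{#123456}'.
  update_properties = api.bot_update.last_returned_properties

  api.archive.zip_and_upload_build(           # assumed method name
      'package build',                        # step name
      api.chromium.c.build_config_fs,         # target, e.g. 'Release'
      build_url='gs://example-archive/builds',   # placeholder bucket
      # New in this patch: includePerfTestFiles=False makes zip_build.py keep
      # only perf_test_files.FILES (--include-files + --exclusive_include) and
      # pass --strip_symbol.
      includePerfTestFiles=False,
      # New in this patch: with store_by_hash=False, the commit position number
      # parsed out of update_properties is used as --build_revision instead of
      # a hash.
      update_properties=update_properties,
      store_by_hash=False,
  )
```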
403 def legacy_download_url(self, gs_bucket_name, extra_url_components=None): | 418 def legacy_download_url(self, gs_bucket_name, extra_url_components=None): |
404 """Returns a url suitable for downloading a Chromium build from | 419 """Returns a url suitable for downloading a Chromium build from |
405 Google Storage. | 420 Google Storage. |
406 | 421 |
407 extra_url_components, if specified, should be a string without a | 422 extra_url_components, if specified, should be a string without a |
408 trailing '/' which is inserted in the middle of the URL. | 423 trailing '/' which is inserted in the middle of the URL. |
409 | 424 |
410 The builder_name, or parent_buildername, is always automatically | 425 The builder_name, or parent_buildername, is always automatically |
411 inserted into the URL.""" | 426 inserted into the URL.""" |
412 return self._legacy_url(True, gs_bucket_name, extra_url_components) | 427 return self._legacy_url(True, gs_bucket_name, extra_url_components) |
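Below is a usage sketch for `legacy_download_url`, assuming the surrounding recipe API; the bucket name and the extra component are placeholders, and the exact URL layout comes from the private `_legacy_url` helper, which is outside the lines shown here.

```python
# Hypothetical usage (not part of this patch).
def RunSteps(api):
  url = api.archive.legacy_download_url(
      'chromium-example-archive',         # placeholder GS bucket name
      extra_url_components='android')     # optional middle segment, no trailing '/'
  # Per the docstring, builder_name (or parent_buildername) is inserted into
  # the returned URL automatically; `url` can then be handed to a download step.
```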