Chromium Code Reviews

Unified Diff: scripts/slave/recipe_modules/archive/api.py

Issue 2128613005: Archive Linux perf builds for manual bisect (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/build.git@master
Patch Set: Merge remote-tracking branch 'refs/remotes/origin/master' into archive Created 4 years, 4 months ago
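
The diff below pulls two lists, CHROME_REQUIRED_FILES and CHROME_STRIP_LIST, from a sibling manual_bisect_files module that is not part of this file. For orientation only, that module presumably looks roughly like the following sketch; the individual file names are illustrative assumptions, not entries taken from the CL.

# scripts/slave/recipe_modules/archive/manual_bisect_files.py (sketch only)

# Files a stripped-down perf build archive must still contain so a manual
# bisect can launch and benchmark the browser (example entries, assumed).
CHROME_REQUIRED_FILES = [
  'chrome',                  # browser binary (assumed entry)
  'chrome_100_percent.pak',  # assumed entry
  'resources.pak',           # assumed entry
]

# Binaries whose symbols are stripped before archiving to keep the upload
# small (example entry, assumed).
CHROME_STRIP_LIST = [
  'chrome',
]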
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 import re

 from recipe_engine import recipe_api
+from . import manual_bisect_files
ghost stip (do not use) 2016/08/15 20:12:15 is that needed? can you not just do import manual_bisect_files?
miimnk 2016/08/15 21:41:22 Done.


 # TODO(machenbach): Chromium specific data should move out of the archive
 # module, into e.g. the chromium test configs.
 EXCLUDED_FILES_ALL_PLATFORMS = [
   '.landmines',
   '.ninja_deps',
   '.ninja_log',
   'gen',
   'obj',
(...skipping 63 matching lines...)


 class ArchiveApi(recipe_api.RecipeApi):
   """Chromium specific module for zipping, uploading and downloading build
   artifacts implemented as a wrapper around zip_build.py script.

   If you need to upload or download build artifacts (or any other files) for
   something other than Chromium flavor, consider using 'zip' + 'gsutil' or
   'isolate' modules instead.
   """
-
   def zip_and_upload_build(
       self, step_name, target, build_url=None, src_dir=None,
       build_revision=None, cros_board=None, package_dsym_files=False,
-      exclude_files=None, **kwargs):
+      exclude_files=None, include_perf_test_files = True,
ghost stip (do not use) 2016/08/15 20:12:15 nit: no spaces around =
ghost stip (do not use) 2016/08/15 20:12:15 second nit: I'd prefer to flip this flag: "exclude_perf_test_files"
miimnk 2016/08/15 21:41:22 Done.
+      update_properties=None, store_by_hash=True, **kwargs):
     """Returns a step invoking zip_build.py to zip up a Chromium build.
     If build_url is specified, also uploads the build."""
     if not src_dir:
       src_dir = self.m.path['checkout']
     args = [
         '--show-path',
         'python',
         self.package_repo_resource('scripts', 'slave', 'zip_build.py'),
         '--target', target,
         '--gsutil-py-path', self.m.depot_tools.gsutil_py_path,
         '--staging-dir', self.m.path['cache'].join('chrome_staging'),
         '--src-dir', src_dir,
     ]
     if 'build_archive_url' in self.m.properties:
       args.extend(['--use-build-url-name', '--build-url',
                    self.m.properties['build_archive_url']])
     elif build_url:
       args.extend(['--build-url', build_url])
     if build_revision:
       args.extend(['--build_revision', build_revision])
     if cros_board:
       args.extend(['--cros-board', cros_board])
     if package_dsym_files:
       args.append('--package-dsym-files')
     if exclude_files:
       args.extend(['--exclude-files', exclude_files])
     if 'gs_acl' in self.m.properties:
       args.extend(['--gs-acl', self.m.properties['gs_acl']])
+    if not include_perf_test_files:
ghost stip (do not use) 2016/08/15 20:12:15 if exclude_perf_test_files: ...
+      inclusions = ','.join(manual_bisect_files.CHROME_REQUIRED_FILES)
+      strip_files = ','.join(manual_bisect_files.CHROME_STRIP_LIST)
+      args.extend(['--include-files', inclusions])
+      args.extend(['--ignore-regex'])
+      args.extend(['--strip-files', strip_files])
+    # If update_properties is passed in and store_by_hash is False,
+    # we store it with commit position number instead of a hash
+    if update_properties and not store_by_hash:
+      commit_position = self._get_commit_position(
+          update_properties, None)
+      cp_branch, cp_number = self.m.commit_position.parse(commit_position)
+      args.extend(['--build_revision', cp_number])

     properties_json = self.m.json.dumps(self.m.properties.legacy())
     args.extend(['--factory-properties', properties_json,
                  '--build-properties', properties_json])

     kwargs['allow_subannotations'] = True
     self.m.python(
         step_name,
         self.package_repo_resource('scripts', 'tools', 'runit.py'),
         args,
(...skipping 278 matching lines...)
   def legacy_download_url(self, gs_bucket_name, extra_url_components=None):
     """Returns a url suitable for downloading a Chromium build from
     Google Storage.

     extra_url_components, if specified, should be a string without a
     trailing '/' which is inserted in the middle of the URL.

     The builder_name, or parent_buildername, is always automatically
     inserted into the URL."""
     return self._legacy_url(True, gs_bucket_name, extra_url_components)
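
For reference, a hypothetical perf-builder recipe exercising the new arguments could call the module roughly as follows. This is a sketch only: the step name, target, bucket path, and the bot_update accessor are assumptions and not part of this CL (a later patch set also renames include_perf_test_files to exclude_perf_test_files, per the review comments above).

# Sketch of a builder-side call; only the parameter names of
# zip_and_upload_build come from this CL, all values are illustrative.
update_properties = api.bot_update.last_returned_properties  # assumed accessor
api.archive.zip_and_upload_build(
    'package build for manual bisect',            # step name (illustrative)
    'Release',                                    # target (illustrative)
    build_url='gs://chrome-perf/Linux Builder',   # assumed bucket/path
    include_perf_test_files=False,   # keep only CHROME_REQUIRED_FILES and
                                     # strip CHROME_STRIP_LIST binaries
    update_properties=update_properties,
    store_by_hash=False)             # archive by commit position: e.g.
                                     # 'refs/heads/master@{#405525}' is parsed
                                     # and 405525 is passed as --build_revision

With include_perf_test_files=False the step forwards --include-files and --strip-files to zip_build.py, and with update_properties set and store_by_hash=False it names the archive by the numeric commit position parsed from update_properties instead of by hash.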
