Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(5)

Side by Side Diff: scripts/slave/zip_build.py

Issue 2128613005: Archive Linux perf builds for manual bisect (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/build.git@master
Patch Set: removed unzip_strip_zip, code style change Created 4 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """ Creates a zip file in the staging dir with the result of a compile. 6 """ Creates a zip file in the staging dir with the result of a compile.
7 It can be sent to other machines for testing. 7 It can be sent to other machines for testing.
8 """ 8 """
9 9
10 import csv 10 import csv
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
159 tmp_revision_file.write('%s' % build_revision) 159 tmp_revision_file.write('%s' % build_revision)
160 tmp_revision_file.close() 160 tmp_revision_file.close()
161 chromium_utils.MakeWorldReadable(tmp_revision_file.name) 161 chromium_utils.MakeWorldReadable(tmp_revision_file.name)
162 dest_path = os.path.join(dirname, 162 dest_path = os.path.join(dirname,
163 chromium_utils.FULL_BUILD_REVISION_FILENAME) 163 chromium_utils.FULL_BUILD_REVISION_FILENAME)
164 shutil.move(tmp_revision_file.name, dest_path) 164 shutil.move(tmp_revision_file.name, dest_path)
165 return dest_path 165 return dest_path
166 except IOError: 166 except IOError:
167 print 'Writing to revision file in %s failed.' % dirname 167 print 'Writing to revision file in %s failed.' % dirname
168 168
169
170 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, 169 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
171 zip_file_name): 170 zip_file_name, strip_symbol=False):
172 """Creates an unversioned full build archive. 171 """Creates an unversioned full build archive.
173 Returns the path of the created archive.""" 172 Returns the path of the created archive."""
173 strip_files = None
174 if strip_symbol:
175 strip_files = ['chrome', 'nacl_helper']
stgao 2016/07/11 19:04:27 Not sure where this file list should be, but this
miimnk 2016/07/11 22:08:48 There are only two binary files with a significant
stgao 2016/07/12 20:40:40 Stripping big files are good for now. My comment
miimnk 2016/07/13 01:12:48 I stored the strip list in archive/perf_test_files
174 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, 176 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
175 zip_file_name, 177 zip_file_name,
176 zip_file_list, 178 zip_file_list,
177 build_dir, 179 build_dir,
178 raise_error=True) 180 raise_error=True,
181 strip_files=strip_files)
182
179 chromium_utils.RemoveDirectory(zip_dir) 183 chromium_utils.RemoveDirectory(zip_dir)
180 if not os.path.exists(zip_file): 184 if not os.path.exists(zip_file):
181 raise StagingError('Failed to make zip package %s' % zip_file) 185 raise StagingError('Failed to make zip package %s' % zip_file)
182 chromium_utils.MakeWorldReadable(zip_file) 186 chromium_utils.MakeWorldReadable(zip_file)
183 187
184 # Report the size of the zip file to help catch when it gets too big and 188 # Report the size of the zip file to help catch when it gets too big and
185 # can cause bot failures from timeouts during downloads to testers. 189 # can cause bot failures from timeouts during downloads to testers.
186 zip_size = os.stat(zip_file)[stat.ST_SIZE] 190 zip_size = os.stat(zip_file)[stat.ST_SIZE]
187 print 'Zip file is %ld bytes' % zip_size 191 print 'Zip file is %ld bytes' % zip_size
188 192
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
253 def __init__(self, options): 257 def __init__(self, options):
254 def CommaStrParser(val): 258 def CommaStrParser(val):
255 return [f.strip() for f in csv.reader([val]).next()] 259 return [f.strip() for f in csv.reader([val]).next()]
256 self.inclusions = CommaStrParser(options.include_files) 260 self.inclusions = CommaStrParser(options.include_files)
257 self.exclusions = (CommaStrParser(options.exclude_files) 261 self.exclusions = (CommaStrParser(options.exclude_files)
258 + chromium_utils.FileExclusions()) 262 + chromium_utils.FileExclusions())
259 263
260 self.regex_whitelist = FileRegexWhitelist(options) 264 self.regex_whitelist = FileRegexWhitelist(options)
261 self.regex_blacklist = FileRegexBlacklist(options) 265 self.regex_blacklist = FileRegexBlacklist(options)
262 self.exclude_unmatched = options.exclude_unmatched 266 self.exclude_unmatched = options.exclude_unmatched
267 self.exclusive_include = options.exclusive_include
stgao 2016/07/11 19:04:27 What's this for? The naming seems a bit confusing
miimnk 2016/07/11 22:08:48 Currently, it is not possible to only include item
263 268
264 def __str__(self): 269 def __str__(self):
265 return '\n '.join([ 270 return '\n '.join([
266 'Zip rules', 271 'Zip rules',
267 'Inclusions: %s' % self.inclusions, 272 'Inclusions: %s' % self.inclusions,
268 'Exclusions: %s' % self.exclusions, 273 'Exclusions: %s' % self.exclusions,
269 "Whitelist regex: '%s'" % self.regex_whitelist, 274 "Whitelist regex: '%s'" % self.regex_whitelist,
270 "Blacklist regex: '%s'" % self.regex_blacklist, 275 "Blacklist regex: '%s'" % self.regex_blacklist,
271 'Zip unmatched files: %s' % (not self.exclude_unmatched)]) 276 'Zip unmatched files: %s' % (not self.exclude_unmatched)])
272 277
273 def Match(self, filename): 278 def Match(self, filename):
274 if filename in self.inclusions: 279 if filename in self.inclusions:
275 return True 280 return True
281 # Added to implement exclusive include
282 if self.exclusive_include:
283 return False
276 if filename in self.exclusions: 284 if filename in self.exclusions:
277 return False 285 return False
278 if re.match(self.regex_whitelist, filename): 286 if re.match(self.regex_whitelist, filename):
279 return True 287 return True
280 if re.match(self.regex_blacklist, filename): 288 if re.match(self.regex_blacklist, filename):
281 return False 289 return False
282 return not self.exclude_unmatched 290 return not self.exclude_unmatched
283 291
284 292
285 def Archive(options): 293 def Archive(options):
286 build_dir = build_directory.GetBuildOutputDirectory( 294 build_dir = build_directory.GetBuildOutputDirectory(
287 options.src_dir, options.cros_board) 295 options.src_dir, options.cros_board)
288 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) 296 build_dir = os.path.abspath(os.path.join(build_dir, options.target))
289
290 staging_dir = slave_utils.GetStagingDir(options.src_dir) 297 staging_dir = slave_utils.GetStagingDir(options.src_dir)
291 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) 298 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)
292 299
293 if not options.build_revision: 300 if not options.build_revision:
294 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( 301 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
295 options.src_dir, options.webkit_dir, options.revision_dir) 302 options.src_dir, options.webkit_dir, options.revision_dir)
296 else: 303 else:
297 build_revision = options.build_revision 304 build_revision = options.build_revision
298 webkit_revision = options.webkit_revision 305 webkit_revision = options.webkit_revision
299 306
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
345 352
346 zip_file_list = [f for f in root_files if path_filter.Match(f)] 353 zip_file_list = [f for f in root_files if path_filter.Match(f)]
347 354
348 # TODO(yzshen): Once we have swarming support ready, we could use it to 355 # TODO(yzshen): Once we have swarming support ready, we could use it to
349 # archive run time dependencies of tests and remove this step. 356 # archive run time dependencies of tests and remove this step.
350 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) 357 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
351 print 'Include mojom files: %s' % mojom_files 358 print 'Include mojom files: %s' % mojom_files
352 zip_file_list.extend(mojom_files) 359 zip_file_list.extend(mojom_files)
353 360
354 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, 361 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
355                                     unversioned_base_name) 362                                     unversioned_base_name, strip_symbol=options.strip_symbol)
356 363
357 zip_base, zip_ext, versioned_file = MakeVersionedArchive( 364 zip_base, zip_ext, versioned_file = MakeVersionedArchive(
358 zip_file, version_suffix, options) 365 zip_file, version_suffix, options)
359 366
360 prune_limit = 10 367 prune_limit = 10
361 if options.build_url.startswith('gs://'): 368 if options.build_url.startswith('gs://'):
362 # Don't keep builds lying around when uploading them to google storage. 369 # Don't keep builds lying around when uploading them to google storage.
363 prune_limit = 3 370 prune_limit = 3
364 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) 371 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)
365 372
(...skipping 26 matching lines...) Expand all
392 help='build target to archive (Debug or Release)') 399 help='build target to archive (Debug or Release)')
393 option_parser.add_option('--src-dir', default='src', 400 option_parser.add_option('--src-dir', default='src',
394 help='path to the top-level sources directory') 401 help='path to the top-level sources directory')
395 option_parser.add_option('--build-dir', help='ignored') 402 option_parser.add_option('--build-dir', help='ignored')
396 option_parser.add_option('--exclude-files', default='', 403 option_parser.add_option('--exclude-files', default='',
397 help='Comma separated list of files that should ' 404 help='Comma separated list of files that should '
398 'always be excluded from the zip.') 405 'always be excluded from the zip.')
399 option_parser.add_option('--include-files', default='', 406 option_parser.add_option('--include-files', default='',
400 help='Comma separated list of files that should ' 407 help='Comma separated list of files that should '
401 'always be included in the zip.') 408 'always be included in the zip.')
409 option_parser.add_option('--exclusive_include', action='store_true',
410                            default=False, help='Only include the files in include-files list')
402 option_parser.add_option('--master-name', help='Name of the buildbot master.') 411 option_parser.add_option('--master-name', help='Name of the buildbot master.')
403 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') 412 option_parser.add_option('--slave-name', help='Name of the buildbot slave.')
404 option_parser.add_option('--build-number', type=int, 413 option_parser.add_option('--build-number', type=int,
405 help='Buildbot build number.') 414 help='Buildbot build number.')
406 option_parser.add_option('--parent-build-number', type=int, 415 option_parser.add_option('--parent-build-number', type=int,
407 help='Buildbot parent build number.') 416 help='Buildbot parent build number.')
408 option_parser.add_option('--webkit-dir', 417 option_parser.add_option('--webkit-dir',
409 help='webkit directory path, relative to --src-dir') 418 help='webkit directory path, relative to --src-dir')
410 option_parser.add_option('--revision-dir', 419 option_parser.add_option('--revision-dir',
411 help='Directory path that shall be used to decide ' 420 help='Directory path that shall be used to decide '
(...skipping 11 matching lines...) Expand all
423 help=('Optional URL to which to upload build ' 432 help=('Optional URL to which to upload build '
424 '(overrides build_url factory property)')) 433 '(overrides build_url factory property)'))
425 option_parser.add_option('--cros-board', 434 option_parser.add_option('--cros-board',
426 help=('If building for Chrom[e|ium]OS via the ' 435 help=('If building for Chrom[e|ium]OS via the '
427 'simple chrome workflow, the name of the ' 436 'simple chrome workflow, the name of the '
428 'target CROS board.')) 437 'target CROS board.'))
429 option_parser.add_option('--package-dsym-files', action='store_true', 438 option_parser.add_option('--package-dsym-files', action='store_true',
430 default=False, help='Add also dSYM files.') 439 default=False, help='Add also dSYM files.')
431 option_parser.add_option('--append-deps-patch-sha', action='store_true') 440 option_parser.add_option('--append-deps-patch-sha', action='store_true')
432 option_parser.add_option('--gs-acl') 441 option_parser.add_option('--gs-acl')
442 option_parser.add_option('--strip_symbol', action='store_true',
443                            default=False, help='Strip symbols from chrome executable.')
433 option_parser.add_option('--json-urls', 444 option_parser.add_option('--json-urls',
434 help=('Path to json file containing uploaded ' 445 help=('Path to json file containing uploaded '
435 'archive urls. If this is omitted then ' 446 'archive urls. If this is omitted then '
436 'the urls will be emitted as buildbot ' 447 'the urls will be emitted as buildbot '
437 'annotations.')) 448 'annotations.'))
438 chromium_utils.AddPropertiesOptions(option_parser) 449 chromium_utils.AddPropertiesOptions(option_parser)
439 450
440 options, args = option_parser.parse_args(argv) 451 options, args = option_parser.parse_args(argv)
441 452
442 if not options.master_name: 453 if not options.master_name:
(...skipping 27 matching lines...) Expand all
470 json.dump(urls, json_file) 481 json.dump(urls, json_file)
471 else: # we need to print buildbot annotations 482 else: # we need to print buildbot annotations
472 if 'storage_url' in urls: 483 if 'storage_url' in urls:
473 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] 484 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url']
474 if 'zip_url' in urls: 485 if 'zip_url' in urls:
475 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] 486 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url']
476 return 0 487 return 0
477 488
478 if '__main__' == __name__: 489 if '__main__' == __name__:
479 sys.exit(main(sys.argv)) 490 sys.exit(main(sys.argv))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698