Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(148)

Side by Side Diff: scripts/slave/zip_build.py

Issue 2128613005: Archive Linux perf builds for manual bisect (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/build.git@master
Patch Set: added a missing comma Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « scripts/slave/recipes/chromium.expected/full_chromium_perf_Linux_Builder.json ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """ Creates a zip file in the staging dir with the result of a compile. 6 """ Creates a zip file in the staging dir with the result of a compile.
7 It can be sent to other machines for testing. 7 It can be sent to other machines for testing.
8 """ 8 """
9 9
10 import csv 10 import csv
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
159 tmp_revision_file.write('%s' % build_revision) 159 tmp_revision_file.write('%s' % build_revision)
160 tmp_revision_file.close() 160 tmp_revision_file.close()
161 chromium_utils.MakeWorldReadable(tmp_revision_file.name) 161 chromium_utils.MakeWorldReadable(tmp_revision_file.name)
162 dest_path = os.path.join(dirname, 162 dest_path = os.path.join(dirname,
163 chromium_utils.FULL_BUILD_REVISION_FILENAME) 163 chromium_utils.FULL_BUILD_REVISION_FILENAME)
164 shutil.move(tmp_revision_file.name, dest_path) 164 shutil.move(tmp_revision_file.name, dest_path)
165 return dest_path 165 return dest_path
166 except IOError: 166 except IOError:
167 print 'Writing to revision file in %s failed.' % dirname 167 print 'Writing to revision file in %s failed.' % dirname
168 168
169
170 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, 169 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
171 zip_file_name): 170 zip_file_name, strip_files=None):
172 """Creates an unversioned full build archive. 171 """Creates an unversioned full build archive.
173 Returns the path of the created archive.""" 172 Returns the path of the created archive."""
174 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, 173 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
175 zip_file_name, 174 zip_file_name,
176 zip_file_list, 175 zip_file_list,
177 build_dir, 176 build_dir,
178 raise_error=True) 177 raise_error=True,
178 strip_files=strip_files)
179
179 chromium_utils.RemoveDirectory(zip_dir) 180 chromium_utils.RemoveDirectory(zip_dir)
180 if not os.path.exists(zip_file): 181 if not os.path.exists(zip_file):
181 raise StagingError('Failed to make zip package %s' % zip_file) 182 raise StagingError('Failed to make zip package %s' % zip_file)
182 chromium_utils.MakeWorldReadable(zip_file) 183 chromium_utils.MakeWorldReadable(zip_file)
183 184
184 # Report the size of the zip file to help catch when it gets too big and 185 # Report the size of the zip file to help catch when it gets too big and
185 # can cause bot failures from timeouts during downloads to testers. 186 # can cause bot failures from timeouts during downloads to testers.
186 zip_size = os.stat(zip_file)[stat.ST_SIZE] 187 zip_size = os.stat(zip_file)[stat.ST_SIZE]
187 print 'Zip file is %ld bytes' % zip_size 188 print 'Zip file is %ld bytes' % zip_size
188 189
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
266 def __init__(self, options): 267 def __init__(self, options):
267 def CommaStrParser(val): 268 def CommaStrParser(val):
268 return [f.strip() for f in csv.reader([val]).next()] 269 return [f.strip() for f in csv.reader([val]).next()]
269 self.inclusions = CommaStrParser(options.include_files) 270 self.inclusions = CommaStrParser(options.include_files)
270 self.exclusions = (CommaStrParser(options.exclude_files) 271 self.exclusions = (CommaStrParser(options.exclude_files)
271 + chromium_utils.FileExclusions()) 272 + chromium_utils.FileExclusions())
272 273
273 self.regex_whitelist = FileRegexWhitelist(options) 274 self.regex_whitelist = FileRegexWhitelist(options)
274 self.regex_blacklist = FileRegexBlacklist(options) 275 self.regex_blacklist = FileRegexBlacklist(options)
275 self.exclude_unmatched = options.exclude_unmatched 276 self.exclude_unmatched = options.exclude_unmatched
277 self.ignore_regex = options.ignore_regex
276 278
277 def __str__(self): 279 def __str__(self):
278 return '\n '.join([ 280 return '\n '.join([
279 'Zip rules', 281 'Zip rules',
280 'Inclusions: %s' % self.inclusions, 282 'Inclusions: %s' % self.inclusions,
281 'Exclusions: %s' % self.exclusions, 283 'Exclusions: %s' % self.exclusions,
282 "Whitelist regex: '%s'" % self.regex_whitelist, 284 "Whitelist regex: '%s'" % self.regex_whitelist,
283 "Blacklist regex: '%s'" % self.regex_blacklist, 285 "Blacklist regex: '%s'" % self.regex_blacklist,
284 'Zip unmatched files: %s' % (not self.exclude_unmatched)]) 286 'Zip unmatched files: %s' % (not self.exclude_unmatched),
287 'Ignore regex matches: %s' % self.ignore_regex])
288
285 289
286 def Match(self, filename): 290 def Match(self, filename):
287 if filename in self.inclusions: 291 if filename in self.inclusions:
288 return True 292 return True
289 if filename in self.exclusions: 293 if filename in self.exclusions:
290 return False 294 return False
295 if self.ignore_regex:
296 return False
291 if re.match(self.regex_whitelist, filename): 297 if re.match(self.regex_whitelist, filename):
292 return True 298 return True
293 if re.match(self.regex_blacklist, filename): 299 if re.match(self.regex_blacklist, filename):
294 return False 300 return False
295 return not self.exclude_unmatched 301 return not self.exclude_unmatched
296 302
297 303
298 def Archive(options): 304 def Archive(options):
299 build_dir = build_directory.GetBuildOutputDirectory( 305 build_dir = build_directory.GetBuildOutputDirectory(
300 options.src_dir, options.cros_board) 306 options.src_dir, options.cros_board)
301 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) 307 build_dir = os.path.abspath(os.path.join(build_dir, options.target))
302 308
303 staging_dir = (options.staging_dir or 309 staging_dir = (options.staging_dir or
304 slave_utils.GetStagingDir(options.src_dir)) 310 slave_utils.GetStagingDir(options.src_dir))
305 if not os.path.exists(staging_dir): 311 if not os.path.exists(staging_dir):
306 os.makedirs(staging_dir) 312 os.makedirs(staging_dir)
307 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) 313 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)
308
309 if not options.build_revision: 314 if not options.build_revision:
310 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( 315 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
311 options.src_dir, options.webkit_dir, options.revision_dir) 316 options.src_dir, options.webkit_dir, options.revision_dir)
312 else: 317 else:
313 build_revision = options.build_revision 318 build_revision = options.build_revision
314 webkit_revision = options.webkit_revision 319 webkit_revision = options.webkit_revision
315 320
316 unversioned_base_name, version_suffix = slave_utils.GetZipFileNames( 321 unversioned_base_name, version_suffix = slave_utils.GetZipFileNames(
317 options.master_name, 322 options.master_name,
318 options.build_number, 323 options.build_number,
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
361 366
362 zip_file_list = [f for f in root_files if path_filter.Match(f)] 367 zip_file_list = [f for f in root_files if path_filter.Match(f)]
363 368
364 # TODO(yzshen): Once we have swarming support ready, we could use it to 369 # TODO(yzshen): Once we have swarming support ready, we could use it to
365 # archive run time dependencies of tests and remove this step. 370 # archive run time dependencies of tests and remove this step.
366 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) 371 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
367 print 'Include mojom files: %s' % mojom_files 372 print 'Include mojom files: %s' % mojom_files
368 zip_file_list.extend(mojom_files) 373 zip_file_list.extend(mojom_files)
369 374
370 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, 375 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
371 unversioned_base_name) 376 unversioned_base_name,
377 strip_files=options.strip_files)
372 378
373 zip_base, zip_ext, versioned_file = MakeVersionedArchive( 379 zip_base, zip_ext, versioned_file = MakeVersionedArchive(
374 zip_file, version_suffix, options) 380 zip_file, version_suffix, options)
375 381
376 prune_limit = 10 382 prune_limit = 10
377 if options.build_url.startswith('gs://'): 383 if options.build_url.startswith('gs://'):
378 # Don't keep builds lying around when uploading them to google storage. 384 # Don't keep builds lying around when uploading them to google storage.
379 prune_limit = 3 385 prune_limit = 3
380 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) 386 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)
381 387
(...skipping 27 matching lines...) Expand all
409 help='build target to archive (Debug or Release)') 415 help='build target to archive (Debug or Release)')
410 option_parser.add_option('--src-dir', default='src', 416 option_parser.add_option('--src-dir', default='src',
411 help='path to the top-level sources directory') 417 help='path to the top-level sources directory')
412 option_parser.add_option('--build-dir', help='ignored') 418 option_parser.add_option('--build-dir', help='ignored')
413 option_parser.add_option('--exclude-files', default='', 419 option_parser.add_option('--exclude-files', default='',
414 help='Comma separated list of files that should ' 420 help='Comma separated list of files that should '
415 'always be excluded from the zip.') 421 'always be excluded from the zip.')
416 option_parser.add_option('--include-files', default='', 422 option_parser.add_option('--include-files', default='',
417 help='Comma separated list of files that should ' 423 help='Comma separated list of files that should '
418 'always be included in the zip.') 424 'always be included in the zip.')
425 option_parser.add_option('--ignore-regex', action='store_true',
426 default=False, help='Ignores regex matches')
419 option_parser.add_option('--master-name', help='Name of the buildbot master.') 427 option_parser.add_option('--master-name', help='Name of the buildbot master.')
420 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') 428 option_parser.add_option('--slave-name', help='Name of the buildbot slave.')
421 option_parser.add_option('--build-number', type=int, 429 option_parser.add_option('--build-number', type=int,
422 help='Buildbot build number.') 430 help='Buildbot build number.')
423 option_parser.add_option('--parent-build-number', type=int, 431 option_parser.add_option('--parent-build-number', type=int,
424 help='Buildbot parent build number.') 432 help='Buildbot parent build number.')
425 option_parser.add_option('--webkit-dir', 433 option_parser.add_option('--webkit-dir',
426 help='webkit directory path, relative to --src-dir') 434 help='webkit directory path, relative to --src-dir')
427 option_parser.add_option('--revision-dir', 435 option_parser.add_option('--revision-dir',
428 help='Directory path that shall be used to decide ' 436 help='Directory path that shall be used to decide '
(...skipping 14 matching lines...) Expand all
443                            help=('Use the filename given in --build-url instead ' 451                            help=('Use the filename given in --build-url instead '
444                                  'of generating one.')) 452                                  'of generating one.'))
445 option_parser.add_option('--cros-board', 453 option_parser.add_option('--cros-board',
446 help=('If building for Chrom[e|ium]OS via the ' 454 help=('If building for Chrom[e|ium]OS via the '
447 'simple chrome workflow, the name of the ' 455 'simple chrome workflow, the name of the '
448 'target CROS board.')) 456 'target CROS board.'))
449 option_parser.add_option('--package-dsym-files', action='store_true', 457 option_parser.add_option('--package-dsym-files', action='store_true',
450 default=False, help='Add also dSYM files.') 458 default=False, help='Add also dSYM files.')
451 option_parser.add_option('--append-deps-patch-sha', action='store_true') 459 option_parser.add_option('--append-deps-patch-sha', action='store_true')
452 option_parser.add_option('--gs-acl') 460 option_parser.add_option('--gs-acl')
461 option_parser.add_option('--strip-files', default='',
462 help='Comma separated list of files that should '
463 'be stripped of symbols in the zip.')
453 option_parser.add_option('--json-urls', 464 option_parser.add_option('--json-urls',
454 help=('Path to json file containing uploaded ' 465 help=('Path to json file containing uploaded '
455 'archive urls. If this is omitted then ' 466 'archive urls. If this is omitted then '
456 'the urls will be emitted as buildbot ' 467 'the urls will be emitted as buildbot '
457 'annotations.')) 468 'annotations.'))
458 option_parser.add_option('--staging-dir', 469 option_parser.add_option('--staging-dir',
459 help='Directory to use for staging the archives. ' 470 help='Directory to use for staging the archives. '
460 'Default behavior is to automatically detect ' 471 'Default behavior is to automatically detect '
461 'slave\'s build directory.') 472 'slave\'s build directory.')
462 option_parser.add_option('--gsutil-py-path', 473 option_parser.add_option('--gsutil-py-path',
(...skipping 13 matching lines...) Expand all
476         'parent_buildumber')  # NOTE(review): likely a typo for 'parent_buildnumber' — verify against factory_properties producers 487         'parent_buildumber')
477 if not options.target: 488 if not options.target:
478 options.target = options.factory_properties.get('target', 'Release') 489 options.target = options.factory_properties.get('target', 'Release')
479 if not options.build_url: 490 if not options.build_url:
480 options.build_url = options.factory_properties.get('build_url', '') 491 options.build_url = options.factory_properties.get('build_url', '')
481 if not options.append_deps_patch_sha: 492 if not options.append_deps_patch_sha:
482 options.append_deps_patch_sha = options.factory_properties.get( 493 options.append_deps_patch_sha = options.factory_properties.get(
483 'append_deps_patch_sha') 494 'append_deps_patch_sha')
484 if not options.gs_acl: 495 if not options.gs_acl:
485 options.gs_acl = options.factory_properties.get('gs_acl') 496 options.gs_acl = options.factory_properties.get('gs_acl')
486 497 if options.strip_files:
498 options.strip_files = options.strip_files.split(',')
487 # When option_parser is passed argv as a list, it can return the caller as 499 # When option_parser is passed argv as a list, it can return the caller as
488 # first unknown arg. So throw a warning if we have two or more unknown 500 # first unknown arg. So throw a warning if we have two or more unknown
489 # arguments. 501 # arguments.
490 if args[1:]: 502 if args[1:]:
491   print 'Warning -- unknown arguments %s' % args[1:] 503   print 'Warning -- unknown arguments %s' % args[1:]
492 504
493 urls = Archive(options) 505 urls = Archive(options)
494 if options.json_urls: # we need to dump json 506 if options.json_urls: # we need to dump json
495 with open(options.json_urls, 'w') as json_file: 507 with open(options.json_urls, 'w') as json_file:
496 json.dump(urls, json_file) 508 json.dump(urls, json_file)
497 else: # we need to print buildbot annotations 509 else: # we need to print buildbot annotations
498 if 'storage_url' in urls: 510 if 'storage_url' in urls:
499 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] 511 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url']
500 if 'zip_url' in urls: 512 if 'zip_url' in urls:
501 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] 513 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url']
502 return 0 514 return 0
503 515
504 if '__main__' == __name__: 516 if '__main__' == __name__:
505 sys.exit(main(sys.argv)) 517 sys.exit(main(sys.argv))
OLDNEW
« no previous file with comments | « scripts/slave/recipes/chromium.expected/full_chromium_perf_Linux_Builder.json ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698