Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(184)

Side by Side Diff: scripts/slave/zip_build.py

Issue 2128613005: Archive Linux perf builds for manual bisect (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/build.git@master
Patch Set: Code style change Created 4 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """ Creates a zip file in the staging dir with the result of a compile. 6 """ Creates a zip file in the staging dir with the result of a compile.
7 It can be sent to other machines for testing. 7 It can be sent to other machines for testing.
8 """ 8 """
9 9
10 import csv 10 import csv
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
159 tmp_revision_file.write('%s' % build_revision) 159 tmp_revision_file.write('%s' % build_revision)
160 tmp_revision_file.close() 160 tmp_revision_file.close()
161 chromium_utils.MakeWorldReadable(tmp_revision_file.name) 161 chromium_utils.MakeWorldReadable(tmp_revision_file.name)
162 dest_path = os.path.join(dirname, 162 dest_path = os.path.join(dirname,
163 chromium_utils.FULL_BUILD_REVISION_FILENAME) 163 chromium_utils.FULL_BUILD_REVISION_FILENAME)
164 shutil.move(tmp_revision_file.name, dest_path) 164 shutil.move(tmp_revision_file.name, dest_path)
165 return dest_path 165 return dest_path
166 except IOError: 166 except IOError:
167 print 'Writing to revision file in %s failed.' % dirname 167 print 'Writing to revision file in %s failed.' % dirname
168 168
169
170 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, 169 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
171 zip_file_name): 170 zip_file_name, strip_symbol=False):
172 """Creates an unversioned full build archive. 171 """Creates an unversioned full build archive.
173 Returns the path of the created archive.""" 172 Returns the path of the created archive."""
173 strip_files = None
174 if strip_symbol:
175 strip_files = ['chrome', 'nacl_helper']
174 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, 176 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
175 zip_file_name, 177 zip_file_name,
176 zip_file_list, 178 zip_file_list,
177 build_dir, 179 build_dir,
178 raise_error=True) 180 raise_error=True,
181 strip_files=strip_files)
182
179 chromium_utils.RemoveDirectory(zip_dir) 183 chromium_utils.RemoveDirectory(zip_dir)
180 if not os.path.exists(zip_file): 184 if not os.path.exists(zip_file):
181 raise StagingError('Failed to make zip package %s' % zip_file) 185 raise StagingError('Failed to make zip package %s' % zip_file)
182 chromium_utils.MakeWorldReadable(zip_file) 186 chromium_utils.MakeWorldReadable(zip_file)
183 187
184 # Report the size of the zip file to help catch when it gets too big and 188 # Report the size of the zip file to help catch when it gets too big and
185 # can cause bot failures from timeouts during downloads to testers. 189 # can cause bot failures from timeouts during downloads to testers.
186 zip_size = os.stat(zip_file)[stat.ST_SIZE] 190 zip_size = os.stat(zip_file)[stat.ST_SIZE]
187 print 'Zip file is %ld bytes' % zip_size 191 print 'Zip file is %ld bytes' % zip_size
188 192
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
253 def __init__(self, options): 257 def __init__(self, options):
254 def CommaStrParser(val): 258 def CommaStrParser(val):
255 return [f.strip() for f in csv.reader([val]).next()] 259 return [f.strip() for f in csv.reader([val]).next()]
256 self.inclusions = CommaStrParser(options.include_files) 260 self.inclusions = CommaStrParser(options.include_files)
257 self.exclusions = (CommaStrParser(options.exclude_files) 261 self.exclusions = (CommaStrParser(options.exclude_files)
258 + chromium_utils.FileExclusions()) 262 + chromium_utils.FileExclusions())
259 263
260 self.regex_whitelist = FileRegexWhitelist(options) 264 self.regex_whitelist = FileRegexWhitelist(options)
261 self.regex_blacklist = FileRegexBlacklist(options) 265 self.regex_blacklist = FileRegexBlacklist(options)
262 self.exclude_unmatched = options.exclude_unmatched 266 self.exclude_unmatched = options.exclude_unmatched
267 self.ignore_regex = options.ignore_regex
263 268
264 def __str__(self): 269 def __str__(self):
265 return '\n '.join([ 270 return '\n '.join([
266 'Zip rules', 271 'Zip rules',
267 'Inclusions: %s' % self.inclusions, 272 'Inclusions: %s' % self.inclusions,
268 'Exclusions: %s' % self.exclusions, 273 'Exclusions: %s' % self.exclusions,
269 "Whitelist regex: '%s'" % self.regex_whitelist, 274 "Whitelist regex: '%s'" % self.regex_whitelist,
270 "Blacklist regex: '%s'" % self.regex_blacklist, 275 "Blacklist regex: '%s'" % self.regex_blacklist,
271 'Zip unmatched files: %s' % (not self.exclude_unmatched)]) 276 'Zip unmatched files: %s' % (not self.exclude_unmatched),
277 'Ignore regex matches: %s' % (self.ignore_regex)])
278
272 279
273 def Match(self, filename): 280 def Match(self, filename):
274 if filename in self.inclusions: 281 if filename in self.inclusions:
275 return True 282 return True
276 if filename in self.exclusions: 283 if filename in self.exclusions:
277 return False 284 return False
285 if self.ignore_regex:
286 return False
278 if re.match(self.regex_whitelist, filename): 287 if re.match(self.regex_whitelist, filename):
279 return True 288 return True
280 if re.match(self.regex_blacklist, filename): 289 if re.match(self.regex_blacklist, filename):
281 return False 290 return False
282 return not self.exclude_unmatched 291 return not self.exclude_unmatched
283 292
284 293
285 def Archive(options): 294 def Archive(options):
286 build_dir = build_directory.GetBuildOutputDirectory( 295 build_dir = build_directory.GetBuildOutputDirectory(
287 options.src_dir, options.cros_board) 296 options.src_dir, options.cros_board)
288 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) 297 build_dir = os.path.abspath(os.path.join(build_dir, options.target))
289
290 staging_dir = slave_utils.GetStagingDir(options.src_dir) 298 staging_dir = slave_utils.GetStagingDir(options.src_dir)
291 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) 299 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir)
292 300
293 if not options.build_revision: 301 if not options.build_revision:
294 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( 302 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions(
295 options.src_dir, options.webkit_dir, options.revision_dir) 303 options.src_dir, options.webkit_dir, options.revision_dir)
296 else: 304 else:
297 build_revision = options.build_revision 305 build_revision = options.build_revision
298 webkit_revision = options.webkit_revision 306 webkit_revision = options.webkit_revision
299 307
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
345 353
346 zip_file_list = [f for f in root_files if path_filter.Match(f)] 354 zip_file_list = [f for f in root_files if path_filter.Match(f)]
347 355
348 # TODO(yzshen): Once we have swarming support ready, we could use it to 356 # TODO(yzshen): Once we have swarming support ready, we could use it to
349 # archive run time dependencies of tests and remove this step. 357 # archive run time dependencies of tests and remove this step.
350 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) 358 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py'])
351 print 'Include mojom files: %s' % mojom_files 359 print 'Include mojom files: %s' % mojom_files
352 zip_file_list.extend(mojom_files) 360 zip_file_list.extend(mojom_files)
353 361
354 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, 362 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
355                               unversioned_base_name) 363                               unversioned_base_name, strip_symbol=options.strip_symbol)
356 364
357 zip_base, zip_ext, versioned_file = MakeVersionedArchive( 365 zip_base, zip_ext, versioned_file = MakeVersionedArchive(
358 zip_file, version_suffix, options) 366 zip_file, version_suffix, options)
359 367
360 prune_limit = 10 368 prune_limit = 10
361 if options.build_url.startswith('gs://'): 369 if options.build_url.startswith('gs://'):
362 # Don't keep builds lying around when uploading them to google storage. 370 # Don't keep builds lying around when uploading them to google storage.
363 prune_limit = 3 371 prune_limit = 3
364 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) 372 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit)
365 373
(...skipping 26 matching lines...) Expand all
392 help='build target to archive (Debug or Release)') 400 help='build target to archive (Debug or Release)')
393 option_parser.add_option('--src-dir', default='src', 401 option_parser.add_option('--src-dir', default='src',
394 help='path to the top-level sources directory') 402 help='path to the top-level sources directory')
395 option_parser.add_option('--build-dir', help='ignored') 403 option_parser.add_option('--build-dir', help='ignored')
396 option_parser.add_option('--exclude-files', default='', 404 option_parser.add_option('--exclude-files', default='',
397 help='Comma separated list of files that should ' 405 help='Comma separated list of files that should '
398 'always be excluded from the zip.') 406 'always be excluded from the zip.')
399 option_parser.add_option('--include-files', default='', 407 option_parser.add_option('--include-files', default='',
400 help='Comma separated list of files that should ' 408 help='Comma separated list of files that should '
401 'always be included in the zip.') 409 'always be included in the zip.')
410 option_parser.add_option('--ignore_regex', action='store_true',
411 default=False, help='Ignores regex matches')
402 option_parser.add_option('--master-name', help='Name of the buildbot master.') 412 option_parser.add_option('--master-name', help='Name of the buildbot master.')
403 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') 413 option_parser.add_option('--slave-name', help='Name of the buildbot slave.')
404 option_parser.add_option('--build-number', type=int, 414 option_parser.add_option('--build-number', type=int,
405 help='Buildbot build number.') 415 help='Buildbot build number.')
406 option_parser.add_option('--parent-build-number', type=int, 416 option_parser.add_option('--parent-build-number', type=int,
407 help='Buildbot parent build number.') 417 help='Buildbot parent build number.')
408 option_parser.add_option('--webkit-dir', 418 option_parser.add_option('--webkit-dir',
409 help='webkit directory path, relative to --src-dir') 419 help='webkit directory path, relative to --src-dir')
410 option_parser.add_option('--revision-dir', 420 option_parser.add_option('--revision-dir',
411 help='Directory path that shall be used to decide ' 421 help='Directory path that shall be used to decide '
(...skipping 11 matching lines...) Expand all
423 help=('Optional URL to which to upload build ' 433 help=('Optional URL to which to upload build '
424 '(overrides build_url factory property)')) 434 '(overrides build_url factory property)'))
425 option_parser.add_option('--cros-board', 435 option_parser.add_option('--cros-board',
426 help=('If building for Chrom[e|ium]OS via the ' 436 help=('If building for Chrom[e|ium]OS via the '
427 'simple chrome workflow, the name of the ' 437 'simple chrome workflow, the name of the '
428 'target CROS board.')) 438 'target CROS board.'))
429 option_parser.add_option('--package-dsym-files', action='store_true', 439 option_parser.add_option('--package-dsym-files', action='store_true',
430 default=False, help='Add also dSYM files.') 440 default=False, help='Add also dSYM files.')
431 option_parser.add_option('--append-deps-patch-sha', action='store_true') 441 option_parser.add_option('--append-deps-patch-sha', action='store_true')
432 option_parser.add_option('--gs-acl') 442 option_parser.add_option('--gs-acl')
443 option_parser.add_option('--strip_symbol', action='store_true',
444                            default=False, help='Strip symbols from chrome executable.')
433 option_parser.add_option('--json-urls', 445 option_parser.add_option('--json-urls',
434 help=('Path to json file containing uploaded ' 446 help=('Path to json file containing uploaded '
435 'archive urls. If this is omitted then ' 447 'archive urls. If this is omitted then '
436 'the urls will be emitted as buildbot ' 448 'the urls will be emitted as buildbot '
437 'annotations.')) 449 'annotations.'))
438 chromium_utils.AddPropertiesOptions(option_parser) 450 chromium_utils.AddPropertiesOptions(option_parser)
439 451
440 options, args = option_parser.parse_args(argv) 452 options, args = option_parser.parse_args(argv)
441 453
442 if not options.master_name: 454 if not options.master_name:
(...skipping 27 matching lines...) Expand all
470 json.dump(urls, json_file) 482 json.dump(urls, json_file)
471 else: # we need to print buildbot annotations 483 else: # we need to print buildbot annotations
472 if 'storage_url' in urls: 484 if 'storage_url' in urls:
473 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] 485 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url']
474 if 'zip_url' in urls: 486 if 'zip_url' in urls:
475 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] 487 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url']
476 return 0 488 return 0
477 489
478 if '__main__' == __name__: 490 if '__main__' == __name__:
479 sys.exit(main(sys.argv)) 491 sys.exit(main(sys.argv))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698