OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """ Creates a zip file in the staging dir with the result of a compile. | 6 """ Creates a zip file in the staging dir with the result of a compile. |
7 It can be sent to other machines for testing. | 7 It can be sent to other machines for testing. |
8 """ | 8 """ |
9 | 9 |
10 import csv | 10 import csv |
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
159 tmp_revision_file.write('%s' % build_revision) | 159 tmp_revision_file.write('%s' % build_revision) |
160 tmp_revision_file.close() | 160 tmp_revision_file.close() |
161 chromium_utils.MakeWorldReadable(tmp_revision_file.name) | 161 chromium_utils.MakeWorldReadable(tmp_revision_file.name) |
162 dest_path = os.path.join(dirname, | 162 dest_path = os.path.join(dirname, |
163 chromium_utils.FULL_BUILD_REVISION_FILENAME) | 163 chromium_utils.FULL_BUILD_REVISION_FILENAME) |
164 shutil.move(tmp_revision_file.name, dest_path) | 164 shutil.move(tmp_revision_file.name, dest_path) |
165 return dest_path | 165 return dest_path |
166 except IOError: | 166 except IOError: |
167 print 'Writing to revision file in %s failed.' % dirname | 167 print 'Writing to revision file in %s failed.' % dirname |
168 | 168 |
169 | |
170 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, | 169 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, |
171 zip_file_name): | 170 zip_file_name, strip_files=None): |
172 """Creates an unversioned full build archive. | 171 """Creates an unversioned full build archive. |
173 Returns the path of the created archive.""" | 172 Returns the path of the created archive.""" |
173 if strip_files: | |
174 strip_files = [f.strip() for f in csv.reader([strip_files]).next()] | |
dtu
2016/08/05 21:06:19
might be more readable to do strip_files.split(',') here instead of csv.reader
miimnk
2016/08/08 22:41:32
Done.
| |
174 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, | 175 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, |
175 zip_file_name, | 176 zip_file_name, |
176 zip_file_list, | 177 zip_file_list, |
177 build_dir, | 178 build_dir, |
178 raise_error=True) | 179 raise_error=True, |
180 strip_files=strip_files) | |
181 | |
179 chromium_utils.RemoveDirectory(zip_dir) | 182 chromium_utils.RemoveDirectory(zip_dir) |
180 if not os.path.exists(zip_file): | 183 if not os.path.exists(zip_file): |
181 raise StagingError('Failed to make zip package %s' % zip_file) | 184 raise StagingError('Failed to make zip package %s' % zip_file) |
182 chromium_utils.MakeWorldReadable(zip_file) | 185 chromium_utils.MakeWorldReadable(zip_file) |
183 | 186 |
184 # Report the size of the zip file to help catch when it gets too big and | 187 # Report the size of the zip file to help catch when it gets too big and |
185 # can cause bot failures from timeouts during downloads to testers. | 188 # can cause bot failures from timeouts during downloads to testers. |
186 zip_size = os.stat(zip_file)[stat.ST_SIZE] | 189 zip_size = os.stat(zip_file)[stat.ST_SIZE] |
187 print 'Zip file is %ld bytes' % zip_size | 190 print 'Zip file is %ld bytes' % zip_size |
188 | 191 |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
253 def __init__(self, options): | 256 def __init__(self, options): |
254 def CommaStrParser(val): | 257 def CommaStrParser(val): |
255 return [f.strip() for f in csv.reader([val]).next()] | 258 return [f.strip() for f in csv.reader([val]).next()] |
256 self.inclusions = CommaStrParser(options.include_files) | 259 self.inclusions = CommaStrParser(options.include_files) |
257 self.exclusions = (CommaStrParser(options.exclude_files) | 260 self.exclusions = (CommaStrParser(options.exclude_files) |
258 + chromium_utils.FileExclusions()) | 261 + chromium_utils.FileExclusions()) |
259 | 262 |
260 self.regex_whitelist = FileRegexWhitelist(options) | 263 self.regex_whitelist = FileRegexWhitelist(options) |
261 self.regex_blacklist = FileRegexBlacklist(options) | 264 self.regex_blacklist = FileRegexBlacklist(options) |
262 self.exclude_unmatched = options.exclude_unmatched | 265 self.exclude_unmatched = options.exclude_unmatched |
266 self.ignore_regex = options.ignore_regex | |
263 | 267 |
264 def __str__(self): | 268 def __str__(self): |
265 return '\n '.join([ | 269 return '\n '.join([ |
266 'Zip rules', | 270 'Zip rules', |
267 'Inclusions: %s' % self.inclusions, | 271 'Inclusions: %s' % self.inclusions, |
268 'Exclusions: %s' % self.exclusions, | 272 'Exclusions: %s' % self.exclusions, |
269 "Whitelist regex: '%s'" % self.regex_whitelist, | 273 "Whitelist regex: '%s'" % self.regex_whitelist, |
270 "Blacklist regex: '%s'" % self.regex_blacklist, | 274 "Blacklist regex: '%s'" % self.regex_blacklist, |
271 'Zip unmatched files: %s' % (not self.exclude_unmatched)]) | 275 'Zip unmatched files: %s' % (not self.exclude_unmatched), |
276 'Ignore regex matches: %s' % (self.ignore_regex)]) | |
dtu
2016/08/05 21:06:19
style nit: unnecessary parentheses
miimnk
2016/08/08 22:41:32
Done.
| |
277 | |
272 | 278 |
273 def Match(self, filename): | 279 def Match(self, filename): |
274 if filename in self.inclusions: | 280 if filename in self.inclusions: |
275 return True | 281 return True |
276 if filename in self.exclusions: | 282 if filename in self.exclusions: |
277 return False | 283 return False |
284 if self.ignore_regex: | |
285 return False | |
278 if re.match(self.regex_whitelist, filename): | 286 if re.match(self.regex_whitelist, filename): |
279 return True | 287 return True |
280 if re.match(self.regex_blacklist, filename): | 288 if re.match(self.regex_blacklist, filename): |
281 return False | 289 return False |
282 return not self.exclude_unmatched | 290 return not self.exclude_unmatched |
283 | 291 |
284 | 292 |
285 def Archive(options): | 293 def Archive(options): |
286 build_dir = build_directory.GetBuildOutputDirectory( | 294 build_dir = build_directory.GetBuildOutputDirectory( |
287 options.src_dir, options.cros_board) | 295 options.src_dir, options.cros_board) |
288 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) | 296 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) |
289 | |
290 staging_dir = slave_utils.GetStagingDir(options.src_dir) | 297 staging_dir = slave_utils.GetStagingDir(options.src_dir) |
291 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) | 298 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) |
292 | |
293 if not options.build_revision: | 299 if not options.build_revision: |
294 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( | 300 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( |
295 options.src_dir, options.webkit_dir, options.revision_dir) | 301 options.src_dir, options.webkit_dir, options.revision_dir) |
296 else: | 302 else: |
297 build_revision = options.build_revision | 303 build_revision = options.build_revision |
298 webkit_revision = options.webkit_revision | 304 webkit_revision = options.webkit_revision |
299 | 305 |
300 unversioned_base_name, version_suffix = slave_utils.GetZipFileNames( | 306 unversioned_base_name, version_suffix = slave_utils.GetZipFileNames( |
301 options.master_name, | 307 options.master_name, |
302 options.build_number, | 308 options.build_number, |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
345 | 351 |
346 zip_file_list = [f for f in root_files if path_filter.Match(f)] | 352 zip_file_list = [f for f in root_files if path_filter.Match(f)] |
347 | 353 |
348 # TODO(yzshen): Once we have swarming support ready, we could use it to | 354 # TODO(yzshen): Once we have swarming support ready, we could use it to |
349 # archive run time dependencies of tests and remove this step. | 355 # archive run time dependencies of tests and remove this step. |
350 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) | 356 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) |
351 print 'Include mojom files: %s' % mojom_files | 357 print 'Include mojom files: %s' % mojom_files |
352 zip_file_list.extend(mojom_files) | 358 zip_file_list.extend(mojom_files) |
353 | 359 |
354 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, | 360 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, |
355 unversioned_base_name) | 361 unversioned_base_name, |
362 strip_files=options.strip_files) | |
356 | 363 |
357 zip_base, zip_ext, versioned_file = MakeVersionedArchive( | 364 zip_base, zip_ext, versioned_file = MakeVersionedArchive( |
358 zip_file, version_suffix, options) | 365 zip_file, version_suffix, options) |
359 | 366 |
360 prune_limit = 10 | 367 prune_limit = 10 |
361 if options.build_url.startswith('gs://'): | 368 if options.build_url.startswith('gs://'): |
362 # Don't keep builds lying around when uploading them to google storage. | 369 # Don't keep builds lying around when uploading them to google storage. |
363 prune_limit = 3 | 370 prune_limit = 3 |
364 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) | 371 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) |
365 | 372 |
(...skipping 26 matching lines...) Expand all Loading... | |
392 help='build target to archive (Debug or Release)') | 399 help='build target to archive (Debug or Release)') |
393 option_parser.add_option('--src-dir', default='src', | 400 option_parser.add_option('--src-dir', default='src', |
394 help='path to the top-level sources directory') | 401 help='path to the top-level sources directory') |
395 option_parser.add_option('--build-dir', help='ignored') | 402 option_parser.add_option('--build-dir', help='ignored') |
396 option_parser.add_option('--exclude-files', default='', | 403 option_parser.add_option('--exclude-files', default='', |
397 help='Comma separated list of files that should ' | 404 help='Comma separated list of files that should ' |
398 'always be excluded from the zip.') | 405 'always be excluded from the zip.') |
399 option_parser.add_option('--include-files', default='', | 406 option_parser.add_option('--include-files', default='', |
400 help='Comma separated list of files that should ' | 407 help='Comma separated list of files that should ' |
401 'always be included in the zip.') | 408 'always be included in the zip.') |
409 option_parser.add_option('--ignore_regex', action='store_true', | |
dtu
2016/08/05 21:06:19
Command-line args should be separated by - instead of _ (i.e. --ignore-regex)
miimnk
2016/08/08 22:41:30
Done.
| |
410 default=False, help='Ignores regex matches') | |
402 option_parser.add_option('--master-name', help='Name of the buildbot master.') | 411 option_parser.add_option('--master-name', help='Name of the buildbot master.') |
403 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') | 412 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') |
404 option_parser.add_option('--build-number', type=int, | 413 option_parser.add_option('--build-number', type=int, |
405 help='Buildbot build number.') | 414 help='Buildbot build number.') |
406 option_parser.add_option('--parent-build-number', type=int, | 415 option_parser.add_option('--parent-build-number', type=int, |
407 help='Buildbot parent build number.') | 416 help='Buildbot parent build number.') |
408 option_parser.add_option('--webkit-dir', | 417 option_parser.add_option('--webkit-dir', |
409 help='webkit directory path, relative to --src-dir') | 418 help='webkit directory path, relative to --src-dir') |
410 option_parser.add_option('--revision-dir', | 419 option_parser.add_option('--revision-dir', |
411 help='Directory path that shall be used to decide ' | 420 help='Directory path that shall be used to decide ' |
(...skipping 11 matching lines...) Expand all Loading... | |
423 help=('Optional URL to which to upload build ' | 432 help=('Optional URL to which to upload build ' |
424 '(overrides build_url factory property)')) | 433 '(overrides build_url factory property)')) |
425 option_parser.add_option('--cros-board', | 434 option_parser.add_option('--cros-board', |
426 help=('If building for Chrom[e|ium]OS via the ' | 435 help=('If building for Chrom[e|ium]OS via the ' |
427 'simple chrome workflow, the name of the ' | 436 'simple chrome workflow, the name of the ' |
428 'target CROS board.')) | 437 'target CROS board.')) |
429 option_parser.add_option('--package-dsym-files', action='store_true', | 438 option_parser.add_option('--package-dsym-files', action='store_true', |
430 default=False, help='Add also dSYM files.') | 439 default=False, help='Add also dSYM files.') |
431 option_parser.add_option('--append-deps-patch-sha', action='store_true') | 440 option_parser.add_option('--append-deps-patch-sha', action='store_true') |
432 option_parser.add_option('--gs-acl') | 441 option_parser.add_option('--gs-acl') |
442 option_parser.add_option('--strip_files', default='', | |
443 help='Comma separated list of files that should ' | |
444 'be stripped of symbols in the zip.') | |
dtu
2016/08/05 21:06:19
I'd prefer if you convert the argument to a list in the option parser rather than in MakeUnversionedArchive
miimnk
2016/08/08 22:41:31
Done.
| |
433 option_parser.add_option('--json-urls', | 445 option_parser.add_option('--json-urls', |
434 help=('Path to json file containing uploaded ' | 446 help=('Path to json file containing uploaded ' |
435 'archive urls. If this is omitted then ' | 447 'archive urls. If this is omitted then ' |
436 'the urls will be emitted as buildbot ' | 448 'the urls will be emitted as buildbot ' |
437 'annotations.')) | 449 'annotations.')) |
438 chromium_utils.AddPropertiesOptions(option_parser) | 450 chromium_utils.AddPropertiesOptions(option_parser) |
439 | 451 |
440 options, args = option_parser.parse_args(argv) | 452 options, args = option_parser.parse_args(argv) |
441 | 453 |
442 if not options.master_name: | 454 if not options.master_name: |
(...skipping 27 matching lines...) Expand all Loading... | |
470 json.dump(urls, json_file) | 482 json.dump(urls, json_file) |
471 else: # we need to print buildbot annotations | 483 else: # we need to print buildbot annotations |
472 if 'storage_url' in urls: | 484 if 'storage_url' in urls: |
473 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] | 485 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] |
474 if 'zip_url' in urls: | 486 if 'zip_url' in urls: |
475 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] | 487 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] |
476 return 0 | 488 return 0 |
477 | 489 |
478 if '__main__' == __name__: | 490 if '__main__' == __name__: |
479 sys.exit(main(sys.argv)) | 491 sys.exit(main(sys.argv)) |
OLD | NEW |