Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """ Creates a zip file in the staging dir with the result of a compile. | 6 """ Creates a zip file in the staging dir with the result of a compile. |
| 7 It can be sent to other machines for testing. | 7 It can be sent to other machines for testing. |
| 8 """ | 8 """ |
| 9 | 9 |
| 10 import csv | 10 import csv |
| (...skipping 152 matching lines...) | |
| 163 chromium_utils.FULL_BUILD_REVISION_FILENAME) | 163 chromium_utils.FULL_BUILD_REVISION_FILENAME) |
| 164 shutil.move(tmp_revision_file.name, dest_path) | 164 shutil.move(tmp_revision_file.name, dest_path) |
| 165 return dest_path | 165 return dest_path |
| 166 except IOError: | 166 except IOError: |
| 167 print 'Writing to revision file in %s failed.' % dirname | 167 print 'Writing to revision file in %s failed.' % dirname |
| 168 | 168 |
| 169 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, | 169 def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, |
| 170 zip_file_name, strip_files=None): | 170 zip_file_name, strip_files=None): |
| 171 """Creates an unversioned full build archive. | 171 """Creates an unversioned full build archive. |
| 172 Returns the path of the created archive.""" | 172 Returns the path of the created archive.""" |
| | 173 # Prevents having zip_file_list to contain duplicates |
| | 174 zip_file_list = list(set(zip_file_list)) |
| 173 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, | 175 (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir, |
| 174 zip_file_name, | 176 zip_file_name, |
| 175 zip_file_list, | 177 zip_file_list, |
| 176 build_dir, | 178 build_dir, |
| 177 raise_error=True, | 179 raise_error=True, |
| 178 strip_files=strip_files) | 180 strip_files=strip_files) |
| 179 | 181 |
| 180 chromium_utils.RemoveDirectory(zip_dir) | 182 chromium_utils.RemoveDirectory(zip_dir) |
| 181 if not os.path.exists(zip_file): | 183 if not os.path.exists(zip_file): |
| 182 raise StagingError('Failed to make zip package %s' % zip_file) | 184 raise StagingError('Failed to make zip package %s' % zip_file) |
| (...skipping 80 matching lines...) | |
| 263 | 265 |
| 264 class PathMatcher(object): | 266 class PathMatcher(object): |
| 265 """Generates a matcher which can be used to filter file paths.""" | 267 """Generates a matcher which can be used to filter file paths.""" |
| 266 | 268 |
| 267 def __init__(self, options): | 269 def __init__(self, options): |
| 268 def CommaStrParser(val): | 270 def CommaStrParser(val): |
| 269 return [f.strip() for f in csv.reader([val]).next()] | 271 return [f.strip() for f in csv.reader([val]).next()] |
| 270 self.inclusions = CommaStrParser(options.include_files) | 272 self.inclusions = CommaStrParser(options.include_files) |
| 271 self.exclusions = (CommaStrParser(options.exclude_files) | 273 self.exclusions = (CommaStrParser(options.exclude_files) |
| 272 + chromium_utils.FileExclusions()) | 274 + chromium_utils.FileExclusions()) |
| 273 | |
| 274 self.regex_whitelist = FileRegexWhitelist(options) | 275 self.regex_whitelist = FileRegexWhitelist(options) |
| 275 self.regex_blacklist = FileRegexBlacklist(options) | 276 self.regex_blacklist = FileRegexBlacklist(options) |
| 276 self.exclude_unmatched = options.exclude_unmatched | 277 self.exclude_unmatched = options.exclude_unmatched |
| 277 self.ignore_regex = options.ignore_regex | 278 self.exclude_extra = options.exclude_extra |
| | 279 self.custom_whitelist = options.whitelist |
| 278 | 280 |
| 279 def __str__(self): | 281 def __str__(self): |
| 280 return '\n '.join([ | 282 return '\n '.join([ |
| 281 'Zip rules', | 283 'Zip rules', |
| 282 'Inclusions: %s' % self.inclusions, | 284 'Inclusions: %s' % self.inclusions, |
| 283 'Exclusions: %s' % self.exclusions, | 285 'Exclusions: %s' % self.exclusions, |
| 284 "Whitelist regex: '%s'" % self.regex_whitelist, | 286 "Whitelist regex: '%s'" % self.regex_whitelist, |
| 285 "Blacklist regex: '%s'" % self.regex_blacklist, | 287 "Blacklist regex: '%s'" % self.regex_blacklist, |
| 286 'Zip unmatched files: %s' % (not self.exclude_unmatched), | 288 'Zip unmatched files: %s' % (not self.exclude_unmatched), |
| 287 'Ignore regex matches: %s' % self.ignore_regex]) | 289 'Exclude extra: %s' % self.exclude_extra, |
| | 290 "Custom Whitelist regex: '%s'" % self.custom_whitelist]) |
| 288 | 291 |
| 289 | 292 |
| 290 def Match(self, filename): | 293 def Match(self, filename): |
| 291 if filename in self.inclusions: | 294 if filename in self.inclusions: |
| 292 return True | 295 return True |
| 293 if filename in self.exclusions: | 296 if filename in self.exclusions: |
| 294 return False | 297 return False |
| 295 if self.ignore_regex: | 298 if (self.custom_whitelist and |
| | 299 re.match(self.custom_whitelist, filename)): |
| | 300 return True |
| | 301 if self.exclude_extra: |
| 296 return False | 302 return False |
| 297 if re.match(self.regex_whitelist, filename): | 303 if re.match(self.regex_whitelist, filename): |
| 298 return True | 304 return True |
| 299 if re.match(self.regex_blacklist, filename): | 305 if re.match(self.regex_blacklist, filename): |
| 300 return False | 306 return False |
| 301 return not self.exclude_unmatched | 307 return not self.exclude_unmatched |
| 302 | 308 |
| 303 | 309 |
| 304 def Archive(options): | 310 def Archive(options): |
| 305 build_dir = build_directory.GetBuildOutputDirectory( | 311 build_dir = build_directory.GetBuildOutputDirectory( |
| (...skipping 109 matching lines...) | |
| 415 help='build target to archive (Debug or Release)') | 421 help='build target to archive (Debug or Release)') |
| 416 option_parser.add_option('--src-dir', default='src', | 422 option_parser.add_option('--src-dir', default='src', |
| 417 help='path to the top-level sources directory') | 423 help='path to the top-level sources directory') |
| 418 option_parser.add_option('--build-dir', help='ignored') | 424 option_parser.add_option('--build-dir', help='ignored') |
| 419 option_parser.add_option('--exclude-files', default='', | 425 option_parser.add_option('--exclude-files', default='', |
| 420 help='Comma separated list of files that should ' | 426 help='Comma separated list of files that should ' |
| 421 'always be excluded from the zip.') | 427 'always be excluded from the zip.') |
| 422 option_parser.add_option('--include-files', default='', | 428 option_parser.add_option('--include-files', default='', |
| 423 help='Comma separated list of files that should ' | 429 help='Comma separated list of files that should ' |
| 424 'always be included in the zip.') | 430 'always be included in the zip.') |
| 425 option_parser.add_option('--ignore-regex', action='store_true', | 431 option_parser.add_option('--whitelist', default='', |
| 426 default=False, help='Ignores regex matches') | 432 help='Custom regex whitelist to include files') |
| | 433 option_parser.add_option('--exclude-extra', action='store_true', |
| | 434 default=False, help='Only includes include file list and ' |

ghost stip (do not use) 2016/08/30 23:48:53: nit: 80 chars
miimnk 2016/08/31 00:19:05: Done.

| | 435 'regex whitelist match provided') |
| 427 option_parser.add_option('--master-name', help='Name of the buildbot master.') | 436 option_parser.add_option('--master-name', help='Name of the buildbot master.') |
| 428 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') | 437 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') |
| 429 option_parser.add_option('--build-number', type=int, | 438 option_parser.add_option('--build-number', type=int, |
| 430 help='Buildbot build number.') | 439 help='Buildbot build number.') |
| 431 option_parser.add_option('--parent-build-number', type=int, | 440 option_parser.add_option('--parent-build-number', type=int, |
| 432 help='Buildbot parent build number.') | 441 help='Buildbot parent build number.') |
| 433 option_parser.add_option('--webkit-dir', | 442 option_parser.add_option('--webkit-dir', |
| 434 help='webkit directory path, relative to --src-dir') | 443 help='webkit directory path, relative to --src-dir') |
| 435 option_parser.add_option('--revision-dir', | 444 option_parser.add_option('--revision-dir', |
| 436 help='Directory path that shall be used to decide ' | 445 help='Directory path that shall be used to decide ' |
| (...skipping 71 matching lines...) | |
| 508 json.dump(urls, json_file) | 517 json.dump(urls, json_file) |
| 509 else: # we need to print buildbot annotations | 518 else: # we need to print buildbot annotations |
| 510 if 'storage_url' in urls: | 519 if 'storage_url' in urls: |
| 511 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] | 520 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] |
| 512 if 'zip_url' in urls: | 521 if 'zip_url' in urls: |
| 513 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] | 522 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] |
| 514 return 0 | 523 return 0 |
| 515 | 524 |
| 516 if '__main__' == __name__: | 525 if '__main__' == __name__: |
| 517 sys.exit(main(sys.argv)) | 526 sys.exit(main(sys.argv)) |
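For readers skimming the patch, the filtering precedence introduced in the new `PathMatcher.Match` can be summarized with this minimal standalone sketch. The free-function form, argument names, and example values are illustrative simplifications, not code from the CL; the real class derives these values from the option parser.

```python
import re

# Minimal sketch of the precedence the patched PathMatcher.Match applies.
def should_zip(filename, inclusions, exclusions, custom_whitelist,
               exclude_extra, regex_whitelist, regex_blacklist,
               exclude_unmatched):
  if filename in inclusions:                # --include-files always wins
    return True
  if filename in exclusions:                # then --exclude-files
    return False
  if custom_whitelist and re.match(custom_whitelist, filename):
    return True                             # then the new --whitelist regex
  if exclude_extra:                         # --exclude-extra drops the rest
    return False
  if re.match(regex_whitelist, filename):   # built-in whitelist regex
    return True
  if re.match(regex_blacklist, filename):   # built-in blacklist regex
    return False
  return not exclude_unmatched              # default for unmatched files

# With --exclude-extra set, only explicit inclusions and --whitelist matches
# survive (illustrative values):
print(should_zip('chrome.pak', [], [], r'.*\.pak$', True, '', '', False))  # True
print(should_zip('notes.txt', [], [], r'.*\.pak$', True, '', '', False))   # False
```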
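The comma-separated option values (`--include-files`, `--exclude-files`) are split by the small `CommaStrParser` helper shown in the diff above. A hedged standalone sketch of the same idea follows; it uses `next()` so it runs on both Python 2 and 3, whereas the CL itself calls the Python 2 `csv.reader(...).next()`.

```python
import csv

# csv.reader splits on commas while respecting quotes, so a quoted filename
# may itself contain a comma.
def comma_str_parser(val):
  return [f.strip() for f in next(csv.reader([val]))]

print(comma_str_parser('chrome.exe, chrome.dll,"foo, bar.txt"'))
# -> ['chrome.exe', 'chrome.dll', 'foo, bar.txt']
```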