Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """ Creates a zip file in the staging dir with the result of a compile. | 6 """ Creates a zip file in the staging dir with the result of a compile. |
| 7 It can be sent to other machines for testing. | 7 It can be sent to other machines for testing. |
| 8 """ | 8 """ |
| 9 | 9 |
import csv
import fnmatch
import glob
import json
import optparse
import os
import re
import shutil
import stat
import subprocess
import sys
import tempfile

from common import chromium_utils
from slave import build_directory
from slave import slave_utils
| 25 | 25 |
| 26 STRIP_LIST_LINUX = ['chrome', 'nacl_helper'] | |
|
dimu1
2016/07/07 21:54:11
Add the list as an argument, and pass this as an argument with a default value.
| |
| 27 | |
| 28 | |
class StagingError(Exception):
  """Raised when staging or packaging the build output fails."""
| 27 | 30 |
| 28 | 31 |
| 29 def CopyDebugCRT(build_dir): | 32 def CopyDebugCRT(build_dir): |
| 30 # Copy the relevant CRT DLLs to |build_dir|. We copy DLLs from all versions | 33 # Copy the relevant CRT DLLs to |build_dir|. We copy DLLs from all versions |
| 31 # of VS installed to make sure we have the correct CRT version, unused DLLs | 34 # of VS installed to make sure we have the correct CRT version, unused DLLs |
| 32 # should not conflict with the others anyways. | 35 # should not conflict with the others anyways. |
| 33 crt_dlls = glob.glob( | 36 crt_dlls = glob.glob( |
| 34 'C:\\Program Files (x86)\\Microsoft Visual Studio *\\VC\\redist\\' | 37 'C:\\Program Files (x86)\\Microsoft Visual Studio *\\VC\\redist\\' |
| 35 'Debug_NonRedist\\x86\\Microsoft.*.DebugCRT\\*.dll') | 38 'Debug_NonRedist\\x86\\Microsoft.*.DebugCRT\\*.dll') |
| (...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 160 tmp_revision_file.close() | 163 tmp_revision_file.close() |
| 161 chromium_utils.MakeWorldReadable(tmp_revision_file.name) | 164 chromium_utils.MakeWorldReadable(tmp_revision_file.name) |
| 162 dest_path = os.path.join(dirname, | 165 dest_path = os.path.join(dirname, |
| 163 chromium_utils.FULL_BUILD_REVISION_FILENAME) | 166 chromium_utils.FULL_BUILD_REVISION_FILENAME) |
| 164 shutil.move(tmp_revision_file.name, dest_path) | 167 shutil.move(tmp_revision_file.name, dest_path) |
| 165 return dest_path | 168 return dest_path |
| 166 except IOError: | 169 except IOError: |
| 167 print 'Writing to revision file in %s failed.' % dirname | 170 print 'Writing to revision file in %s failed.' % dirname |
| 168 | 171 |
| 169 | 172 |
| 173 | |
def unzip_strip_zip(zip_file, zip_file_list, strip_list):
  """Re-creates |zip_file| with symbols stripped from selected entries.

  The archive is extracted into a temporary directory next to |zip_file|,
  `strip` is run on every file named in |strip_list|, and the archive is
  rebuilt in place from |zip_file_list|.

  TODO: strip the binaries before the initial zip is created to avoid this
  zip -> unzip -> re-zip roundtrip (raised in review).

  Args:
    zip_file: Path of the zip file to rewrite in place.
    zip_file_list: List of file names to include in the new zip file.
    strip_list: List of file names (relative to the archive root) whose
        symbols should be stripped.
  """
  # Local import keeps this change self-contained; subprocess is stdlib.
  import subprocess

  output_dir = os.path.dirname(os.path.abspath(zip_file))
  tmp_dir = os.path.join(output_dir, 'tmp')
  unzip_dir = os.path.join(
      tmp_dir, os.path.basename(os.path.splitext(zip_file)[0]))
  chromium_utils.ExtractZip(zip_file, tmp_dir)

  # Use an argument list rather than os.system() with string interpolation,
  # so paths containing spaces or shell metacharacters cannot break (or
  # inject into) the command. Also avoid shadowing the `file` builtin.
  for name in strip_list:
    target = os.path.join(unzip_dir, name)
    if os.path.exists(target):
      subprocess.call(['strip', target])

  chromium_utils.RemoveFile(zip_file)
  (zip_dir, zip_file) = chromium_utils.MakeZip(
      output_dir, os.path.splitext(zip_file)[0], zip_file_list, unzip_dir)
  chromium_utils.RemoveDirectory(tmp_dir)
  chromium_utils.RemoveDirectory(zip_dir)
| 203 | |
| 204 | |
def MakeUnversionedArchive(build_dir, staging_dir, zip_file_list,
                           zip_file_name, strip_symbol=False,
                           strip_list=None):
  """Creates an unversioned full build archive.

  Args:
    build_dir: Directory containing the build output to archive.
    staging_dir: Directory in which the zip file is created.
    zip_file_list: Files (relative to build_dir) to put in the zip.
    zip_file_name: Base name (without extension) of the zip file.
    strip_symbol: If True, re-create the archive with symbols stripped
        from the binaries named in |strip_list|.
    strip_list: File names to strip when strip_symbol is set. Defaults to
        STRIP_LIST_LINUX; passed as a parameter (per review) so callers
        can supply their own list.

  Returns:
    The path of the created archive.

  Raises:
    StagingError: If the zip file was not created.
  """
  (zip_dir, zip_file) = chromium_utils.MakeZip(staging_dir,
                                               zip_file_name,
                                               zip_file_list,
                                               build_dir,
                                               raise_error=True)
  chromium_utils.RemoveDirectory(zip_dir)
  if not os.path.exists(zip_file):
    raise StagingError('Failed to make zip package %s' % zip_file)
  if strip_symbol:
    if strip_list is None:
      strip_list = STRIP_LIST_LINUX
    unzip_strip_zip(zip_file, zip_file_list, strip_list)
  chromium_utils.MakeWorldReadable(zip_file)

  # Report the size of the zip file to help catch when it gets too big and
  # can cause bot failures from timeouts during downloads to testers.
  zip_size = os.stat(zip_file)[stat.ST_SIZE]
  print('Zip file is %ld bytes' % zip_size)

  return zip_file
| 190 | 228 |
| 191 | 229 |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
def __init__(self, options):
  """Builds the inclusion/exclusion state from parsed command-line options.

  Args:
    options: optparse result providing include_files, exclude_files,
        exclude_unmatched and exclusive_include, plus whatever the regex
        helpers read.
  """
  def CommaStrParser(val):
    # The builtin next() works on both Python 2 and 3; the .next() method
    # was removed in Python 3.
    return [f.strip() for f in next(csv.reader([val]))]
  self.inclusions = CommaStrParser(options.include_files)
  self.exclusions = (CommaStrParser(options.exclude_files)
                     + chromium_utils.FileExclusions())

  self.regex_whitelist = FileRegexWhitelist(options)
  self.regex_blacklist = FileRegexBlacklist(options)
  self.exclude_unmatched = options.exclude_unmatched
  self.exclusive_include = options.exclusive_include
| 263 | 302 |
def __str__(self):
  """Returns a human-readable dump of the active zip rules."""
  lines = [
      'Zip rules',
      'Inclusions: %s' % self.inclusions,
      'Exclusions: %s' % self.exclusions,
      "Whitelist regex: '%s'" % self.regex_whitelist,
      "Blacklist regex: '%s'" % self.regex_blacklist,
      'Zip unmatched files: %s' % (not self.exclude_unmatched),
  ]
  return '\n '.join(lines)
| 272 | 311 |
def Match(self, filename):
  """Decides whether |filename| belongs in the zip.

  Precedence: explicit inclusion, then the exclusive-include cutoff, then
  explicit exclusion, whitelist regex, blacklist regex, and finally the
  unmatched-files default.
  """
  if filename in self.inclusions:
    return True
  if self.exclusive_include:
    # Exclusive mode: only the explicit inclusion list survives.
    return False
  if filename in self.exclusions:
    return False
  if re.match(self.regex_whitelist, filename):
    return True
  return not (re.match(self.regex_blacklist, filename)
              or self.exclude_unmatched)
| 283 | 325 |
| 284 | 326 |
| 285 def Archive(options): | 327 def Archive(options): |
| 286 build_dir = build_directory.GetBuildOutputDirectory( | 328 build_dir = build_directory.GetBuildOutputDirectory( |
| 287 options.src_dir, options.cros_board) | 329 options.src_dir, options.cros_board) |
| 288 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) | 330 build_dir = os.path.abspath(os.path.join(build_dir, options.target)) |
| 289 | |
| 290 staging_dir = slave_utils.GetStagingDir(options.src_dir) | 331 staging_dir = slave_utils.GetStagingDir(options.src_dir) |
| 291 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) | 332 chromium_utils.MakeParentDirectoriesWorldReadable(staging_dir) |
| 292 | 333 |
| 293 if not options.build_revision: | 334 if not options.build_revision: |
| 294 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( | 335 (build_revision, webkit_revision) = slave_utils.GetBuildRevisions( |
| 295 options.src_dir, options.webkit_dir, options.revision_dir) | 336 options.src_dir, options.webkit_dir, options.revision_dir) |
| 296 else: | 337 else: |
| 297 build_revision = options.build_revision | 338 build_revision = options.build_revision |
| 298 webkit_revision = options.webkit_revision | 339 webkit_revision = options.webkit_revision |
| 299 | 340 |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 345 | 386 |
| 346 zip_file_list = [f for f in root_files if path_filter.Match(f)] | 387 zip_file_list = [f for f in root_files if path_filter.Match(f)] |
| 347 | 388 |
| 348 # TODO(yzshen): Once we have swarming support ready, we could use it to | 389 # TODO(yzshen): Once we have swarming support ready, we could use it to |
| 349 # archive run time dependencies of tests and remove this step. | 390 # archive run time dependencies of tests and remove this step. |
| 350 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) | 391 mojom_files = _MojomFiles(build_dir, ['.mojom.js', '_mojom.py']) |
| 351 print 'Include mojom files: %s' % mojom_files | 392 print 'Include mojom files: %s' % mojom_files |
| 352 zip_file_list.extend(mojom_files) | 393 zip_file_list.extend(mojom_files) |
| 353 | 394 |
| 354 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, | 395 zip_file = MakeUnversionedArchive(build_dir, staging_dir, zip_file_list, |
| 355 unversioned_base_name) | 396 unversioned_base_name, strip_symbol=options.strip_symbol) |
| 356 | 397 |
| 357 zip_base, zip_ext, versioned_file = MakeVersionedArchive( | 398 zip_base, zip_ext, versioned_file = MakeVersionedArchive( |
| 358 zip_file, version_suffix, options) | 399 zip_file, version_suffix, options) |
| 359 | 400 |
| 360 prune_limit = 10 | 401 prune_limit = 10 |
| 361 if options.build_url.startswith('gs://'): | 402 if options.build_url.startswith('gs://'): |
| 362 # Don't keep builds lying around when uploading them to google storage. | 403 # Don't keep builds lying around when uploading them to google storage. |
| 363 prune_limit = 3 | 404 prune_limit = 3 |
| 364 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) | 405 PruneOldArchives(staging_dir, zip_base, zip_ext, prune_limit=prune_limit) |
| 365 | 406 |
| (...skipping 26 matching lines...) Expand all Loading... | |
| 392 help='build target to archive (Debug or Release)') | 433 help='build target to archive (Debug or Release)') |
| 393 option_parser.add_option('--src-dir', default='src', | 434 option_parser.add_option('--src-dir', default='src', |
| 394 help='path to the top-level sources directory') | 435 help='path to the top-level sources directory') |
| 395 option_parser.add_option('--build-dir', help='ignored') | 436 option_parser.add_option('--build-dir', help='ignored') |
| 396 option_parser.add_option('--exclude-files', default='', | 437 option_parser.add_option('--exclude-files', default='', |
| 397 help='Comma separated list of files that should ' | 438 help='Comma separated list of files that should ' |
| 398 'always be excluded from the zip.') | 439 'always be excluded from the zip.') |
| 399 option_parser.add_option('--include-files', default='', | 440 option_parser.add_option('--include-files', default='', |
| 400 help='Comma separated list of files that should ' | 441 help='Comma separated list of files that should ' |
| 401 'always be included in the zip.') | 442 'always be included in the zip.') |
| 443 option_parser.add_option('--exclusive_include', action='store_true', | |
| 444 default=False, help='Only include the files in include-files list') | |
| 402 option_parser.add_option('--master-name', help='Name of the buildbot master.') | 445 option_parser.add_option('--master-name', help='Name of the buildbot master.') |
| 403 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') | 446 option_parser.add_option('--slave-name', help='Name of the buildbot slave.') |
| 404 option_parser.add_option('--build-number', type=int, | 447 option_parser.add_option('--build-number', type=int, |
| 405 help='Buildbot build number.') | 448 help='Buildbot build number.') |
| 406 option_parser.add_option('--parent-build-number', type=int, | 449 option_parser.add_option('--parent-build-number', type=int, |
| 407 help='Buildbot parent build number.') | 450 help='Buildbot parent build number.') |
| 408 option_parser.add_option('--webkit-dir', | 451 option_parser.add_option('--webkit-dir', |
| 409 help='webkit directory path, relative to --src-dir') | 452 help='webkit directory path, relative to --src-dir') |
| 410 option_parser.add_option('--revision-dir', | 453 option_parser.add_option('--revision-dir', |
| 411 help='Directory path that shall be used to decide ' | 454 help='Directory path that shall be used to decide ' |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 423 help=('Optional URL to which to upload build ' | 466 help=('Optional URL to which to upload build ' |
| 424 '(overrides build_url factory property)')) | 467 '(overrides build_url factory property)')) |
| 425 option_parser.add_option('--cros-board', | 468 option_parser.add_option('--cros-board', |
| 426 help=('If building for Chrom[e|ium]OS via the ' | 469 help=('If building for Chrom[e|ium]OS via the ' |
| 427 'simple chrome workflow, the name of the ' | 470 'simple chrome workflow, the name of the ' |
| 428 'target CROS board.')) | 471 'target CROS board.')) |
| 429 option_parser.add_option('--package-dsym-files', action='store_true', | 472 option_parser.add_option('--package-dsym-files', action='store_true', |
| 430 default=False, help='Add also dSYM files.') | 473 default=False, help='Add also dSYM files.') |
| 431 option_parser.add_option('--append-deps-patch-sha', action='store_true') | 474 option_parser.add_option('--append-deps-patch-sha', action='store_true') |
| 432 option_parser.add_option('--gs-acl') | 475 option_parser.add_option('--gs-acl') |
| 476 option_parser.add_option('--strip_symbol', action='store_true', | |
| 477 default=False, help='Strip symbols from chrome executable.') | |
| 433 option_parser.add_option('--json-urls', | 478 option_parser.add_option('--json-urls', |
| 434 help=('Path to json file containing uploaded ' | 479 help=('Path to json file containing uploaded ' |
| 435 'archive urls. If this is omitted then ' | 480 'archive urls. If this is omitted then ' |
| 436 'the urls will be emitted as buildbot ' | 481 'the urls will be emitted as buildbot ' |
| 437 'annotations.')) | 482 'annotations.')) |
| 438 chromium_utils.AddPropertiesOptions(option_parser) | 483 chromium_utils.AddPropertiesOptions(option_parser) |
| 439 | 484 |
| 440 options, args = option_parser.parse_args(argv) | 485 options, args = option_parser.parse_args(argv) |
| 441 | 486 |
| 442 if not options.master_name: | 487 if not options.master_name: |
| (...skipping 27 matching lines...) Expand all Loading... | |
| 470 json.dump(urls, json_file) | 515 json.dump(urls, json_file) |
| 471 else: # we need to print buildbot annotations | 516 else: # we need to print buildbot annotations |
| 472 if 'storage_url' in urls: | 517 if 'storage_url' in urls: |
| 473 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] | 518 print '@@@STEP_LINK@download@%s@@@' % urls['storage_url'] |
| 474 if 'zip_url' in urls: | 519 if 'zip_url' in urls: |
| 475 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] | 520 print '@@@SET_BUILD_PROPERTY@build_archive_url@"%s"@@@' % urls['zip_url'] |
| 476 return 0 | 521 return 0 |
| 477 | 522 |
| 478 if '__main__' == __name__: | 523 if '__main__' == __name__: |
| 479 sys.exit(main(sys.argv)) | 524 sys.exit(main(sys.argv)) |
| OLD | NEW |