| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Downloads and unpacks a toolchain for building on Windows. The contents are | 6 """Downloads and unpacks a toolchain for building on Windows. The contents are |
| 7 matched by sha1 which will be updated when the toolchain is updated. | 7 matched by sha1 which will be updated when the toolchain is updated. |
| 8 | 8 |
| 9 Having a toolchain script in depot_tools means that it's not versioned | 9 Having a toolchain script in depot_tools means that it's not versioned |
| 10 directly with the source code. That is, if the toolchain is upgraded, but | 10 directly with the source code. That is, if the toolchain is upgraded, but |
| (...skipping 62 matching lines...) |
| 73 assert os.path.normpath(root) == root | 73 assert os.path.normpath(root) == root |
| 74 file_list = [] | 74 file_list = [] |
| 75 for base, _, files in os.walk(root): | 75 for base, _, files in os.walk(root): |
| 76 paths = [os.path.join(base, f) for f in files] | 76 paths = [os.path.join(base, f) for f in files] |
| 77 # Ignore WER ReportQueue entries that vctip/cl leave in the bin dir if/when | 77 # Ignore WER ReportQueue entries that vctip/cl leave in the bin dir if/when |
| 78 # they crash. | 78 # they crash. |
| 79 file_list.extend(x.lower() for x in paths if 'WER\\ReportQueue' not in x) | 79 file_list.extend(x.lower() for x in paths if 'WER\\ReportQueue' not in x) |
| 80 return sorted(file_list, key=lambda s: s.replace('/', '\\')) | 80 return sorted(file_list, key=lambda s: s.replace('/', '\\')) |
| 81 | 81 |
| 82 | 82 |
| 83 def MakeTimestampsFileName(root): | 83 def MakeTimestampsFileName(root, sha1): |
| 84 return os.path.join(root, '..', '.timestamps') | 84 return os.path.join(root, os.pardir, '%s.timestamps' % sha1) |
| 85 | 85 |
| 86 | 86 |
| 87 def CalculateHash(root): | 87 def CalculateHash(root, expected_hash): |
| 88 """Calculates the sha1 of the paths to all files in the given |root| and the | 88 """Calculates the sha1 of the paths to all files in the given |root| and the |
| 89 contents of those files, and returns as a hex string.""" | 89 contents of those files, and returns as a hex string. |
| 90 file_list = GetFileList(root) | |
| 91 | 90 |
| 92 # Check whether we previously saved timestamps in $root/../.timestamps. If | 91 |expected_hash| is the expected hash value for this toolchain if it has |
| 93 # we didn't, or they don't match, then do the full calculation, otherwise | 92 already been installed. |
| 93 """ |
| 94 if expected_hash: |
| 95 full_root_path = os.path.join(root, expected_hash) |
| 96 else: |
| 97 full_root_path = root |
| 98 file_list = GetFileList(full_root_path) |
| 99 # Check whether we previously saved timestamps in $root/../{sha1}.timestamps. |
| 100 # If we didn't, or they don't match, then do the full calculation, otherwise |
| 94 # return the saved value. | 101 # return the saved value. |
| 95 timestamps_file = MakeTimestampsFileName(root) | 102 timestamps_file = MakeTimestampsFileName(root, expected_hash) |
| 96 timestamps_data = {'files': [], 'sha1': ''} | 103 timestamps_data = {'files': [], 'sha1': ''} |
| 97 if os.path.exists(timestamps_file): | 104 if os.path.exists(timestamps_file): |
| 98 with open(timestamps_file, 'rb') as f: | 105 with open(timestamps_file, 'rb') as f: |
| 99 try: | 106 try: |
| 100 timestamps_data = json.load(f) | 107 timestamps_data = json.load(f) |
| 101 except ValueError: | 108 except ValueError: |
| 102 # json couldn't be loaded, empty data will force a re-hash. | 109 # json couldn't be loaded, empty data will force a re-hash. |
| 103 pass | 110 pass |
| 104 | 111 |
| 105 matches = len(file_list) == len(timestamps_data['files']) | 112 matches = len(file_list) == len(timestamps_data['files']) |
| 113 # Don't check the timestamp of the VC directory as we touch it to |
| 114 # indicate which versions of the toolchain are still being used. |
| 115 vc_dir = os.path.join(full_root_path, 'VC').lower() |
| 106 if matches: | 116 if matches: |
| 107 for disk, cached in zip(file_list, timestamps_data['files']): | 117 for disk, cached in zip(file_list, timestamps_data['files']): |
| 108 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]: | 118 if disk != cached[0] or ( |
| 119 disk != vc_dir and os.path.getmtime(disk) != cached[1]): |
| 109 matches = False | 120 matches = False |
| 110 break | 121 break |
| 111 if matches: | 122 if matches: |
| 112 return timestamps_data['sha1'] | 123 return timestamps_data['sha1'] |
| 113 | 124 |
| 114 digest = hashlib.sha1() | 125 digest = hashlib.sha1() |
| 115 for path in file_list: | 126 for path in file_list: |
| 116 digest.update(str(path).replace('/', '\\')) | 127 path_without_hash = str(path).replace('/', '\\') |
| 128 if expected_hash: |
| 129 path_without_hash = path_without_hash.replace( |
| 130 os.path.join(root, expected_hash), root) |
| 131 digest.update(path_without_hash) |
| 117 with open(path, 'rb') as f: | 132 with open(path, 'rb') as f: |
| 118 digest.update(f.read()) | 133 digest.update(f.read()) |
| 119 return digest.hexdigest() | 134 return digest.hexdigest() |
| 120 | 135 |
| 121 | 136 |
| 137 def CalculateToolchainHashes(root): |
| 138 """Calculate the hash of the different toolchains installed in the |root| |
| 139 directory.""" |
| 140 hashes = [] |
| 141 dir_list = [ |
| 142 d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))] |
| 143 for d in dir_list: |
| 144 hashes.append(CalculateHash(root, d)) |
| 145 return hashes |
| 146 |
| 147 |
| 122 def SaveTimestampsAndHash(root, sha1): | 148 def SaveTimestampsAndHash(root, sha1): |
| 123 """Saves timestamps and the final hash to be able to early-out more quickly | 149 """Saves timestamps and the final hash to be able to early-out more quickly |
| 124 next time.""" | 150 next time.""" |
| 125 file_list = GetFileList(root) | 151 file_list = GetFileList(os.path.join(root, sha1)) |
| 126 timestamps_data = { | 152 timestamps_data = { |
| 127 'files': [[f, os.stat(f).st_mtime] for f in file_list], | 153 'files': [[f, os.path.getmtime(f)] for f in file_list], |
| 128 'sha1': sha1, | 154 'sha1': sha1, |
| 129 } | 155 } |
| 130 with open(MakeTimestampsFileName(root), 'wb') as f: | 156 with open(MakeTimestampsFileName(root, sha1), 'wb') as f: |
| 131 json.dump(timestamps_data, f) | 157 json.dump(timestamps_data, f) |
| 132 | 158 |
| 133 | 159 |
| 134 def HaveSrcInternalAccess(): | 160 def HaveSrcInternalAccess(): |
| 135 """Checks whether access to src-internal is available.""" | 161 """Checks whether access to src-internal is available.""" |
| 136 with open(os.devnull, 'w') as nul: | 162 with open(os.devnull, 'w') as nul: |
| 137 if subprocess.call( | 163 if subprocess.call( |
| 138 ['svn', 'ls', '--non-interactive', | 164 ['svn', 'ls', '--non-interactive', |
| 139 'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'], | 165 'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'], |
| 140 shell=True, stdin=nul, stdout=nul, stderr=nul) == 0: | 166 shell=True, stdin=nul, stdout=nul, stderr=nul) == 0: |
| (...skipping 89 matching lines...) |
| 230 else: | 256 else: |
| 231 temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip') | 257 temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip') |
| 232 sys.stdout.write('Extracting %s...\n' % local_zip) | 258 sys.stdout.write('Extracting %s...\n' % local_zip) |
| 233 sys.stdout.flush() | 259 sys.stdout.flush() |
| 234 with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf: | 260 with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf: |
| 235 zf.extractall(target_dir) | 261 zf.extractall(target_dir) |
| 236 if temp_dir: | 262 if temp_dir: |
| 237 RmDir(temp_dir) | 263 RmDir(temp_dir) |
| 238 | 264 |
| 239 | 265 |
| 266 def RemoveToolchain(root, sha1, delay_before_removing): |
| 267 """Remove the |sha1| version of the toolchain from |root|.""" |
| 268 toolchain_target_dir = os.path.join(root, sha1) |
| 269 if delay_before_removing: |
| 270 DelayBeforeRemoving(toolchain_target_dir) |
| 271 if sys.platform == 'win32': |
| 272 # These stay resident and will make the rmdir below fail. |
| 273 kill_list = [ |
| 274 'mspdbsrv.exe', |
| 275 'vctip.exe', # Compiler and tools experience improvement data uploader. |
| 276 ] |
| 277 for process_name in kill_list: |
| 278 with open(os.devnull, 'wb') as nul: |
| 279 subprocess.call(['taskkill', '/f', '/im', process_name], |
| 280 stdin=nul, stdout=nul, stderr=nul) |
| 281 if os.path.isdir(toolchain_target_dir): |
| 282 RmDir(toolchain_target_dir) |
| 283 |
| 284 timestamp_file = MakeTimestampsFileName(root, sha1) |
| 285 if os.path.exists(timestamp_file): |
| 286 os.remove(timestamp_file) |
| 287 |
| 288 |
| 289 def RemoveUnusedToolchains(root): |
| 290 """Remove the versions of the toolchain that haven't been used recently.""" |
| 291 valid_toolchains = [] |
| 292 dirs_to_remove = [] |
| 293 |
| 294 for d in os.listdir(root): |
| 295 full_path = os.path.join(root, d) |
| 296 if os.path.isdir(full_path): |
| 297 if not os.path.exists(MakeTimestampsFileName(root, d)): |
| 298 dirs_to_remove.append(d) |
| 299 else: |
| 300 vc_dir = os.path.join(full_path, 'VC') |
| 301 valid_toolchains.append((os.path.getmtime(vc_dir), d)) |
| 302 elif os.path.isfile(full_path): |
| 303 os.remove(full_path) |
| 304 |
| 305 for d in dirs_to_remove: |
| 306 print ('Removing %s as it doesn\'t correspond to any known toolchain.' % |
| 307 os.path.join(root, d)) |
| 308 # Use the RemoveToolchain function to remove these directories as they might |
| 309 # contain an older version of the toolchain. |
| 310 RemoveToolchain(root, d, False) |
| 311 |
| 312 # Remove the versions of the toolchains that haven't been used in the past 30 |
| 313 # days. |
| 314 toolchain_expiration_time = 60 * 60 * 24 * 30 |
| 315 for toolchain in valid_toolchains: |
| 316 toolchain_age_in_sec = time.time() - toolchain[0] |
| 317 if toolchain_age_in_sec > toolchain_expiration_time: |
| 318 print ('Removing version %s of the Win toolchain as it hasn\'t been used' |
| 319 ' in the past %d days.' % (toolchain[1], |
| 320 toolchain_age_in_sec / 60 / 60 / 24)) |
| 321 RemoveToolchain(root, toolchain[1], True) |
| 322 |
| 323 |
| 240 def GetInstallerName(): | 324 def GetInstallerName(): |
| 241 """Return the name of the Windows 10 Universal C Runtime installer for the | 325 """Return the name of the Windows 10 Universal C Runtime installer for the |
| 242 current platform, or None if installer is not needed or not applicable. | 326 current platform, or None if installer is not needed or not applicable. |
| 243 The registry has to be used instead of sys.getwindowsversion() because | 327 The registry has to be used instead of sys.getwindowsversion() because |
| 244 Python 2.7 is only manifested as being compatible up to Windows 8, so the | 328 Python 2.7 is only manifested as being compatible up to Windows 8, so the |
| 245 version APIs helpfully return a maximum of 6.2 (Windows 8). | 329 version APIs helpfully return a maximum of 6.2 (Windows 8). |
| 246 """ | 330 """ |
| 247 key_name = r'Software\Microsoft\Windows NT\CurrentVersion' | 331 key_name = r'Software\Microsoft\Windows NT\CurrentVersion' |
| 248 key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name) | 332 key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name) |
| 249 value, keytype = winreg.QueryValueEx(key, "CurrentVersion") | 333 value, keytype = winreg.QueryValueEx(key, "CurrentVersion") |
| (...skipping 72 matching lines...) |
| 322 def winpath(path): | 406 def winpath(path): |
| 323 return subprocess.check_output(['cygpath', '-w', path]).strip() | 407 return subprocess.check_output(['cygpath', '-w', path]).strip() |
| 324 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') | 408 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') |
| 325 cmd = [python, winpath(__file__)] | 409 cmd = [python, winpath(__file__)] |
| 326 if options.output_json: | 410 if options.output_json: |
| 327 cmd.extend(['--output-json', winpath(options.output_json)]) | 411 cmd.extend(['--output-json', winpath(options.output_json)]) |
| 328 cmd.extend(args) | 412 cmd.extend(args) |
| 329 sys.exit(subprocess.call(cmd)) | 413 sys.exit(subprocess.call(cmd)) |
| 330 assert sys.platform != 'cygwin' | 414 assert sys.platform != 'cygwin' |
| 331 | 415 |
| 332 # We assume that the Pro hash is the first one. | 416 if len(args) == 0: |
| 333 desired_hashes = args | 417 sys.exit('Desired hash is required.') |
| 334 if len(desired_hashes) == 0: | 418 desired_hash = args[0] |
| 335 sys.exit('Desired hashes are required.') | |
| 336 | 419 |
| 337 # Move to depot_tools\win_toolchain where we'll store our files, and where | 420 # Move to depot_tools\win_toolchain where we'll store our files, and where |
| 338 # the downloader script is. | 421 # the downloader script is. |
| 339 os.chdir(os.path.normpath(os.path.join(BASEDIR))) | 422 os.chdir(os.path.normpath(os.path.join(BASEDIR))) |
| 340 toolchain_dir = '.' | 423 toolchain_dir = '.' |
| 341 if os.environ.get('GYP_MSVS_VERSION') == '2015': | 424 if os.environ.get('GYP_MSVS_VERSION') == '2015': |
| 342 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) | 425 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) |
| 343 else: | 426 else: |
| 344 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) | 427 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) |
| 345 abs_target_dir = os.path.abspath(target_dir) | 428 if not os.path.isdir(target_dir): |
| 429 os.mkdir(target_dir) |
| 430 toolchain_target_dir = os.path.join(target_dir, desired_hash) |
| 431 |
| 432 abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir) |
| 346 | 433 |
| 347 got_new_toolchain = False | 434 got_new_toolchain = False |
| 348 | 435 |
| 349 # If the current hash doesn't match what we want in the file, nuke and pave. | 436 # If the current hash doesn't match what we want in the file, nuke and pave. |
| 350 # Typically this script is only run when the .sha1 one file is updated, but | 437 # Typically this script is only run when the .sha1 one file is updated, but |
| 351 # directly calling "gclient runhooks" will also run it, so we cache | 438 # directly calling "gclient runhooks" will also run it, so we cache |
| 352 # based on timestamps to make that case fast. | 439 # based on timestamps to make that case fast. |
| 353 current_hash = CalculateHash(target_dir) | 440 current_hashes = CalculateToolchainHashes(target_dir) |
| 354 if current_hash not in desired_hashes: | 441 if desired_hash not in current_hashes: |
| 355 should_use_gs = False | 442 should_use_gs = False |
| 356 if (HaveSrcInternalAccess() or | 443 if (HaveSrcInternalAccess() or |
| 357 LooksLikeGoogler() or | 444 LooksLikeGoogler() or |
| 358 CanAccessToolchainBucket()): | 445 CanAccessToolchainBucket()): |
| 359 should_use_gs = True | 446 should_use_gs = True |
| 360 if not CanAccessToolchainBucket(): | 447 if not CanAccessToolchainBucket(): |
| 361 RequestGsAuthentication() | 448 RequestGsAuthentication() |
| 362 if not should_use_gs: | 449 if not should_use_gs: |
| 363 print('\n\n\nPlease follow the instructions at ' | 450 print('\n\n\nPlease follow the instructions at ' |
| 364 'https://www.chromium.org/developers/how-tos/' | 451 'https://www.chromium.org/developers/how-tos/' |
| 365 'build-instructions-windows\n\n') | 452 'build-instructions-windows\n\n') |
| 366 return 1 | 453 return 1 |
| 367 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') | 454 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') |
| 368 print(' current_hash: %s' % current_hash) | 455 print(' current_hashes: %s' % ', '.join(current_hashes)) |
| 369 print(' desired_hashes: %s' % ', '.join(desired_hashes)) | 456 print(' desired_hash: %s' % desired_hash) |
| 370 sys.stdout.flush() | 457 sys.stdout.flush() |
| 371 DelayBeforeRemoving(target_dir) | |
| 372 if sys.platform == 'win32': | |
| 373 # These stay resident and will make the rmdir below fail. | |
| 374 kill_list = [ | |
| 375 'mspdbsrv.exe', | |
| 376 'vctip.exe', # Compiler and tools experience improvement data uploader. | |
| 377 ] | |
| 378 for process_name in kill_list: | |
| 379 with open(os.devnull, 'wb') as nul: | |
| 380 subprocess.call(['taskkill', '/f', '/im', process_name], | |
| 381 stdin=nul, stdout=nul, stderr=nul) | |
| 382 if os.path.isdir(target_dir): | |
| 383 RmDir(target_dir) | |
| 384 | 458 |
| 385 DoTreeMirror(target_dir, desired_hashes[0]) | 459 DoTreeMirror(toolchain_target_dir, desired_hash) |
| 386 | 460 |
| 387 got_new_toolchain = True | 461 got_new_toolchain = True |
| 388 | 462 |
| 389 win_sdk = os.path.join(abs_target_dir, 'win_sdk') | 463 win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk') |
| 390 try: | 464 try: |
| 391 with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f: | 465 version_file = os.path.join(toolchain_target_dir, 'VS_VERSION') |
| 466 vc_dir = os.path.join(toolchain_target_dir, 'VC') |
| 467 with open(version_file, 'rb') as f: |
| 392 vs_version = f.read().strip() | 468 vs_version = f.read().strip() |
| 469 # Touch the VC directory so we can use its timestamp to know when this |
| 470 # version of the toolchain was last used. |
| 471 os.utime(vc_dir, None) |
| 393 except IOError: | 472 except IOError: |
| 394 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' | 473 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' |
| 395 # instead of just 'win_sdk'. | 474 # instead of just 'win_sdk'. |
| 396 vs_version = '2013' | 475 vs_version = '2013' |
| 397 win_sdk = os.path.join(abs_target_dir, 'win8sdk') | 476 win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk') |
| 398 | 477 |
| 399 data = { | 478 data = { |
| 400 'path': abs_target_dir, | 479 'path': abs_toolchain_target_dir, |
| 401 'version': vs_version, | 480 'version': vs_version, |
| 402 'win_sdk': win_sdk, | 481 'win_sdk': win_sdk, |
| 403 # Added for backwards compatibility with old toolchain packages. | 482 # Added for backwards compatibility with old toolchain packages. |
| 404 'win8sdk': win_sdk, | 483 'win8sdk': win_sdk, |
| 405 'wdk': os.path.join(abs_target_dir, 'wdk'), | 484 'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'), |
| 406 'runtime_dirs': [ | 485 'runtime_dirs': [ |
| 407 os.path.join(abs_target_dir, 'sys64'), | 486 os.path.join(abs_toolchain_target_dir, 'sys64'), |
| 408 os.path.join(abs_target_dir, 'sys32'), | 487 os.path.join(abs_toolchain_target_dir, 'sys32'), |
| 409 ], | 488 ], |
| 410 } | 489 } |
| 411 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: | 490 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: |
| 412 json.dump(data, f) | 491 json.dump(data, f) |
| 413 | 492 |
| 414 if got_new_toolchain: | 493 if got_new_toolchain: |
| 415 current_hash = CalculateHash(target_dir) | 494 current_hashes = CalculateToolchainHashes(target_dir) |
| 416 if current_hash not in desired_hashes: | 495 if desired_hash not in current_hashes: |
| 417 print >> sys.stderr, ( | 496 print >> sys.stderr, ( |
| 418 'Got wrong hash after pulling a new toolchain. ' | 497 'Got wrong hash after pulling a new toolchain. ' |
| 419 'Wanted one of \'%s\', got \'%s\'.' % ( | 498 'Wanted \'%s\', got one of \'%s\'.' % ( |
| 420 ', '.join(desired_hashes), current_hash)) | 499 desired_hash, ', '.join(current_hashes))) |
| 421 return 1 | 500 return 1 |
| 422 SaveTimestampsAndHash(target_dir, current_hash) | 501 SaveTimestampsAndHash(target_dir, desired_hash) |
| 423 | 502 |
| 424 if options.output_json: | 503 if options.output_json: |
| 425 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), | 504 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), |
| 426 options.output_json) | 505 options.output_json) |
| 427 | 506 |
| 428 if os.environ.get('GYP_MSVS_VERSION') == '2015': | 507 if os.environ.get('GYP_MSVS_VERSION') == '2015': |
| 429 InstallUniversalCRTIfNeeded(abs_target_dir) | 508 InstallUniversalCRTIfNeeded(abs_toolchain_target_dir) |
| 509 |
| 510 RemoveUnusedToolchains(target_dir) |
| 430 | 511 |
| 431 return 0 | 512 return 0 |
| 432 | 513 |
| 433 | 514 |
| 434 if __name__ == '__main__': | 515 if __name__ == '__main__': |
| 435 sys.exit(main()) | 516 sys.exit(main()) |
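
Reviewer note (not part of the CL): the sketch below restates the caching and cleanup scheme this patch introduces, to make the intent easier to follow. The helper names (`timestamps_file_for`, `quick_hash_check`, `toolchain_is_stale`) are illustrative stand-ins, not functions from depot_tools; the real logic lives in `MakeTimestampsFileName`, `CalculateHash`, and `RemoveUnusedToolchains` above.

```python
import json
import os
import time

# The CL expires toolchain versions that haven't been used for 30 days.
TOOLCHAIN_EXPIRATION_SEC = 60 * 60 * 24 * 30


def timestamps_file_for(root, sha1):
    # One cache file per toolchain version, stored next to the versioned
    # directories: <root>/../<sha1>.timestamps (mirrors MakeTimestampsFileName).
    return os.path.join(root, os.pardir, '%s.timestamps' % sha1)


def quick_hash_check(root, sha1):
    """Return the cached sha1 if no recorded mtime changed, else None.

    This is the early-out CalculateHash relies on: hashing the whole
    toolchain is slow, so the cached hash is trusted as long as every
    recorded (path, mtime) pair still matches what's on disk.
    """
    cache_path = timestamps_file_for(root, sha1)
    if not os.path.exists(cache_path):
        return None
    try:
        with open(cache_path, 'rb') as f:
            cached = json.load(f)
    except ValueError:
        return None  # A corrupt cache simply forces a full re-hash.
    for path, mtime in cached.get('files', []):
        if not os.path.exists(path) or os.path.getmtime(path) != mtime:
            return None
    return cached.get('sha1')


def toolchain_is_stale(toolchain_dir):
    """True if this toolchain version looks unused for the expiration window.

    The patch touches the VC directory whenever a version is used, so its
    mtime doubles as a "last used" marker for garbage collection.
    """
    vc_dir = os.path.join(toolchain_dir, 'VC')
    if not os.path.isdir(vc_dir):
        return True
    return time.time() - os.path.getmtime(vc_dir) > TOOLCHAIN_EXPIRATION_SEC
```

The key design point is that both the hash cache and the "last used" marker are now keyed by the toolchain's sha1, which is what lets several versions coexist under vs_files/ and be garbage-collected independently.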