OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Downloads and unpacks a toolchain for building on Windows. The contents are | 6 """Downloads and unpacks a toolchain for building on Windows. The contents are |
7 matched by sha1 which will be updated when the toolchain is updated. | 7 matched by sha1 which will be updated when the toolchain is updated. |
8 | 8 |
9 Having a toolchain script in depot_tools means that it's not versioned | 9 Having a toolchain script in depot_tools means that it's not versioned |
10 directly with the source code. That is, if the toolchain is upgraded, but | 10 directly with the source code. That is, if the toolchain is upgraded, but |
(...skipping 64 matching lines...) | |
75 for base, _, files in os.walk(root): | 75 for base, _, files in os.walk(root): |
76 paths = [os.path.join(base, f) for f in files] | 76 paths = [os.path.join(base, f) for f in files] |
77 file_list.extend(x.lower() for x in paths) | 77 file_list.extend(x.lower() for x in paths) |
78 return sorted(file_list, key=lambda s: s.replace('/', '\\')) | 78 return sorted(file_list, key=lambda s: s.replace('/', '\\')) |
79 | 79 |
80 | 80 |
81 def MakeTimestampsFileName(root): | 81 def MakeTimestampsFileName(root): |
82 return os.path.join(root, '..', '.timestamps') | 82 return os.path.join(root, '..', '.timestamps') |
83 | 83 |
84 | 84 |
85 def CalculateHash(root): | 85 def CalculateHash(root, expected_hash): |
86 """Calculates the sha1 of the paths to all files in the given |root| and the | 86 """Calculates the sha1 of the paths to all files in the given |root| and the |
87 contents of those files, and returns as a hex string.""" | 87 contents of those files, and returns as a hex string.""" |
88 file_list = GetFileList(root) | 88 file_list = GetFileList(os.path.join(root, expected_hash)) |
89 | |
90 # Check whether we previously saved timestamps in $root/../.timestamps. If | 89 # Check whether we previously saved timestamps in $root/../.timestamps. If |
91 # we didn't, or they don't match, then do the full calculation, otherwise | 90 # we didn't, or they don't match, then do the full calculation, otherwise |
92 # return the saved value. | 91 # return the saved value. |
93 timestamps_file = MakeTimestampsFileName(root) | 92 timestamps_file = MakeTimestampsFileName(root) |
94 timestamps_data = {'files': [], 'sha1': ''} | 93 timestamps_data = {'files': [], 'sha1': ''} |
95 if os.path.exists(timestamps_file): | 94 if os.path.exists(timestamps_file): |
96 with open(timestamps_file, 'rb') as f: | 95 with open(timestamps_file, 'rb') as f: |
97 try: | 96 try: |
98 timestamps_data = json.load(f) | 97 timestamps_data = json.load(f) |
99 except ValueError: | 98 except ValueError: |
100 # json couldn't be loaded, empty data will force a re-hash. | 99 # json couldn't be loaded, empty data will force a re-hash. |
101 pass | 100 pass |
102 | 101 |
103 matches = len(file_list) == len(timestamps_data['files']) | 102 matches = len(file_list) == len(timestamps_data['files']) |
104 if matches: | 103 if matches: |
105 for disk, cached in zip(file_list, timestamps_data['files']): | 104 for disk, cached in zip(file_list, timestamps_data['files']): |
106 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]: | 105 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]: |
107 matches = False | 106 matches = False |
108 break | 107 break |
109 if matches: | 108 if matches: |
110 return timestamps_data['sha1'] | 109 return timestamps_data['sha1'] |
111 | 110 |
112 digest = hashlib.sha1() | 111 digest = hashlib.sha1() |
113 for path in file_list: | 112 for path in file_list: |
114 digest.update(str(path).replace('/', '\\')) | 113 path_without_hash = (str(path).replace('/', '\\').replace(expected_hash, '') |
114 .replace('\\\\', '\\')) | |
115 digest.update(path_without_hash) | |
115 with open(path, 'rb') as f: | 116 with open(path, 'rb') as f: |
116 digest.update(f.read()) | 117 digest.update(f.read()) |
117 return digest.hexdigest() | 118 return digest.hexdigest() |
118 | 119 |
119 | 120 |
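The reason CalculateHash now takes |expected_hash| is that each toolchain lives in a directory named after its own hash, so that component has to be stripped from every path before it is fed to the digest; otherwise the computed sha1 would depend on the directory name rather than on the toolchain contents. A minimal sketch of that normalization (Python 2, like the script), using made-up paths and a made-up hash value rather than a real toolchain install:

    import hashlib

    # Hypothetical values for illustration only.
    expected_hash = '9ff97eeafcd1e40e'
    paths = [
        'vs_files/9ff97eeafcd1e40e/win_sdk/bin/rc.exe',
        'vs_files/9ff97eeafcd1e40e/VC/bin/cl.exe',
    ]

    digest = hashlib.sha1()
    for path in paths:
      # Normalize to backslashes, drop the hash directory, then collapse the
      # doubled separator it leaves behind, so the digest reflects only the
      # layout inside the toolchain, not the hash it is keyed under.
      path_without_hash = (path.replace('/', '\\')
                               .replace(expected_hash, '')
                               .replace('\\\\', '\\'))
      digest.update(path_without_hash)
    print(digest.hexdigest())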
121 def CalculateToolchainHashes(root): | |
122 """Calculate the hash of the different toolchains installed in the |root| | |
123 directory.""" | |
124 hashes = [] | |
125 dir_list = [ | |
126 d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))] | |
127 for d in dir_list: | |
128 hashes.append(CalculateHash(root, d)) | |
129 return hashes | |
130 | |
131 | |
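CalculateToolchainHashes reflects the new on-disk layout: several toolchains can be unpacked side by side, each in a subdirectory named after its hash, and each subdirectory is hashed independently. A rough sketch of that layout and the enumeration it implies, with invented hash values:

    import os

    # Hypothetical layout under depot_tools\win_toolchain:
    #   vs_files\
    #     9ff97eeafcd1e40e\    one unpacked toolchain, keyed by its hash
    #     d5dc33b15d1b2c08\    another toolchain kept alongside it
    root = 'vs_files'
    candidates = [d for d in os.listdir(root)
                  if os.path.isdir(os.path.join(root, d))]
    # Deciding whether a download is needed then reduces to checking that the
    # desired hash is among the hashes recomputed from these subdirectories.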
120 def SaveTimestampsAndHash(root, sha1): | 132 def SaveTimestampsAndHash(root, sha1): |
121 """Saves timestamps and the final hash to be able to early-out more quickly | 133 """Saves timestamps and the final hash to be able to early-out more quickly |
122 next time.""" | 134 next time.""" |
123 file_list = GetFileList(root) | 135 file_list = GetFileList(root) |
124 timestamps_data = { | 136 timestamps_data = { |
125 'files': [[f, os.stat(f).st_mtime] for f in file_list], | 137 'files': [[f, os.stat(f).st_mtime] for f in file_list], |
126 'sha1': sha1, | 138 'sha1': sha1, |
127 } | 139 } |
128 with open(MakeTimestampsFileName(root), 'wb') as f: | 140 with open(MakeTimestampsFileName(root), 'wb') as f: |
129 json.dump(timestamps_data, f) | 141 json.dump(timestamps_data, f) |
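Together, CalculateHash and SaveTimestampsAndHash implement the early-out that keeps repeated "gclient runhooks" invocations fast: the .timestamps file next to the toolchain directory records every file path with its mtime plus the sha1 computed last time, and the cached sha1 is only trusted when every (path, mtime) pair still matches. A minimal standalone sketch of that check, not wired into this script and with the cache file name taken as given:

    import json
    import os

    def cached_sha1_or_none(timestamps_file, file_list):
      """Returns the previously saved sha1 if nothing changed, else None."""
      if not os.path.exists(timestamps_file):
        return None
      with open(timestamps_file, 'rb') as f:
        try:
          data = json.load(f)
        except ValueError:
          return None  # Corrupt cache; force a full re-hash.
      if len(file_list) != len(data['files']):
        return None
      for disk, (cached_path, cached_mtime) in zip(file_list, data['files']):
        if disk != cached_path or os.stat(disk).st_mtime != cached_mtime:
          return None
      return data['sha1']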
(...skipping 191 matching lines...) | |
321 return subprocess.check_output(['cygpath', '-w', path]).strip() | 333 return subprocess.check_output(['cygpath', '-w', path]).strip() |
322 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') | 334 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') |
323 cmd = [python, winpath(__file__)] | 335 cmd = [python, winpath(__file__)] |
324 if options.output_json: | 336 if options.output_json: |
325 cmd.extend(['--output-json', winpath(options.output_json)]) | 337 cmd.extend(['--output-json', winpath(options.output_json)]) |
326 cmd.extend(args) | 338 cmd.extend(args) |
327 sys.exit(subprocess.call(cmd)) | 339 sys.exit(subprocess.call(cmd)) |
328 assert sys.platform != 'cygwin' | 340 assert sys.platform != 'cygwin' |
329 | 341 |
330 # We assume that the Pro hash is the first one. | 342 # We assume that the Pro hash is the first one. |
331 desired_hashes = args | 343 desired_hash = args[0] |
332 if len(desired_hashes) == 0: | 344 if len(desired_hash) == 0: |
scottmg 2016/01/25 23:12:53: I don't think you'll get an empty args[0] here?
Sébastien Marchand 2016/01/27 21:35:19: Moved this check earlier in the script.
333 sys.exit('Desired hashes are required.') | 345 sys.exit('Desired hash is required.') |
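Per the reply above, the emptiness check was moved earlier in the script; the len() check at this point only fires if an empty string is passed explicitly, since a missing argument would already raise IndexError at args[0]. A hedged sketch of what an up-front check might look like (the exact placement in the landed version isn't shown in this diff):

    # Hypothetical early validation, before args[0] is touched anywhere.
    if not args:
      sys.exit('Desired hash is required.')
    desired_hash = args[0]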
334 | 346 |
335 # Move to depot_tools\win_toolchain where we'll store our files, and where | 347 # Move to depot_tools\win_toolchain where we'll store our files, and where |
336 # the downloader script is. | 348 # the downloader script is. |
337 os.chdir(os.path.normpath(os.path.join(BASEDIR))) | 349 os.chdir(os.path.normpath(os.path.join(BASEDIR))) |
338 toolchain_dir = '.' | 350 toolchain_dir = '.' |
339 if os.environ.get('GYP_MSVS_VERSION') == '2015': | 351 if os.environ.get('GYP_MSVS_VERSION') == '2015': |
340 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) | 352 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) |
341 else: | 353 else: |
342 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) | 354 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) |
343 abs_target_dir = os.path.abspath(target_dir) | 355 toolchain_target_dir = os.path.join(target_dir, desired_hash) |
356 | |
357 abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir) | |
344 | 358 |
345 got_new_toolchain = False | 359 got_new_toolchain = False |
346 | 360 |
347 # If the current hash doesn't match what we want in the file, nuke and pave. | 361 # If the current hash doesn't match what we want in the file, nuke and pave. |
348 # Typically this script is only run when the .sha1 file is updated, but | 362 # Typically this script is only run when the .sha1 file is updated, but |
349 # directly calling "gclient runhooks" will also run it, so we cache | 363 # directly calling "gclient runhooks" will also run it, so we cache |
350 # based on timestamps to make that case fast. | 364 # based on timestamps to make that case fast. |
351 current_hash = CalculateHash(target_dir) | 365 current_hashes = CalculateToolchainHashes(target_dir) |
352 if current_hash not in desired_hashes: | 366 if desired_hash not in current_hashes: |
353 should_use_gs = False | 367 should_use_gs = False |
354 if (HaveSrcInternalAccess() or | 368 if (HaveSrcInternalAccess() or |
355 LooksLikeGoogler() or | 369 LooksLikeGoogler() or |
356 CanAccessToolchainBucket()): | 370 CanAccessToolchainBucket()): |
357 should_use_gs = True | 371 should_use_gs = True |
358 if not CanAccessToolchainBucket(): | 372 if not CanAccessToolchainBucket(): |
359 RequestGsAuthentication() | 373 RequestGsAuthentication() |
360 if not should_use_gs: | 374 if not should_use_gs: |
361 print('\n\n\nPlease follow the instructions at ' | 375 print('\n\n\nPlease follow the instructions at ' |
362 'https://www.chromium.org/developers/how-tos/' | 376 'https://www.chromium.org/developers/how-tos/' |
363 'build-instructions-windows\n\n') | 377 'build-instructions-windows\n\n') |
364 return 1 | 378 return 1 |
365 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') | 379 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') |
366 print(' current_hash: %s' % current_hash) | 380 print(' current_hashes: %s' % ', '.join(current_hashes)) |
367 print(' desired_hashes: %s' % ', '.join(desired_hashes)) | 381 print(' desired_hash: %s' % desired_hash) |
368 sys.stdout.flush() | 382 sys.stdout.flush() |
369 DelayBeforeRemoving(target_dir) | 383 DelayBeforeRemoving(toolchain_target_dir) |
370 if sys.platform == 'win32': | 384 if sys.platform == 'win32': |
371 # These stay resident and will make the rmdir below fail. | 385 # These stay resident and will make the rmdir below fail. |
372 kill_list = [ | 386 kill_list = [ |
373 'mspdbsrv.exe', | 387 'mspdbsrv.exe', |
374 'vctip.exe', # Compiler and tools experience improvement data uploader. | 388 'vctip.exe', # Compiler and tools experience improvement data uploader. |
375 ] | 389 ] |
376 for process_name in kill_list: | 390 for process_name in kill_list: |
377 with open(os.devnull, 'wb') as nul: | 391 with open(os.devnull, 'wb') as nul: |
378 subprocess.call(['taskkill', '/f', '/im', process_name], | 392 subprocess.call(['taskkill', '/f', '/im', process_name], |
379 stdin=nul, stdout=nul, stderr=nul) | 393 stdin=nul, stdout=nul, stderr=nul) |
380 if os.path.isdir(target_dir): | 394 if os.path.isdir(toolchain_target_dir): |
381 RmDir(target_dir) | 395 RmDir(toolchain_target_dir) |
382 | 396 |
383 DoTreeMirror(target_dir, desired_hashes[0]) | 397 DoTreeMirror(toolchain_target_dir, desired_hash) |
384 | 398 |
385 got_new_toolchain = True | 399 got_new_toolchain = True |
386 | 400 |
387 win_sdk = os.path.join(abs_target_dir, 'win_sdk') | 401 win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk') |
388 try: | 402 try: |
389 with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f: | 403 with open(os.path.join(toolchain_target_dir, 'VS_VERSION'), 'rb') as f: |
390 vs_version = f.read().strip() | 404 vs_version = f.read().strip() |
391 except IOError: | 405 except IOError: |
392 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' | 406 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' |
393 # instead of just 'win_sdk'. | 407 # instead of just 'win_sdk'. |
394 vs_version = '2013' | 408 vs_version = '2013' |
395 win_sdk = os.path.join(abs_target_dir, 'win8sdk') | 409 win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk') |
396 | 410 |
397 data = { | 411 data = { |
398 'path': abs_target_dir, | 412 'path': abs_toolchain_target_dir, |
399 'version': vs_version, | 413 'version': vs_version, |
400 'win_sdk': win_sdk, | 414 'win_sdk': win_sdk, |
401 # Added for backwards compatibility with old toolchain packages. | 415 # Added for backwards compatibility with old toolchain packages. |
402 'win8sdk': win_sdk, | 416 'win8sdk': win_sdk, |
403 'wdk': os.path.join(abs_target_dir, 'wdk'), | 417 'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'), |
404 'runtime_dirs': [ | 418 'runtime_dirs': [ |
405 os.path.join(abs_target_dir, 'sys64'), | 419 os.path.join(abs_toolchain_target_dir, 'sys64'), |
406 os.path.join(abs_target_dir, 'sys32'), | 420 os.path.join(abs_toolchain_target_dir, 'sys32'), |
407 ], | 421 ], |
408 } | 422 } |
409 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: | 423 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: |
410 json.dump(data, f) | 424 json.dump(data, f) |
411 | 425 |
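data.json is the hand-off from this script to the build configuration: it records the absolute toolchain path, the Visual Studio version, the SDK and WDK locations, and the CRT runtime directories, with 'win8sdk' kept as an alias of 'win_sdk' for older packages. A small sketch of reading it back, assuming the same keys as the dict above and a hypothetical location under depot_tools:

    import json
    import os

    # Hypothetical path; data.json is written next to the toolchain directory.
    with open(os.path.join('depot_tools', 'win_toolchain', 'data.json')) as f:
      toolchain = json.load(f)

    print('VS version: %s' % toolchain['version'])
    print('SDK: %s' % toolchain['win_sdk'])
    for runtime_dir in toolchain['runtime_dirs']:
      print('CRT runtime dir: %s' % runtime_dir)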
412 if got_new_toolchain: | 426 if got_new_toolchain: |
413 current_hash = CalculateHash(target_dir) | 427 current_hashes = CalculateToolchainHashes(target_dir) |
414 if current_hash not in desired_hashes: | 428 if desired_hash not in current_hashes: |
415 print >> sys.stderr, ( | 429 print >> sys.stderr, ( |
416 'Got wrong hash after pulling a new toolchain. ' | 430 'Got wrong hash after pulling a new toolchain. ' |
417 'Wanted one of \'%s\', got \'%s\'.' % ( | 431 'Wanted \'%s\', got one of \'%s\'.' % ( |
418 ', '.join(desired_hashes), current_hash)) | 432 desired_hash, ', '.join(current_hashes))) |
419 return 1 | 433 return 1 |
421 SaveTimestampsAndHash(target_dir, current_hash) | 434 SaveTimestampsAndHash(target_dir, desired_hash) |
421 | 435 |
422 if options.output_json: | 436 if options.output_json: |
423 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), | 437 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), |
424 options.output_json) | 438 options.output_json) |
425 | 439 |
426 if os.environ.get('GYP_MSVS_VERSION') == '2015': | 440 if os.environ.get('GYP_MSVS_VERSION') == '2015': |
427 InstallUniversalCRTIfNeeded(abs_target_dir) | 441 InstallUniversalCRTIfNeeded(abs_toolchain_target_dir) |
428 | 442 |
429 return 0 | 443 return 0 |
430 | 444 |
431 | 445 |
432 if __name__ == '__main__': | 446 if __name__ == '__main__': |
433 sys.exit(main()) | 447 sys.exit(main()) |