Index: win_toolchain/get_toolchain_if_necessary.py |
diff --git a/win_toolchain/get_toolchain_if_necessary.py b/win_toolchain/get_toolchain_if_necessary.py |
index 07e82729eee15089c6fbe9d90f4d8e99d580a6c9..f89bfa2ef78f7a218ddbac644f9ce83f3b900d31 100755 |
--- a/win_toolchain/get_toolchain_if_necessary.py |
+++ b/win_toolchain/get_toolchain_if_necessary.py |
@@ -78,19 +78,18 @@ def GetFileList(root): |
return sorted(file_list, key=lambda s: s.replace('/', '\\')) |
-def MakeTimestampsFileName(root): |
- return os.path.join(root, '..', '.timestamps') |
+def MakeTimestampsFileName(root, sha1): |
+ return os.path.join(root, os.pardir, '%s.timestamps' % sha1) |
-def CalculateHash(root): |
+def CalculateHash(root, expected_hash=''): |
scottmg
2016/01/27 21:46:12
Can we get rid of the default value for expected_hash?
Sébastien Marchand
2016/01/28 22:01:15
Done, it was here because it's used by vs_toolchain.
|
"""Calculates the sha1 of the paths to all files in the given |root| and the |
contents of those files, and returns as a hex string.""" |
- file_list = GetFileList(root) |
- |
+ file_list = GetFileList(os.path.join(root, expected_hash)) |
# Check whether we previously saved timestamps in $root/../.timestamps. If |
scottmg
2016/01/27 21:46:12
../{sha1}.timestamps
Sébastien Marchand
2016/01/28 22:01:15
Done.
|
# we didn't, or they don't match, then do the full calculation, otherwise |
# return the saved value. |
- timestamps_file = MakeTimestampsFileName(root) |
+ timestamps_file = MakeTimestampsFileName(root, expected_hash) |
timestamps_data = {'files': [], 'sha1': ''} |
if os.path.exists(timestamps_file): |
with open(timestamps_file, 'rb') as f: |
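For context, the timestamps file read back here is the per-toolchain JSON that SaveTimestampsAndHash (further down in this patch) writes next to vs_files. A rough sketch of its shape, with a shortened, made-up hash and made-up path/mtime values:

# Illustrative only -- the file name and values below are assumptions, not
# taken from the patch. This is what json.dump(timestamps_data, f) writes
# into <sha1>.timestamps:
timestamps_data = {
    'files': [
        ['vs_files\\9ff97fa2aef05cbb\\win_sdk\\bin\\SetEnv.cmd', 1453939200.0],
    ],
    'sha1': '9ff97fa2aef05cbb',  # the toolchain hash this tree should match
}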
@@ -111,21 +110,38 @@ def CalculateHash(root): |
digest = hashlib.sha1() |
for path in file_list: |
- digest.update(str(path).replace('/', '\\')) |
+ if expected_hash != '': |
+ path_without_hash = ( |
+ str(path).replace('/', '\\').replace(expected_hash, '').replace( |
+ '\\\\', '\\')) |
+ else: |
+ path_without_hash = str(path).replace('/', '\\') |
+ digest.update(path_without_hash) |
with open(path, 'rb') as f: |
digest.update(f.read()) |
return digest.hexdigest() |
+def CalculateToolchainHashes(root): |
+ """Calculate the hash of the different toolchains installed in the |root| |
+ directory.""" |
+ hashes = [] |
+ dir_list = [ |
+ d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))] |
+ for d in dir_list: |
+ hashes.append(CalculateHash(root, d)) |
+ return hashes |
+ |
+ |
def SaveTimestampsAndHash(root, sha1): |
"""Saves timestamps and the final hash to be able to early-out more quickly |
next time.""" |
- file_list = GetFileList(root) |
+ file_list = GetFileList(os.path.join(root, sha1)) |
timestamps_data = { |
'files': [[f, os.stat(f).st_mtime] for f in file_list], |
'sha1': sha1, |
} |
- with open(MakeTimestampsFileName(root), 'wb') as f: |
+ with open(MakeTimestampsFileName(root, sha1), 'wb') as f: |
json.dump(timestamps_data, f) |
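To see why CalculateHash strips |expected_hash| from each path before hashing: the digest has to stay comparable with hashes that were computed against the old flat vs_files layout. A minimal sketch of that normalization, assuming toolchains now live in vs_files\<hash>\ (the directory names and the shortened hash are illustrative, not from the patch):

# Sketch only: mirrors the path normalization in CalculateHash above.
def path_key(path, expected_hash=''):
  key = str(path).replace('/', '\\')
  if expected_hash != '':
    # Drop the per-toolchain directory name, then collapse the doubled
    # backslash it leaves behind.
    key = key.replace(expected_hash, '').replace('\\\\', '\\')
  return key

old_layout = 'vs_files\\win_sdk\\bin\\SetEnv.cmd'
new_layout = 'vs_files\\9ff97fa2aef05cbb\\win_sdk\\bin\\SetEnv.cmd'
assert path_key(old_layout) == path_key(new_layout, '9ff97fa2aef05cbb')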
@@ -327,10 +343,9 @@ def main(): |
sys.exit(subprocess.call(cmd)) |
assert sys.platform != 'cygwin' |
- # We assume that the Pro hash is the first one. |
- desired_hashes = args |
- if len(desired_hashes) == 0: |
- sys.exit('Desired hashes are required.') |
+ if len(args) == 0: |
+ sys.exit('Desired hash is required.') |
+ desired_hash = args[0] |
# Move to depot_tools\win_toolchain where we'll store our files, and where |
# the downloader script is. |
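In other words, the script now takes exactly one toolchain hash on the command line instead of a list of acceptable hashes; a hypothetical invocation (shortened, made-up hash):

python get_toolchain_if_necessary.py 9ff97fa2aef05cbb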
@@ -340,7 +355,11 @@ def main(): |
target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) |
else: |
target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) |
- abs_target_dir = os.path.abspath(target_dir) |
+ if not os.path.isdir(target_dir): |
+ os.mkdir(target_dir) |
+ toolchain_target_dir = os.path.join(target_dir, desired_hash) |
+ |
+ abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir) |
got_new_toolchain = False |
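For orientation, a sketch of where the pieces land under the new layout, assuming depot_tools at C:\src\depot_tools and the same shortened, made-up hash as in the examples above:

# Sketch only; the paths below are assumptions, not part of the patch.
import os

toolchain_dir = r'C:\src\depot_tools\win_toolchain'   # assumed location
desired_hash = '9ff97fa2aef05cbb'                     # made-up hash

target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files'))
toolchain_target_dir = os.path.join(target_dir, desired_hash)
timestamps_file = os.path.normpath(
    os.path.join(target_dir, os.pardir, '%s.timestamps' % desired_hash))

# On Windows this prints:
#   C:\src\depot_tools\win_toolchain\vs_files\9ff97fa2aef05cbb
#   C:\src\depot_tools\win_toolchain\9ff97fa2aef05cbb.timestamps
print(toolchain_target_dir)
print(timestamps_file)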
@@ -348,8 +367,8 @@ def main(): |
# Typically this script is only run when the .sha1 one file is updated, but |
# directly calling "gclient runhooks" will also run it, so we cache |
# based on timestamps to make that case fast. |
- current_hash = CalculateHash(target_dir) |
- if current_hash not in desired_hashes: |
+ current_hashes = CalculateToolchainHashes(target_dir) |
+ if desired_hash not in current_hashes: |
should_use_gs = False |
if (HaveSrcInternalAccess() or |
LooksLikeGoogler() or |
@@ -363,10 +382,10 @@ def main(): |
'build-instructions-windows\n\n') |
return 1 |
print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') |
- print(' current_hash: %s' % current_hash) |
- print(' desired_hashes: %s' % ', '.join(desired_hashes)) |
+ print(' current_hashes: %s' % ', '.join(current_hashes)) |
+ print(' desired_hash: %s' % desired_hash) |
sys.stdout.flush() |
- DelayBeforeRemoving(target_dir) |
+ DelayBeforeRemoving(toolchain_target_dir) |
scottmg
2016/01/27 21:46:12
Can we delete lines 388-400 now? It seems
Sébastien Marchand
2016/01/28 22:01:15
Moved to the RemoveUnusedToolchains function.
|
if sys.platform == 'win32': |
# These stay resident and will make the rmdir below fail. |
kill_list = [ |
@@ -377,54 +396,54 @@ def main(): |
with open(os.devnull, 'wb') as nul: |
subprocess.call(['taskkill', '/f', '/im', process_name], |
stdin=nul, stdout=nul, stderr=nul) |
- if os.path.isdir(target_dir): |
- RmDir(target_dir) |
+ if os.path.isdir(toolchain_target_dir): |
+ RmDir(toolchain_target_dir) |
- DoTreeMirror(target_dir, desired_hashes[0]) |
+ DoTreeMirror(toolchain_target_dir, desired_hash) |
got_new_toolchain = True |
- win_sdk = os.path.join(abs_target_dir, 'win_sdk') |
+ win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk') |
try: |
- with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f: |
+ with open(os.path.join(toolchain_target_dir, 'VS_VERSION'), 'rb') as f: |
vs_version = f.read().strip() |
except IOError: |
# Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' |
# instead of just 'win_sdk'. |
vs_version = '2013' |
- win_sdk = os.path.join(abs_target_dir, 'win8sdk') |
+ win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk') |
data = { |
- 'path': abs_target_dir, |
+ 'path': abs_toolchain_target_dir, |
'version': vs_version, |
'win_sdk': win_sdk, |
# Added for backwards compatibility with old toolchain packages. |
'win8sdk': win_sdk, |
- 'wdk': os.path.join(abs_target_dir, 'wdk'), |
+ 'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'), |
'runtime_dirs': [ |
- os.path.join(abs_target_dir, 'sys64'), |
- os.path.join(abs_target_dir, 'sys32'), |
+ os.path.join(abs_toolchain_target_dir, 'sys64'), |
+ os.path.join(abs_toolchain_target_dir, 'sys32'), |
], |
} |
with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: |
json.dump(data, f) |
if got_new_toolchain: |
- current_hash = CalculateHash(target_dir) |
- if current_hash not in desired_hashes: |
+ current_hashes = CalculateToolchainHashes(target_dir) |
+ if desired_hash not in current_hashes: |
print >> sys.stderr, ( |
'Got wrong hash after pulling a new toolchain. ' |
- 'Wanted one of \'%s\', got \'%s\'.' % ( |
- ', '.join(desired_hashes), current_hash)) |
+ 'Wanted \'%s\', got one of \'%s\'.' % ( |
+ desired_hash, ', '.join(current_hashes))) |
return 1 |
- SaveTimestampsAndHash(target_dir, current_hash) |
+ SaveTimestampsAndHash(target_dir, desired_hash) |
if options.output_json: |
shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), |
options.output_json) |
if os.environ.get('GYP_MSVS_VERSION') == '2015': |
- InstallUniversalCRTIfNeeded(abs_target_dir) |
+ InstallUniversalCRTIfNeeded(abs_toolchain_target_dir) |
return 0 |