Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1039)

Side by Side Diff: win_toolchain/get_toolchain_if_necessary.py

Issue 1634923002: Add the possibility to keep several version of the VS toolchain. (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git@master
Patch Set: Update the timestamp logic. Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The Chromium Authors. All rights reserved. 2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Downloads and unpacks a toolchain for building on Windows. The contents are 6 """Downloads and unpacks a toolchain for building on Windows. The contents are
7 matched by sha1 which will be updated when the toolchain is updated. 7 matched by sha1 which will be updated when the toolchain is updated.
8 8
9 Having a toolchain script in depot_tools means that it's not versioned 9 Having a toolchain script in depot_tools means that it's not versioned
10 directly with the source code. That is, if the toolchain is upgraded, but 10 directly with the source code. That is, if the toolchain is upgraded, but
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after
71 """Gets a normalized list of files under |root|.""" 71 """Gets a normalized list of files under |root|."""
72 assert not os.path.isabs(root) 72 assert not os.path.isabs(root)
73 assert os.path.normpath(root) == root 73 assert os.path.normpath(root) == root
74 file_list = [] 74 file_list = []
75 for base, _, files in os.walk(root): 75 for base, _, files in os.walk(root):
76 paths = [os.path.join(base, f) for f in files] 76 paths = [os.path.join(base, f) for f in files]
77 file_list.extend(x.lower() for x in paths) 77 file_list.extend(x.lower() for x in paths)
78 return sorted(file_list, key=lambda s: s.replace('/', '\\')) 78 return sorted(file_list, key=lambda s: s.replace('/', '\\'))
79 79
80 80
81 def MakeTimestampsFileName(root): 81 def MakeTimestampsFileName(root, sha1):
82 return os.path.join(root, '..', '.timestamps') 82 return os.path.join(root, os.pardir, '%s.timestamps' % sha1)
83 83
84 84
85 def CalculateHash(root): 85 def CalculateHash(root, expected_hash=''):
scottmg 2016/01/27 21:46:12 Can we get rid of the default value for expected_h
Sébastien Marchand 2016/01/28 22:01:15 Done, it was here because it's used by vs_toolchain
86 """Calculates the sha1 of the paths to all files in the given |root| and the 86 """Calculates the sha1 of the paths to all files in the given |root| and the
87 contents of those files, and returns as a hex string.""" 87 contents of those files, and returns as a hex string."""
88 file_list = GetFileList(root) 88 file_list = GetFileList(os.path.join(root, expected_hash))
89
90 # Check whether we previously saved timestamps in $root/../.timestamps. If 89 # Check whether we previously saved timestamps in $root/../.timestamps. If
scottmg 2016/01/27 21:46:12 ../{sha1}.timestamps
Sébastien Marchand 2016/01/28 22:01:15 Done.
91 # we didn't, or they don't match, then do the full calculation, otherwise 90 # we didn't, or they don't match, then do the full calculation, otherwise
92 # return the saved value. 91 # return the saved value.
93 timestamps_file = MakeTimestampsFileName(root) 92 timestamps_file = MakeTimestampsFileName(root, expected_hash)
94 timestamps_data = {'files': [], 'sha1': ''} 93 timestamps_data = {'files': [], 'sha1': ''}
95 if os.path.exists(timestamps_file): 94 if os.path.exists(timestamps_file):
96 with open(timestamps_file, 'rb') as f: 95 with open(timestamps_file, 'rb') as f:
97 try: 96 try:
98 timestamps_data = json.load(f) 97 timestamps_data = json.load(f)
99 except ValueError: 98 except ValueError:
100 # json couldn't be loaded, empty data will force a re-hash. 99 # json couldn't be loaded, empty data will force a re-hash.
101 pass 100 pass
102 101
103 matches = len(file_list) == len(timestamps_data['files']) 102 matches = len(file_list) == len(timestamps_data['files'])
104 if matches: 103 if matches:
105 for disk, cached in zip(file_list, timestamps_data['files']): 104 for disk, cached in zip(file_list, timestamps_data['files']):
106 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]: 105 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]:
107 matches = False 106 matches = False
108 break 107 break
109 if matches: 108 if matches:
110 return timestamps_data['sha1'] 109 return timestamps_data['sha1']
111 110
112 digest = hashlib.sha1() 111 digest = hashlib.sha1()
113 for path in file_list: 112 for path in file_list:
114 digest.update(str(path).replace('/', '\\')) 113 if expected_hash != '':
114 path_without_hash = (
115 str(path).replace('/', '\\').replace(expected_hash, '').replace(
116 '\\\\', '\\'))
117 else:
118 path_without_hash = str(path).replace('/', '\\')
119 digest.update(path_without_hash)
115 with open(path, 'rb') as f: 120 with open(path, 'rb') as f:
116 digest.update(f.read()) 121 digest.update(f.read())
117 return digest.hexdigest() 122 return digest.hexdigest()
118 123
119 124
125 def CalculateToolchainHashes(root):
126 """Calculate the hash of the different toolchains installed in the |root|
127 directory."""
128 hashes = []
129 dir_list = [
130 d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))]
131 for d in dir_list:
132 hashes.append(CalculateHash(root, d))
133 return hashes
134
135
120 def SaveTimestampsAndHash(root, sha1): 136 def SaveTimestampsAndHash(root, sha1):
121 """Saves timestamps and the final hash to be able to early-out more quickly 137 """Saves timestamps and the final hash to be able to early-out more quickly
122 next time.""" 138 next time."""
123 file_list = GetFileList(root) 139 file_list = GetFileList(os.path.join(root, sha1))
124 timestamps_data = { 140 timestamps_data = {
125 'files': [[f, os.stat(f).st_mtime] for f in file_list], 141 'files': [[f, os.stat(f).st_mtime] for f in file_list],
126 'sha1': sha1, 142 'sha1': sha1,
127 } 143 }
128 with open(MakeTimestampsFileName(root), 'wb') as f: 144 with open(MakeTimestampsFileName(root, sha1), 'wb') as f:
129 json.dump(timestamps_data, f) 145 json.dump(timestamps_data, f)
130 146
131 147
132 def HaveSrcInternalAccess(): 148 def HaveSrcInternalAccess():
133 """Checks whether access to src-internal is available.""" 149 """Checks whether access to src-internal is available."""
134 with open(os.devnull, 'w') as nul: 150 with open(os.devnull, 'w') as nul:
135 if subprocess.call( 151 if subprocess.call(
136 ['svn', 'ls', '--non-interactive', 152 ['svn', 'ls', '--non-interactive',
137 'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'], 153 'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'],
138 shell=True, stdin=nul, stdout=nul, stderr=nul) == 0: 154 shell=True, stdin=nul, stdout=nul, stderr=nul) == 0:
(...skipping 181 matching lines...) Expand 10 before | Expand all | Expand 10 after
320 def winpath(path): 336 def winpath(path):
321 return subprocess.check_output(['cygpath', '-w', path]).strip() 337 return subprocess.check_output(['cygpath', '-w', path]).strip()
322 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') 338 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat')
323 cmd = [python, winpath(__file__)] 339 cmd = [python, winpath(__file__)]
324 if options.output_json: 340 if options.output_json:
325 cmd.extend(['--output-json', winpath(options.output_json)]) 341 cmd.extend(['--output-json', winpath(options.output_json)])
326 cmd.extend(args) 342 cmd.extend(args)
327 sys.exit(subprocess.call(cmd)) 343 sys.exit(subprocess.call(cmd))
328 assert sys.platform != 'cygwin' 344 assert sys.platform != 'cygwin'
329 345
330 # We assume that the Pro hash is the first one. 346 if len(args) == 0:
331 desired_hashes = args 347 sys.exit('Desired hash is required.')
332 if len(desired_hashes) == 0: 348 desired_hash = args[0]
333 sys.exit('Desired hashes are required.')
334 349
335 # Move to depot_tools\win_toolchain where we'll store our files, and where 350 # Move to depot_tools\win_toolchain where we'll store our files, and where
336 # the downloader script is. 351 # the downloader script is.
337 os.chdir(os.path.normpath(os.path.join(BASEDIR))) 352 os.chdir(os.path.normpath(os.path.join(BASEDIR)))
338 toolchain_dir = '.' 353 toolchain_dir = '.'
339 if os.environ.get('GYP_MSVS_VERSION') == '2015': 354 if os.environ.get('GYP_MSVS_VERSION') == '2015':
340 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) 355 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files'))
341 else: 356 else:
342 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) 357 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files'))
343 abs_target_dir = os.path.abspath(target_dir) 358 if not os.path.isdir(target_dir):
359 os.mkdir(target_dir)
360 toolchain_target_dir = os.path.join(target_dir, desired_hash)
361
362 abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir)
344 363
345 got_new_toolchain = False 364 got_new_toolchain = False
346 365
347 # If the current hash doesn't match what we want in the file, nuke and pave. 366 # If the current hash doesn't match what we want in the file, nuke and pave.
348 # Typically this script is only run when the .sha1 one file is updated, but 367 # Typically this script is only run when the .sha1 one file is updated, but
349 # directly calling "gclient runhooks" will also run it, so we cache 368 # directly calling "gclient runhooks" will also run it, so we cache
350 # based on timestamps to make that case fast. 369 # based on timestamps to make that case fast.
351 current_hash = CalculateHash(target_dir) 370 current_hashes = CalculateToolchainHashes(target_dir)
352 if current_hash not in desired_hashes: 371 if desired_hash not in current_hashes:
353 should_use_gs = False 372 should_use_gs = False
354 if (HaveSrcInternalAccess() or 373 if (HaveSrcInternalAccess() or
355 LooksLikeGoogler() or 374 LooksLikeGoogler() or
356 CanAccessToolchainBucket()): 375 CanAccessToolchainBucket()):
357 should_use_gs = True 376 should_use_gs = True
358 if not CanAccessToolchainBucket(): 377 if not CanAccessToolchainBucket():
359 RequestGsAuthentication() 378 RequestGsAuthentication()
360 if not should_use_gs: 379 if not should_use_gs:
361 print('\n\n\nPlease follow the instructions at ' 380 print('\n\n\nPlease follow the instructions at '
362 'https://www.chromium.org/developers/how-tos/' 381 'https://www.chromium.org/developers/how-tos/'
363 'build-instructions-windows\n\n') 382 'build-instructions-windows\n\n')
364 return 1 383 return 1
365 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') 384 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...')
366 print(' current_hash: %s' % current_hash) 385 print(' current_hashes: %s' % ', '.join(current_hashes))
367 print(' desired_hashes: %s' % ', '.join(desired_hashes)) 386 print(' desired_hash: %s' % desired_hash)
368 sys.stdout.flush() 387 sys.stdout.flush()
369 DelayBeforeRemoving(target_dir) 388 DelayBeforeRemoving(toolchain_target_dir)
scottmg 2016/01/27 21:46:12 Can we delete from line 388-400 now? It seems
Sébastien Marchand 2016/01/28 22:01:15 Moved to the RemoveUnusedToolchains function.
370 if sys.platform == 'win32': 389 if sys.platform == 'win32':
371 # These stay resident and will make the rmdir below fail. 390 # These stay resident and will make the rmdir below fail.
372 kill_list = [ 391 kill_list = [
373 'mspdbsrv.exe', 392 'mspdbsrv.exe',
374 'vctip.exe', # Compiler and tools experience improvement data uploader. 393 'vctip.exe', # Compiler and tools experience improvement data uploader.
375 ] 394 ]
376 for process_name in kill_list: 395 for process_name in kill_list:
377 with open(os.devnull, 'wb') as nul: 396 with open(os.devnull, 'wb') as nul:
378 subprocess.call(['taskkill', '/f', '/im', process_name], 397 subprocess.call(['taskkill', '/f', '/im', process_name],
379 stdin=nul, stdout=nul, stderr=nul) 398 stdin=nul, stdout=nul, stderr=nul)
380 if os.path.isdir(target_dir): 399 if os.path.isdir(toolchain_target_dir):
381 RmDir(target_dir) 400 RmDir(toolchain_target_dir)
382 401
383 DoTreeMirror(target_dir, desired_hashes[0]) 402 DoTreeMirror(toolchain_target_dir, desired_hash)
384 403
385 got_new_toolchain = True 404 got_new_toolchain = True
386 405
387 win_sdk = os.path.join(abs_target_dir, 'win_sdk') 406 win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk')
388 try: 407 try:
389 with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f: 408 with open(os.path.join(toolchain_target_dir, 'VS_VERSION'), 'rb') as f:
390 vs_version = f.read().strip() 409 vs_version = f.read().strip()
391 except IOError: 410 except IOError:
392 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' 411 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk'
393 # instead of just 'win_sdk'. 412 # instead of just 'win_sdk'.
394 vs_version = '2013' 413 vs_version = '2013'
395 win_sdk = os.path.join(abs_target_dir, 'win8sdk') 414 win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk')
396 415
397 data = { 416 data = {
398 'path': abs_target_dir, 417 'path': abs_toolchain_target_dir,
399 'version': vs_version, 418 'version': vs_version,
400 'win_sdk': win_sdk, 419 'win_sdk': win_sdk,
401 # Added for backwards compatibility with old toolchain packages. 420 # Added for backwards compatibility with old toolchain packages.
402 'win8sdk': win_sdk, 421 'win8sdk': win_sdk,
403 'wdk': os.path.join(abs_target_dir, 'wdk'), 422 'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'),
404 'runtime_dirs': [ 423 'runtime_dirs': [
405 os.path.join(abs_target_dir, 'sys64'), 424 os.path.join(abs_toolchain_target_dir, 'sys64'),
406 os.path.join(abs_target_dir, 'sys32'), 425 os.path.join(abs_toolchain_target_dir, 'sys32'),
407 ], 426 ],
408 } 427 }
409 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: 428 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f:
410 json.dump(data, f) 429 json.dump(data, f)
411 430
412 if got_new_toolchain: 431 if got_new_toolchain:
413 current_hash = CalculateHash(target_dir) 432 current_hashes = CalculateToolchainHashes(target_dir)
414 if current_hash not in desired_hashes: 433 if desired_hash not in current_hashes:
415 print >> sys.stderr, ( 434 print >> sys.stderr, (
416 'Got wrong hash after pulling a new toolchain. ' 435 'Got wrong hash after pulling a new toolchain. '
417 'Wanted one of \'%s\', got \'%s\'.' % ( 436 'Wanted \'%s\', got one of \'%s\'.' % (
418 ', '.join(desired_hashes), current_hash)) 437 desired_hash, ', '.join(current_hashes)))
419 return 1 438 return 1
420 SaveTimestampsAndHash(target_dir, current_hash) 439 SaveTimestampsAndHash(target_dir, desired_hash)
421 440
422 if options.output_json: 441 if options.output_json:
423 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), 442 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'),
424 options.output_json) 443 options.output_json)
425 444
426 if os.environ.get('GYP_MSVS_VERSION') == '2015': 445 if os.environ.get('GYP_MSVS_VERSION') == '2015':
427 InstallUniversalCRTIfNeeded(abs_target_dir) 446 InstallUniversalCRTIfNeeded(abs_toolchain_target_dir)
428 447
429 return 0 448 return 0
430 449
431 450
432 if __name__ == '__main__': 451 if __name__ == '__main__':
433 sys.exit(main()) 452 sys.exit(main())
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698