Chromium Code Reviews

Unified Diff: win_toolchain/get_toolchain_if_necessary.py

Issue 1669993002: Revert of Add the possibility to keep several version of the VS toolchain. (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git@master
Patch Set: Created 4 years, 10 months ago
 #!/usr/bin/env python
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 """Downloads and unpacks a toolchain for building on Windows. The contents are
 matched by sha1 which will be updated when the toolchain is updated.

 Having a toolchain script in depot_tools means that it's not versioned
 directly with the source code. That is, if the toolchain is upgraded, but
(...skipping 62 matching lines...)
   assert os.path.normpath(root) == root
   file_list = []
   for base, _, files in os.walk(root):
     paths = [os.path.join(base, f) for f in files]
     # Ignore WER ReportQueue entries that vctip/cl leave in the bin dir if/when
     # they crash.
     file_list.extend(x.lower() for x in paths if 'WER\\ReportQueue' not in x)
   return sorted(file_list, key=lambda s: s.replace('/', '\\'))


-def MakeTimestampsFileName(root, sha1):
-  return os.path.join(root, os.pardir, '%s.timestamps' % sha1)
+def MakeTimestampsFileName(root):
+  return os.path.join(root, '..', '.timestamps')


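The removed variant derived one timestamps file per toolchain version, while the restored one uses a single fixed file next to the toolchain directory. A minimal sketch of the resulting paths (the root and sha1 values here are hypothetical):

import os

root = 'vs_files'
sha1 = '9ff97c632ae1fee0c98bcd53e71770eb3a0d8deb'  # hypothetical toolchain hash

# Before the revert: one timestamps file per installed toolchain version.
per_version = os.path.join(root, os.pardir, '%s.timestamps' % sha1)
# e.g. 'vs_files/../9ff97c632ae1fee0c98bcd53e71770eb3a0d8deb.timestamps'

# After the revert: a single fixed timestamps file next to the toolchain dir.
single = os.path.join(root, '..', '.timestamps')
# e.g. 'vs_files/../.timestamps'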
-def CalculateHash(root, expected_hash):
+def CalculateHash(root):
   """Calculates the sha1 of the paths to all files in the given |root| and the
-  contents of those files, and returns as a hex string.
+  contents of those files, and returns as a hex string."""
+  file_list = GetFileList(root)

-  |expected_hash| is the expected hash value for this toolchain if it has
-  already been installed.
-  """
-  if expected_hash:
-    full_root_path = os.path.join(root, expected_hash)
-  else:
-    full_root_path = root
-  file_list = GetFileList(full_root_path)
-  # Check whether we previously saved timestamps in $root/../{sha1}.timestamps.
-  # If we didn't, or they don't match, then do the full calculation, otherwise
+  # Check whether we previously saved timestamps in $root/../.timestamps. If
+  # we didn't, or they don't match, then do the full calculation, otherwise
   # return the saved value.
-  timestamps_file = MakeTimestampsFileName(root, expected_hash)
+  timestamps_file = MakeTimestampsFileName(root)
   timestamps_data = {'files': [], 'sha1': ''}
   if os.path.exists(timestamps_file):
     with open(timestamps_file, 'rb') as f:
       try:
         timestamps_data = json.load(f)
       except ValueError:
         # json couldn't be loaded, empty data will force a re-hash.
         pass

   matches = len(file_list) == len(timestamps_data['files'])
-  # Don't check the timestamp of the version file as we touch this file to
-  # indicates which versions of the toolchain are still being used.
-  vc_dir = os.path.join(full_root_path, 'VC').lower()
   if matches:
     for disk, cached in zip(file_list, timestamps_data['files']):
-      if disk != cached[0] or (
-          disk != vc_dir and os.path.getmtime(disk) != cached[1]):
+      if disk != cached[0] or os.stat(disk).st_mtime != cached[1]:
         matches = False
         break
   if matches:
     return timestamps_data['sha1']

   digest = hashlib.sha1()
   for path in file_list:
-    path_without_hash = str(path).replace('/', '\\')
-    if expected_hash:
-      path_without_hash = path_without_hash.replace(
-          os.path.join(root, expected_hash), root)
-    digest.update(path_without_hash)
+    digest.update(str(path).replace('/', '\\'))
     with open(path, 'rb') as f:
       digest.update(f.read())
   return digest.hexdigest()


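As an illustration of the early-out pattern used by CalculateHash and SaveTimestampsAndHash, here is a minimal standalone sketch, assuming simplified helpers and a caller-supplied cache path (hash_tree and cached_hash_tree are hypothetical names, not part of the script):

import hashlib
import json
import os


def hash_tree(root):
  """sha1 over the sorted file paths and their contents under |root|."""
  digest = hashlib.sha1()
  paths = sorted(os.path.join(base, name)
                 for base, _, files in os.walk(root) for name in files)
  for path in paths:
    digest.update(path.encode('utf-8'))
    with open(path, 'rb') as f:
      digest.update(f.read())
  return digest.hexdigest()


def cached_hash_tree(root, cache_file):
  """Return the cached sha1 if no file's mtime changed, else re-hash."""
  paths = sorted(os.path.join(base, name)
                 for base, _, files in os.walk(root) for name in files)
  try:
    with open(cache_file) as f:
      cache = json.load(f)
  except (IOError, ValueError):
    cache = {'files': [], 'sha1': ''}
  cached = cache['files']
  if (len(paths) == len(cached) and
      all(p == c[0] and os.stat(p).st_mtime == c[1]
          for p, c in zip(paths, cached))):
    return cache['sha1']  # early-out: nothing on disk changed
  sha1 = hash_tree(root)
  with open(cache_file, 'w') as f:
    json.dump({'files': [[p, os.stat(p).st_mtime] for p in paths],
               'sha1': sha1}, f)
  return sha1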
-def CalculateToolchainHashes(root):
-  """Calculate the hash of the different toolchains installed in the |root|
-  directory."""
-  hashes = []
-  dir_list = [
-      d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))]
-  for d in dir_list:
-    hashes.append(CalculateHash(root, d))
-  return hashes
-
-
 def SaveTimestampsAndHash(root, sha1):
   """Saves timestamps and the final hash to be able to early-out more quickly
   next time."""
-  file_list = GetFileList(os.path.join(root, sha1))
+  file_list = GetFileList(root)
   timestamps_data = {
-      'files': [[f, os.path.getmtime(f)] for f in file_list],
+      'files': [[f, os.stat(f).st_mtime] for f in file_list],
       'sha1': sha1,
   }
-  with open(MakeTimestampsFileName(root, sha1), 'wb') as f:
+  with open(MakeTimestampsFileName(root), 'wb') as f:
     json.dump(timestamps_data, f)


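For reference, the .timestamps file written above is plain JSON; its shape is roughly the following, with hypothetical paths, mtimes, and sha1:

# What json.dump(timestamps_data, f) ends up writing, approximately:
timestamps_data = {
    'files': [
        ['vs_files\\vc\\bin\\cl.exe', 1454637284.0],                  # [path, mtime]
        ['vs_files\\win_sdk\\include\\um\\windows.h', 1454637301.0],
    ],
    'sha1': '9ff97c632ae1fee0c98bcd53e71770eb3a0d8deb',
}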
 def HaveSrcInternalAccess():
   """Checks whether access to src-internal is available."""
   with open(os.devnull, 'w') as nul:
     if subprocess.call(
         ['svn', 'ls', '--non-interactive',
          'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'],
         shell=True, stdin=nul, stdout=nul, stderr=nul) == 0:
(...skipping 89 matching lines...)
   else:
     temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip')
   sys.stdout.write('Extracting %s...\n' % local_zip)
   sys.stdout.flush()
   with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf:
     zf.extractall(target_dir)
   if temp_dir:
     RmDir(temp_dir)


-def RemoveToolchain(root, sha1, delay_before_removing):
-  """Remove the |sha1| version of the toolchain from |root|."""
-  toolchain_target_dir = os.path.join(root, sha1)
-  if delay_before_removing:
-    DelayBeforeRemoving(toolchain_target_dir)
-  if sys.platform == 'win32':
-    # These stay resident and will make the rmdir below fail.
-    kill_list = [
-      'mspdbsrv.exe',
-      'vctip.exe', # Compiler and tools experience improvement data uploader.
-    ]
-    for process_name in kill_list:
-      with open(os.devnull, 'wb') as nul:
-        subprocess.call(['taskkill', '/f', '/im', process_name],
-                        stdin=nul, stdout=nul, stderr=nul)
-  if os.path.isdir(toolchain_target_dir):
-    RmDir(toolchain_target_dir)
-
-  timestamp_file = MakeTimestampsFileName(root, sha1)
-  if os.path.exists(timestamp_file):
-    os.remove(timestamp_file)
-
-
-def RemoveUnusedToolchains(root):
-  """Remove the versions of the toolchain that haven't been used recently."""
-  valid_toolchains = []
-  dirs_to_remove = []
-
-  for d in os.listdir(root):
-    full_path = os.path.join(root, d)
-    if os.path.isdir(full_path):
-      if not os.path.exists(MakeTimestampsFileName(root, d)):
-        dirs_to_remove.append(d)
-      else:
-        vc_dir = os.path.join(full_path, 'VC')
-        valid_toolchains.append((os.path.getmtime(vc_dir), d))
-    elif os.path.isfile(full_path):
-      os.remove(full_path)
-
-  for d in dirs_to_remove:
-    print ('Removing %s as it doesn\'t correspond to any known toolchain.' %
-           os.path.join(root, d))
-    # Use the RemoveToolchain function to remove these directories as they might
-    # contain an older version of the toolchain.
-    RemoveToolchain(root, d, False)
-
-  # Remove the versions of the toolchains that haven't been used in the past 30
-  # days.
-  toolchain_expiration_time = 60 * 60 * 24 * 30
-  for toolchain in valid_toolchains:
-    toolchain_age_in_sec = time.time() - toolchain[0]
-    if toolchain_age_in_sec > toolchain_expiration_time:
-      print ('Removing version %s of the Win toolchain has it hasn\'t been used'
-             ' in the past %d days.' % (toolchain[1],
-                                        toolchain_age_in_sec / 60 / 60 / 24))
-      RemoveToolchain(root, toolchain[1], True)
-
-
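The removed cleanup used the mtime of each toolchain's VC directory as a last-used marker and an expiry window of 60 * 60 * 24 * 30 seconds (30 days). A minimal sketch of that age check, with a hypothetical helper name and directory layout:

import os
import time

TOOLCHAIN_EXPIRATION_SEC = 60 * 60 * 24 * 30  # 30 days


def is_toolchain_expired(toolchain_dir):
  """True if the toolchain's VC dir hasn't been touched for 30 days."""
  last_used = os.path.getmtime(os.path.join(toolchain_dir, 'VC'))
  return time.time() - last_used > TOOLCHAIN_EXPIRATION_SEC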
 def GetInstallerName():
   """Return the name of the Windows 10 Universal C Runtime installer for the
   current platform, or None if installer is not needed or not applicable.
   The registry has to be used instead of sys.getwindowsversion() because
   Python 2.7 is only manifested as being compatible up to Windows 8, so the
   version APIs helpfully return a maximum of 6.2 (Windows 8).
   """
   key_name = r'Software\Microsoft\Windows NT\CurrentVersion'
   key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name)
   value, keytype = winreg.QueryValueEx(key, "CurrentVersion")
(...skipping 72 matching lines...)
     def winpath(path):
       return subprocess.check_output(['cygpath', '-w', path]).strip()
     python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat')
     cmd = [python, winpath(__file__)]
     if options.output_json:
       cmd.extend(['--output-json', winpath(options.output_json)])
     cmd.extend(args)
     sys.exit(subprocess.call(cmd))
   assert sys.platform != 'cygwin'

-  if len(args) == 0:
-    sys.exit('Desired hash is required.')
-  desired_hash = args[0]
+  # We assume that the Pro hash is the first one.
+  desired_hashes = args
+  if len(desired_hashes) == 0:
+    sys.exit('Desired hashes are required.')

   # Move to depot_tools\win_toolchain where we'll store our files, and where
   # the downloader script is.
   os.chdir(os.path.normpath(os.path.join(BASEDIR)))
   toolchain_dir = '.'
   if os.environ.get('GYP_MSVS_VERSION') == '2015':
     target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files'))
   else:
     target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files'))
-  if not os.path.isdir(target_dir):
-    os.mkdir(target_dir)
-  toolchain_target_dir = os.path.join(target_dir, desired_hash)
-
-  abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir)
+  abs_target_dir = os.path.abspath(target_dir)

   got_new_toolchain = False

   # If the current hash doesn't match what we want in the file, nuke and pave.
   # Typically this script is only run when the .sha1 one file is updated, but
   # directly calling "gclient runhooks" will also run it, so we cache
   # based on timestamps to make that case fast.
-  current_hashes = CalculateToolchainHashes(target_dir)
-  if desired_hash not in current_hashes:
+  current_hash = CalculateHash(target_dir)
+  if current_hash not in desired_hashes:
     should_use_gs = False
     if (HaveSrcInternalAccess() or
         LooksLikeGoogler() or
         CanAccessToolchainBucket()):
       should_use_gs = True
       if not CanAccessToolchainBucket():
         RequestGsAuthentication()
     if not should_use_gs:
       print('\n\n\nPlease follow the instructions at '
             'https://www.chromium.org/developers/how-tos/'
             'build-instructions-windows\n\n')
       return 1
     print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...')
-    print('  current_hashes: %s' % ', '.join(current_hashes))
-    print('  desired_hash: %s' % desired_hash)
+    print('  current_hash: %s' % current_hash)
+    print('  desired_hashes: %s' % ', '.join(desired_hashes))
     sys.stdout.flush()
+    DelayBeforeRemoving(target_dir)
+    if sys.platform == 'win32':
+      # These stay resident and will make the rmdir below fail.
+      kill_list = [
+        'mspdbsrv.exe',
+        'vctip.exe', # Compiler and tools experience improvement data uploader.
+      ]
+      for process_name in kill_list:
+        with open(os.devnull, 'wb') as nul:
+          subprocess.call(['taskkill', '/f', '/im', process_name],
+                          stdin=nul, stdout=nul, stderr=nul)
+    if os.path.isdir(target_dir):
+      RmDir(target_dir)

-    DoTreeMirror(toolchain_target_dir, desired_hash)
+    DoTreeMirror(target_dir, desired_hashes[0])

     got_new_toolchain = True

-  win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk')
+  win_sdk = os.path.join(abs_target_dir, 'win_sdk')
   try:
-    version_file = os.path.join(toolchain_target_dir, 'VS_VERSION')
-    vc_dir = os.path.join(toolchain_target_dir, 'VC')
-    with open(version_file, 'rb') as f:
+    with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f:
       vs_version = f.read().strip()
-    # Touch the VC directory so we can use its timestamp to know when this
-    # version of the toolchain has been used for the last time.
-    os.utime(vc_dir, None)
   except IOError:
     # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk'
     # instead of just 'win_sdk'.
     vs_version = '2013'
-    win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk')
+    win_sdk = os.path.join(abs_target_dir, 'win8sdk')

   data = {
-      'path': abs_toolchain_target_dir,
+      'path': abs_target_dir,
       'version': vs_version,
       'win_sdk': win_sdk,
       # Added for backwards compatibility with old toolchain packages.
       'win8sdk': win_sdk,
-      'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'),
+      'wdk': os.path.join(abs_target_dir, 'wdk'),
       'runtime_dirs': [
-        os.path.join(abs_toolchain_target_dir, 'sys64'),
-        os.path.join(abs_toolchain_target_dir, 'sys32'),
+        os.path.join(abs_target_dir, 'sys64'),
+        os.path.join(abs_target_dir, 'sys32'),
       ],
   }
   with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f:
     json.dump(data, f)

   if got_new_toolchain:
-    current_hashes = CalculateToolchainHashes(target_dir)
-    if desired_hash not in current_hashes:
+    current_hash = CalculateHash(target_dir)
+    if current_hash not in desired_hashes:
       print >> sys.stderr, (
           'Got wrong hash after pulling a new toolchain. '
-          'Wanted \'%s\', got one of \'%s\'.' % (
-              desired_hash, ', '.join(current_hashes)))
+          'Wanted one of \'%s\', got \'%s\'.' % (
+              ', '.join(desired_hashes), current_hash))
       return 1
-    SaveTimestampsAndHash(target_dir, desired_hash)
+    SaveTimestampsAndHash(target_dir, current_hash)

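Putting the pieces of main() together, the restored update flow is roughly: hash what is on disk, wipe and re-download if it doesn't match, then re-hash and persist the timestamps. A condensed sketch that reuses the script's helpers (CalculateHash, RmDir, DoTreeMirror, SaveTimestampsAndHash); UpdateToolchain is a hypothetical name, not the real entry point, and error handling is omitted:

def UpdateToolchain(target_dir, desired_hashes):
  """Condensed view of the flow in main(); not the real entry point."""
  current_hash = CalculateHash(target_dir)
  if current_hash not in desired_hashes:
    # Nuke and pave: drop the stale tree, then mirror the first desired hash.
    if os.path.isdir(target_dir):
      RmDir(target_dir)
    DoTreeMirror(target_dir, desired_hashes[0])
    current_hash = CalculateHash(target_dir)
    if current_hash not in desired_hashes:
      return 1  # the download produced something unexpected
    SaveTimestampsAndHash(target_dir, current_hash)
  return 0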
   if options.output_json:
     shutil.copyfile(os.path.join(target_dir, '..', 'data.json'),
                     options.output_json)

   if os.environ.get('GYP_MSVS_VERSION') == '2015':
-    InstallUniversalCRTIfNeeded(abs_toolchain_target_dir)
-
-  RemoveUnusedToolchains(target_dir)
+    InstallUniversalCRTIfNeeded(abs_target_dir)

   return 0


 if __name__ == '__main__':
   sys.exit(main())
