OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Downloads and unpacks a toolchain for building on Windows. The contents are | 6 """Downloads and unpacks a toolchain for building on Windows. The contents are |
7 matched by sha1 which will be updated when the toolchain is updated. | 7 matched by sha1 which will be updated when the toolchain is updated. |
8 | 8 |
9 Having a toolchain script in depot_tools means that it's not versioned | 9 Having a toolchain script in depot_tools means that it's not versioned |
10 directly with the source code. That is, if the toolchain is upgraded, but | 10 directly with the source code. That is, if the toolchain is upgraded, but |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
71 """Gets a normalized list of files under |root|.""" | 71 """Gets a normalized list of files under |root|.""" |
72 assert not os.path.isabs(root) | 72 assert not os.path.isabs(root) |
73 assert os.path.normpath(root) == root | 73 assert os.path.normpath(root) == root |
74 file_list = [] | 74 file_list = [] |
75 for base, _, files in os.walk(root): | 75 for base, _, files in os.walk(root): |
76 paths = [os.path.join(base, f) for f in files] | 76 paths = [os.path.join(base, f) for f in files] |
77 file_list.extend(x.lower() for x in paths) | 77 file_list.extend(x.lower() for x in paths) |
78 return sorted(file_list, key=lambda s: s.replace('/', '\\')) | 78 return sorted(file_list, key=lambda s: s.replace('/', '\\')) |
79 | 79 |
80 | 80 |
81 def MakeTimestampsFileName(root): | 81 def MakeTimestampsFileName(root, sha1): |
82 return os.path.join(root, '..', '.timestamps') | 82 return os.path.join(root, os.pardir, '%s.timestamps' % sha1 if sha1 else '') |
scottmg
2016/01/28 23:04:11
Get rid of "if sha1 else ''".
Sébastien Marchand
2016/02/01 19:29:29
Done.
| |
83 | 83 |
84 | 84 |
85 def CalculateHash(root): | 85 def CalculateHash(root, expected_hash): |
86 """Calculates the sha1 of the paths to all files in the given |root| and the | 86 """Calculates the sha1 of the paths to all files in the given |root| and the |
87 contents of those files, and returns as a hex string.""" | 87 contents of those files, and returns as a hex string. |
88 file_list = GetFileList(root) | |
89 | 88 |
90 # Check whether we previously saved timestamps in $root/../.timestamps. If | 89 |expected_hash| is the expected hash value for this toolchain if it has |
91 # we didn't, or they don't match, then do the full calculation, otherwise | 90 already been installed. |
91 """ | |
92 if expected_hash: | |
93 full_root_path = os.path.join(root, expected_hash) | |
94 else: | |
95 full_root_path = root | |
96 file_list = GetFileList(full_root_path) | |
97 # Check whether we previously saved timestamps in $root/../{sha1}.timestamps. | |
98 # If we didn't, or they don't match, then do the full calculation, otherwise | |
92 # return the saved value. | 99 # return the saved value. |
93 timestamps_file = MakeTimestampsFileName(root) | 100 timestamps_file = MakeTimestampsFileName(root, expected_hash) |
94 timestamps_data = {'files': [], 'sha1': ''} | 101 timestamps_data = {'files': [], 'sha1': ''} |
95 if os.path.exists(timestamps_file): | 102 if os.path.exists(timestamps_file): |
96 with open(timestamps_file, 'rb') as f: | 103 with open(timestamps_file, 'rb') as f: |
97 try: | 104 try: |
98 timestamps_data = json.load(f) | 105 timestamps_data = json.load(f) |
99 except ValueError: | 106 except ValueError: |
100 # json couldn't be loaded, empty data will force a re-hash. | 107 # json couldn't be loaded, empty data will force a re-hash. |
101 pass | 108 pass |
102 | 109 |
103 matches = len(file_list) == len(timestamps_data['files']) | 110 matches = len(file_list) == len(timestamps_data['files']) |
104 if matches: | 111 if matches: |
105 for disk, cached in zip(file_list, timestamps_data['files']): | 112 for disk, cached in zip(file_list, timestamps_data['files']): |
106 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]: | 113 if disk != cached[0] or os.stat(disk).st_mtime != cached[1]: |
107 matches = False | 114 matches = False |
108 break | 115 break |
109 if matches: | 116 if matches: |
110 return timestamps_data['sha1'] | 117 return timestamps_data['sha1'] |
111 | 118 |
112 digest = hashlib.sha1() | 119 digest = hashlib.sha1() |
113 for path in file_list: | 120 for path in file_list: |
114 digest.update(str(path).replace('/', '\\')) | 121 if expected_hash: |
122 path_without_hash = ( | |
123 str(path).replace('/', '\\').replace(expected_hash, '').replace( | |
scottmg
2016/01/28 23:04:11
Can you trim expected_hash only from where you exp
Sébastien Marchand
2016/02/01 19:29:29
Done.
| |
124 '\\\\', '\\')) | |
scottmg
2016/01/28 23:04:11
Where are the double \ coming from?
Sébastien Marchand
2016/02/01 19:29:29
It was because I was just removing {hash} from {ro
| |
125 else: | |
126 path_without_hash = str(path).replace('/', '\\') | |
127 digest.update(path_without_hash) | |
115 with open(path, 'rb') as f: | 128 with open(path, 'rb') as f: |
116 digest.update(f.read()) | 129 digest.update(f.read()) |
117 return digest.hexdigest() | 130 return digest.hexdigest() |
118 | 131 |
119 | 132 |
133 def CalculateToolchainHashes(root): | |
134 """Calculate the hash of the different toolchains installed in the |root| | |
135 directory.""" | |
136 hashes = [] | |
137 dir_list = [ | |
138 d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))] | |
139 for d in dir_list: | |
140 hashes.append(CalculateHash(root, d)) | |
141 return hashes | |
142 | |
143 | |
120 def SaveTimestampsAndHash(root, sha1): | 144 def SaveTimestampsAndHash(root, sha1): |
121 """Saves timestamps and the final hash to be able to early-out more quickly | 145 """Saves timestamps and the final hash to be able to early-out more quickly |
122 next time.""" | 146 next time.""" |
123 file_list = GetFileList(root) | 147 file_list = GetFileList(os.path.join(root, sha1)) |
124 timestamps_data = { | 148 timestamps_data = { |
125 'files': [[f, os.stat(f).st_mtime] for f in file_list], | 149 'files': [[f, os.stat(f).st_mtime] for f in file_list], |
126 'sha1': sha1, | 150 'sha1': sha1, |
127 } | 151 } |
128 with open(MakeTimestampsFileName(root), 'wb') as f: | 152 with open(MakeTimestampsFileName(root, sha1), 'wb') as f: |
129 json.dump(timestamps_data, f) | 153 json.dump(timestamps_data, f) |
130 | 154 |
131 | 155 |
132 def HaveSrcInternalAccess(): | 156 def HaveSrcInternalAccess(): |
133 """Checks whether access to src-internal is available.""" | 157 """Checks whether access to src-internal is available.""" |
134 with open(os.devnull, 'w') as nul: | 158 with open(os.devnull, 'w') as nul: |
135 if subprocess.call( | 159 if subprocess.call( |
136 ['svn', 'ls', '--non-interactive', | 160 ['svn', 'ls', '--non-interactive', |
137 'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'], | 161 'svn://svn.chromium.org/chrome-internal/trunk/src-internal/'], |
138 shell=True, stdin=nul, stdout=nul, stderr=nul) == 0: | 162 shell=True, stdin=nul, stdout=nul, stderr=nul) == 0: |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
228 else: | 252 else: |
229 temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip') | 253 temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip') |
230 sys.stdout.write('Extracting %s...\n' % local_zip) | 254 sys.stdout.write('Extracting %s...\n' % local_zip) |
231 sys.stdout.flush() | 255 sys.stdout.flush() |
232 with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf: | 256 with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf: |
233 zf.extractall(target_dir) | 257 zf.extractall(target_dir) |
234 if temp_dir: | 258 if temp_dir: |
235 RmDir(temp_dir) | 259 RmDir(temp_dir) |
236 | 260 |
237 | 261 |
262 def RemoveToolchain(root, sha1, delay_before_removing): | |
263 """Remove the |sha1| version of the toolchain from |root|.""" | |
264 toolchain_target_dir = os.path.join(root, sha1) | |
265 if delay_before_removing: | |
266 DelayBeforeRemoving(toolchain_target_dir) | |
267 if sys.platform == 'win32': | |
268 # These stay resident and will make the rmdir below fail. | |
269 kill_list = [ | |
270 'mspdbsrv.exe', | |
271 'vctip.exe', # Compiler and tools experience improvement data uploader. | |
272 ] | |
273 for process_name in kill_list: | |
274 with open(os.devnull, 'wb') as nul: | |
275 subprocess.call(['taskkill', '/f', '/im', process_name], | |
276 stdin=nul, stdout=nul, stderr=nul) | |
277 if os.path.isdir(toolchain_target_dir): | |
278 RmDir(toolchain_target_dir) | |
279 | |
280 timestamp_file = MakeTimestampsFileName(root, sha1) | |
281 if os.path.exists(timestamp_file): | |
282 os.remove(timestamp_file) | |
283 | |
284 | |
285 def RemoveUnusedToolchains(root): | |
286 """Remove the versions of the toolchain that haven't been used recently.""" | |
287 valid_toolchains = [] | |
288 dirs_to_remove = [] | |
289 | |
290 for d in os.listdir(root): | |
291 full_path = os.path.join(root, d) | |
292 if os.path.isdir(full_path): | |
293 if not os.path.exists(MakeTimestampsFileName(root, d)): | |
294 dirs_to_remove.append(d) | |
295 else: | |
296 valid_toolchains.append((os.stat(full_path).st_atime, d)) | |
scottmg
2016/01/28 23:04:11
Hm, won't st_atime update for all of them every ti
Sébastien Marchand
2016/02/01 19:29:29
No, reading a file (via read()) doesn't seem to up
| |
297 elif os.path.isfile(full_path): | |
298 os.remove(full_path) | |
299 | |
300 for d in dirs_to_remove: | |
301 print ('Removing %s as it doesn\'t correspond to any known toolchain.' % | |
302 os.path.join(root, d)) | |
303 # Use the RemoveToolchain function to remove these directories as they might | |
304 # contain an older version of the toolchain. | |
305 RemoveToolchain(root, d, False) | |
306 | |
307 # Remove the versions of the toolchains that haven't been used in the past 30 | |
308 # days. | |
309 toolchain_expiration_time = 60 * 60 * 24 * 30 | |
scottmg
2016/01/28 23:04:11
I'm not sure about this heuristic now. What if a m
Sébastien Marchand
2016/02/01 19:29:29
No, I'm touching the timestamp file of the toolcha
| |
310 for toolchain in valid_toolchains: | |
311 toolchain_age_in_sec = time.time() - toolchain[0] | |
312 if toolchain_age_in_sec > toolchain_expiration_time: | |
313 print ('Removing version %s of the Win toolchain as it hasn\'t been used' | |
314 ' in the past %d days.' % (toolchain[1], | |
315 toolchain_age_in_sec / 60 / 60 / 24)) | |
316 RemoveToolchain(root, toolchain[1], True) | |
317 | |
318 | |
238 def GetInstallerName(): | 319 def GetInstallerName(): |
239 """Return the name of the Windows 10 Universal C Runtime installer for the | 320 """Return the name of the Windows 10 Universal C Runtime installer for the |
240 current platform, or None if installer is not needed or not applicable. | 321 current platform, or None if installer is not needed or not applicable. |
241 The registry has to be used instead of sys.getwindowsversion() because | 322 The registry has to be used instead of sys.getwindowsversion() because |
242 Python 2.7 is only manifested as being compatible up to Windows 8, so the | 323 Python 2.7 is only manifested as being compatible up to Windows 8, so the |
243 version APIs helpfully return a maximum of 6.2 (Windows 8). | 324 version APIs helpfully return a maximum of 6.2 (Windows 8). |
244 """ | 325 """ |
245 key_name = r'Software\Microsoft\Windows NT\CurrentVersion' | 326 key_name = r'Software\Microsoft\Windows NT\CurrentVersion' |
246 key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name) | 327 key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_name) |
247 value, keytype = winreg.QueryValueEx(key, "CurrentVersion") | 328 value, keytype = winreg.QueryValueEx(key, "CurrentVersion") |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
320 def winpath(path): | 401 def winpath(path): |
321 return subprocess.check_output(['cygpath', '-w', path]).strip() | 402 return subprocess.check_output(['cygpath', '-w', path]).strip() |
322 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') | 403 python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat') |
323 cmd = [python, winpath(__file__)] | 404 cmd = [python, winpath(__file__)] |
324 if options.output_json: | 405 if options.output_json: |
325 cmd.extend(['--output-json', winpath(options.output_json)]) | 406 cmd.extend(['--output-json', winpath(options.output_json)]) |
326 cmd.extend(args) | 407 cmd.extend(args) |
327 sys.exit(subprocess.call(cmd)) | 408 sys.exit(subprocess.call(cmd)) |
328 assert sys.platform != 'cygwin' | 409 assert sys.platform != 'cygwin' |
329 | 410 |
330 # We assume that the Pro hash is the first one. | 411 if len(args) == 0: |
331 desired_hashes = args | 412 sys.exit('Desired hash is required.') |
332 if len(desired_hashes) == 0: | 413 desired_hash = args[0] |
333 sys.exit('Desired hashes are required.') | |
334 | 414 |
335 # Move to depot_tools\win_toolchain where we'll store our files, and where | 415 # Move to depot_tools\win_toolchain where we'll store our files, and where |
336 # the downloader script is. | 416 # the downloader script is. |
337 os.chdir(os.path.normpath(os.path.join(BASEDIR))) | 417 os.chdir(os.path.normpath(os.path.join(BASEDIR))) |
338 toolchain_dir = '.' | 418 toolchain_dir = '.' |
339 if os.environ.get('GYP_MSVS_VERSION') == '2015': | 419 if os.environ.get('GYP_MSVS_VERSION') == '2015': |
340 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) | 420 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs_files')) |
341 else: | 421 else: |
342 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) | 422 target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files')) |
343 abs_target_dir = os.path.abspath(target_dir) | 423 if not os.path.isdir(target_dir): |
424 os.mkdir(target_dir) | |
425 toolchain_target_dir = os.path.join(target_dir, desired_hash) | |
426 | |
427 abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir) | |
344 | 428 |
345 got_new_toolchain = False | 429 got_new_toolchain = False |
346 | 430 |
347 # If the current hash doesn't match what we want in the file, nuke and pave. | 431 # If the current hash doesn't match what we want in the file, nuke and pave. |
348 # Typically this script is only run when the .sha1 one file is updated, but | 432 # Typically this script is only run when the .sha1 one file is updated, but |
349 # directly calling "gclient runhooks" will also run it, so we cache | 433 # directly calling "gclient runhooks" will also run it, so we cache |
350 # based on timestamps to make that case fast. | 434 # based on timestamps to make that case fast. |
351 current_hash = CalculateHash(target_dir) | 435 current_hashes = CalculateToolchainHashes(target_dir) |
352 if current_hash not in desired_hashes: | 436 if desired_hash not in current_hashes: |
353 should_use_gs = False | 437 should_use_gs = False |
354 if (HaveSrcInternalAccess() or | 438 if (HaveSrcInternalAccess() or |
355 LooksLikeGoogler() or | 439 LooksLikeGoogler() or |
356 CanAccessToolchainBucket()): | 440 CanAccessToolchainBucket()): |
357 should_use_gs = True | 441 should_use_gs = True |
358 if not CanAccessToolchainBucket(): | 442 if not CanAccessToolchainBucket(): |
359 RequestGsAuthentication() | 443 RequestGsAuthentication() |
360 if not should_use_gs: | 444 if not should_use_gs: |
361 print('\n\n\nPlease follow the instructions at ' | 445 print('\n\n\nPlease follow the instructions at ' |
362 'https://www.chromium.org/developers/how-tos/' | 446 'https://www.chromium.org/developers/how-tos/' |
363 'build-instructions-windows\n\n') | 447 'build-instructions-windows\n\n') |
364 return 1 | 448 return 1 |
365 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') | 449 print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...') |
366 print(' current_hash: %s' % current_hash) | 450 print(' current_hashes: %s' % ', '.join(current_hashes)) |
367 print(' desired_hashes: %s' % ', '.join(desired_hashes)) | 451 print(' desired_hash: %s' % desired_hash) |
368 sys.stdout.flush() | 452 sys.stdout.flush() |
369 DelayBeforeRemoving(target_dir) | |
370 if sys.platform == 'win32': | |
371 # These stay resident and will make the rmdir below fail. | |
372 kill_list = [ | |
373 'mspdbsrv.exe', | |
374 'vctip.exe', # Compiler and tools experience improvement data uploader. | |
375 ] | |
376 for process_name in kill_list: | |
377 with open(os.devnull, 'wb') as nul: | |
378 subprocess.call(['taskkill', '/f', '/im', process_name], | |
379 stdin=nul, stdout=nul, stderr=nul) | |
380 if os.path.isdir(target_dir): | |
381 RmDir(target_dir) | |
382 | 453 |
383 DoTreeMirror(target_dir, desired_hashes[0]) | 454 DoTreeMirror(toolchain_target_dir, desired_hash) |
384 | 455 |
385 got_new_toolchain = True | 456 got_new_toolchain = True |
457 else: | |
458 # Touch the timestamp file so we know that this version of the toolchain is | |
459 # still used. | |
460 os.utime(os.path.join(target_dir, desired_hash), None) | |
386 | 461 |
387 win_sdk = os.path.join(abs_target_dir, 'win_sdk') | 462 win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk') |
388 try: | 463 try: |
389 with open(os.path.join(target_dir, 'VS_VERSION'), 'rb') as f: | 464 with open(os.path.join(toolchain_target_dir, 'VS_VERSION'), 'rb') as f: |
390 vs_version = f.read().strip() | 465 vs_version = f.read().strip() |
391 except IOError: | 466 except IOError: |
392 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' | 467 # Older toolchains didn't have the VS_VERSION file, and used 'win8sdk' |
393 # instead of just 'win_sdk'. | 468 # instead of just 'win_sdk'. |
394 vs_version = '2013' | 469 vs_version = '2013' |
395 win_sdk = os.path.join(abs_target_dir, 'win8sdk') | 470 win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk') |
396 | 471 |
397 data = { | 472 data = { |
398 'path': abs_target_dir, | 473 'path': abs_toolchain_target_dir, |
399 'version': vs_version, | 474 'version': vs_version, |
400 'win_sdk': win_sdk, | 475 'win_sdk': win_sdk, |
401 # Added for backwards compatibility with old toolchain packages. | 476 # Added for backwards compatibility with old toolchain packages. |
402 'win8sdk': win_sdk, | 477 'win8sdk': win_sdk, |
403 'wdk': os.path.join(abs_target_dir, 'wdk'), | 478 'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'), |
404 'runtime_dirs': [ | 479 'runtime_dirs': [ |
405 os.path.join(abs_target_dir, 'sys64'), | 480 os.path.join(abs_toolchain_target_dir, 'sys64'), |
406 os.path.join(abs_target_dir, 'sys32'), | 481 os.path.join(abs_toolchain_target_dir, 'sys32'), |
407 ], | 482 ], |
408 } | 483 } |
409 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: | 484 with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f: |
410 json.dump(data, f) | 485 json.dump(data, f) |
411 | 486 |
412 if got_new_toolchain: | 487 if got_new_toolchain: |
413 current_hash = CalculateHash(target_dir) | 488 current_hashes = CalculateToolchainHashes(target_dir) |
414 if current_hash not in desired_hashes: | 489 if desired_hash not in current_hashes: |
415 print >> sys.stderr, ( | 490 print >> sys.stderr, ( |
416 'Got wrong hash after pulling a new toolchain. ' | 491 'Got wrong hash after pulling a new toolchain. ' |
417 'Wanted one of \'%s\', got \'%s\'.' % ( | 492 'Wanted \'%s\', got one of \'%s\'.' % ( |
418 ', '.join(desired_hashes), current_hash)) | 493 desired_hash, ', '.join(current_hashes))) |
419 return 1 | 494 return 1 |
420 SaveTimestampsAndHash(target_dir, current_hash) | 495 SaveTimestampsAndHash(target_dir, desired_hash) |
421 | 496 |
422 if options.output_json: | 497 if options.output_json: |
423 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), | 498 shutil.copyfile(os.path.join(target_dir, '..', 'data.json'), |
424 options.output_json) | 499 options.output_json) |
425 | 500 |
426 if os.environ.get('GYP_MSVS_VERSION') == '2015': | 501 if os.environ.get('GYP_MSVS_VERSION') == '2015': |
427 InstallUniversalCRTIfNeeded(abs_target_dir) | 502 InstallUniversalCRTIfNeeded(abs_toolchain_target_dir) |
503 | |
504 RemoveUnusedToolchains(target_dir) | |
428 | 505 |
429 return 0 | 506 return 0 |
430 | 507 |
431 | 508 |
432 if __name__ == '__main__': | 509 if __name__ == '__main__': |
433 sys.exit(main()) | 510 sys.exit(main()) |
OLD | NEW |