Chromium Code Reviews

#!/usr/bin/env python

M-A Ruel, 2014/01/13 18:50:32: no empty line
scottmg, 2014/01/13 18:58:36: Done.
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Downloads and unpacks a toolchain for building on Windows. The contents are
matched by sha1 which will be updated when the toolchain is updated.

Having a toolchain script in depot_tools means that it's not versioned
directly with the source code. That is, if the toolchain is upgraded, but
you're trying to build an historical version of Chromium from before the
toolchain upgrade, this will cause you to build with a newer toolchain than
was available when that code was committed. This is done for two main
reasons: 1) it would likely be annoying to have the up-to-date toolchain
removed and replaced by one without a service pack applied; 2) it would
require maintaining scripts that can build older, not-up-to-date revisions of
the toolchain. This is likely to be a poorly tested code path that probably
won't be properly maintained. See http://crbug.com/323300.

This does not extend to major versions of the toolchain, however, on the
assumption that there are more likely to be source incompatibilities between
major revisions. This script calls a subscript (currently, toolchain2013.py)
to do the main work. It is expected that toolchain2013.py will always be able
to acquire/build the most current revision of a VS2013-based toolchain. In the
future when a hypothetical VS2015 is released, the 2013 script will be
maintained, and a new 2015 script would be added.
"""

import ctypes.wintypes
import hashlib
import json
import os
import subprocess
import sys


BASEDIR = os.path.dirname(os.path.abspath(__file__))


GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
GetFileAttributes.restype = ctypes.wintypes.DWORD
FILE_ATTRIBUTE_HIDDEN = 0x2
FILE_ATTRIBUTE_SYSTEM = 0x4
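# Note (added for clarity, not part of the original change): GetFileAttributesW
# returns the DWORD value INVALID_FILE_ATTRIBUTES (0xffffffff) on failure,
# which is what the assert in IsHidden() below guards against.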


def IsHidden(file_path):
  """Returns whether the given |file_path| has the 'system' or 'hidden'
  attribute set."""
  p = GetFileAttributes(file_path)
  assert p != 0xffffffff
  return bool(p & (FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_SYSTEM))


def GetFileList(root):
  """Gets a normalized list of files under |root|."""
  assert not os.path.isabs(root)
  assert os.path.normpath(root) == root
  file_list = []
  for base, _, files in os.walk(root):
    paths = [os.path.join(base, f) for f in files]
    file_list.extend(x.lower() for x in paths if not IsHidden(x))
  return sorted(file_list)


def MakeTimestampsFileName(root):
  return os.path.join(root, '..', '.timestamps')


def CalculateHash(root):
  """Calculates the sha1 of the paths to all files in the given |root| and the
  contents of those files, and returns as a hex string."""
  file_list = GetFileList(root)

  # Check whether we previously saved timestamps in $root/../.timestamps. If
  # we didn't, or they don't match, then do the full calculation, otherwise
  # return the saved value.
  timestamps_file = MakeTimestampsFileName(root)
  timestamps_data = {'files': [], 'sha1': ''}
  if os.path.exists(timestamps_file):
    with open(timestamps_file, 'rb') as f:
      try:
        timestamps_data = json.load(f)
      except ValueError:
        # json couldn't be loaded, empty data will force a re-hash.
        pass

  matches = len(file_list) == len(timestamps_data['files'])
  if matches:
    for disk, cached in zip(file_list, timestamps_data['files']):
      if disk != cached[0] or os.stat(disk).st_mtime != cached[1]:
        matches = False
        break
  if matches:
    return timestamps_data['sha1']

  digest = hashlib.sha1()
  for path in file_list:
    digest.update(path)
    with open(path, 'rb') as f:
      digest.update(f.read())
  return digest.hexdigest()

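# Illustrative sketch, not taken from this change: the .timestamps cache file
# written by SaveTimestampsAndHash() below, and read back by CalculateHash()
# above, is JSON of roughly this shape (the path and mtime are made-up
# examples; only the structure comes from the code):
#
#   {"files": [["vs2013_files\\vc\\bin\\cl.exe", 1389650000.0], ...],
#    "sha1": "<hex digest previously returned by CalculateHash()>"}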
def SaveTimestampsAndHash(root, sha1):
  """Save timestamps and the final hash to be able to early-out more quickly
  next time."""
  file_list = GetFileList(root)
  timestamps_data = {
      'files': [[f, os.stat(f).st_mtime] for f in file_list],
      'sha1': sha1,
  }
  with open(MakeTimestampsFileName(root), 'wb') as f:
    json.dump(timestamps_data, f)


def main():
  if sys.platform not in ('cygwin', 'win32'):
    return 0

  if len(sys.argv) != 1:
    print >> sys.stderr, 'Unexpected arguments.'
    return 1

  # Move to depot_tools\win_toolchain where we'll store our files, and where
  # the downloader script is.
  os.chdir(os.path.normpath(os.path.join(BASEDIR)))
  toolchain_dir = '.'
  target_dir = os.path.normpath(os.path.join(toolchain_dir, 'vs2013_files'))

  sha1path = os.path.join(toolchain_dir, 'toolchain_vs2013.sha1')
  desired_hash = ''
  if os.path.isfile(sha1path):
    with open(sha1path, 'rb') as f:
      desired_hash = f.read().strip()

  # If the current hash doesn't match what we want in the file, nuke and pave.
  # Typically this script is only run when the .sha1 file is updated, but
  # directly calling "gclient runhooks" will also run it, so we cache
  # based on timestamps to make that case fast.
  current_hash = CalculateHash(target_dir)
  if current_hash != desired_hash:
    print 'Windows toolchain out of date or doesn\'t exist, updating...'
    if os.path.isdir(target_dir):
      subprocess.check_call('rmdir /s/q "%s"' % target_dir, shell=True)
    subprocess.check_call([
        sys.executable,
        'toolchain2013.py',
        '--targetdir', target_dir])
    current_hash = CalculateHash(target_dir)
    if current_hash != desired_hash:
      print >> sys.stderr, (
          'Got wrong hash after pulling a new toolchain. '
          'Wanted \'%s\', got \'%s\'.' % (
              desired_hash, current_hash))
      return 1
    SaveTimestampsAndHash(target_dir, current_hash)

  return 0


if __name__ == '__main__':
  sys.exit(main())
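A minimal usage sketch, assuming the script is named
get_toolchain_if_necessary.py (the filename is not shown in this review; the
depot_tools\win_toolchain location comes from the comment in main()): running
it with no arguments from a Windows checkout, e.g.

  python depot_tools\win_toolchain\get_toolchain_if_necessary.py

is a no-op on non-Windows platforms, compares the pinned sha1 in
toolchain_vs2013.sha1 against the hash of vs2013_files\, and nukes and
re-fetches the toolchain via toolchain2013.py only when they differ, caching
file timestamps so that repeated "gclient runhooks" invocations stay fast.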