OLD | NEW |
| (Empty) |
1 # Copyright 2013 The Chromium Authors. All rights reserved. | |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 import ast | |
6 import contextlib | |
7 import fnmatch | |
8 import json | |
9 import os | |
10 import pipes | |
11 import re | |
12 import shlex | |
13 import shutil | |
14 import subprocess | |
15 import sys | |
16 import tempfile | |
17 import zipfile | |
18 | |
19 | |
# Root of the Chromium checkout: four directory levels above this file
# (assumes this file lives four levels below src/ — TODO confirm against
# the actual location of this module).
CHROMIUM_SRC = os.path.normpath(
    os.path.join(os.path.dirname(__file__),
                 os.pardir, os.pardir, os.pardir, os.pardir))
# Location of the bundled colorama library inside the checkout.
COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
                             'third_party', 'colorama', 'src')
# aapt should ignore OWNERS files in addition to the default ignore pattern.
AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
28 | |
29 | |
@contextlib.contextmanager
def TempDir():
  """Context manager yielding a fresh temporary directory.

  The directory and its contents are removed when the block exits,
  even if the block raises.
  """
  temp_path = tempfile.mkdtemp()
  try:
    yield temp_path
  finally:
    shutil.rmtree(temp_path)
37 | |
38 | |
def MakeDirectory(dir_path):
  """Creates dir_path and any missing parents; a no-op if it already exists.

  The previous version swallowed *every* OSError, silently succeeding even
  when creation genuinely failed (e.g. permission denied, or the path exists
  as a regular file).  Only the already-exists case is benign, so re-raise
  whenever the directory is not actually present afterwards.
  """
  try:
    os.makedirs(dir_path)
  except OSError:
    # Ignore only the "already a directory" race; propagate real failures.
    if not os.path.isdir(dir_path):
      raise
44 | |
45 | |
def DeleteDirectory(dir_path):
  """Recursively removes dir_path; does nothing if it does not exist."""
  if not os.path.exists(dir_path):
    return
  shutil.rmtree(dir_path)
49 | |
50 | |
def Touch(path, fail_if_missing=False):
  """Updates the mtime of path, creating it (and parent dirs) if needed.

  Args:
    path: file to touch.
    fail_if_missing: when true, raise instead of creating a missing file.
  """
  exists = os.path.exists(path)
  if fail_if_missing and not exists:
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  # Opening in append mode creates the file without truncating it.
  with open(path, 'a'):
    os.utime(path, None)
58 | |
59 | |
def FindInDirectory(directory, filename_filter):
  """Recursively finds files under directory matching an fnmatch pattern.

  Returns a list of full paths (directory walk order).
  """
  matches = []
  for dirpath, _, basenames in os.walk(directory):
    for basename in fnmatch.filter(basenames, filename_filter):
      matches.append(os.path.join(dirpath, basename))
  return matches
66 | |
67 | |
def FindInDirectories(directories, filename_filter):
  """Like FindInDirectory, but searches several roots and concatenates."""
  results = []
  for directory in directories:
    results += FindInDirectory(directory, filename_filter)
  return results
73 | |
74 | |
def ParseGnList(gn_string):
  """Parses a GN list literal (which is valid Python literal syntax)."""
  parsed = ast.literal_eval(gn_string)
  return parsed
77 | |
78 | |
def ParseGypList(gyp_string):
  """Parses a gyp value into a list of strings.

  Accepts either a Python-style list literal or a shell-style
  whitespace-separated string.
  """
  # The ninja generator doesn't support $ in strings, so use ## to
  # represent $.
  # TODO(cjhopman): Remove when
  # https://code.google.com/p/gyp/issues/detail?id=327
  # is addressed.
  expanded = gyp_string.replace('##', '$')

  if expanded.startswith('['):
    # A Python literal list; ast.literal_eval handles quoting/escapes.
    return ast.literal_eval(expanded)
  return shlex.split(expanded)
90 | |
91 | |
def CheckOptions(options, parser, required=None):
  """Reports a parser error for every required option left unset (None)."""
  for option_name in required or []:
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))
98 | |
99 | |
def WriteJson(obj, path, only_if_changed=False):
  """Writes obj to path as sorted, pretty-printed JSON.

  When only_if_changed is true and the file already holds exactly the new
  serialization, the file is left untouched (preserving its mtime).
  """
  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as existing:
      old_dump = existing.read()

  if only_if_changed and old_dump == new_dump:
    return
  with open(path, 'w') as outfile:
    outfile.write(new_dump)
111 | |
112 | |
def ReadJson(path):
  """Returns the deserialized contents of the JSON file at path."""
  with open(path, 'r') as jsonfile:
    parsed = json.load(jsonfile)
  return parsed
116 | |
117 | |
class CalledProcessError(Exception):
  """Raised by CheckOutput when the process it runs fails.

  Carries enough context (cwd, argv, combined output) to reproduce and
  diagnose the failure.
  """

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    self.cwd = cwd        # Directory the command was run in.
    self.args = args      # Argv list of the failed command.
    self.output = output  # Combined stdout + stderr of the command.

  def __str__(self):
    # Emit a command the user can copy-paste straight into a shell to
    # reproduce the failure.
    quoted_args = ' '.join(map(pipes.quote, self.args))
    copyable_command = '( cd {}; {} )'.format(
        os.path.abspath(self.cwd), quoted_args)
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
134 | |
135 | |
136 # This can be used in most cases like subprocess.check_output(). The output, | |
137 # particularly when the command fails, better highlights the command's failure. | |
138 # If the command fails, raises a build_utils.CalledProcessError. | |
def CheckOutput(args, cwd=None,
                print_stdout=False, print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  """Runs a command and captures its output, like subprocess.check_output().

  Args:
    args: argv list for the command.
    cwd: working directory; defaults to the current directory.
    print_stdout/print_stderr: whether to forward the captured streams.
    stdout_filter/stderr_filter: optional callables applied to the captured
      streams before the failure check and any printing.
    fail_func: decides failure from (returncode, stderr); default is any
      non-zero exit.

  Returns:
    The (possibly filtered) stdout of the command.

  Raises:
    CalledProcessError: when fail_func reports failure; its message includes
      a copy-pasteable repro command.
  """
  if not cwd:
    cwd = os.getcwd()

  proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, cwd=cwd)
  stdout, stderr = proc.communicate()

  # Filters run before both the failure decision and any echoing.
  if stdout_filter is not None:
    stdout = stdout_filter(stdout)
  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  if fail_func(proc.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  return stdout
166 | |
167 | |
def GetModifiedTime(path):
  """Returns the mtime of path.

  For a symlink this is the newer of the link's own mtime and its
  target's mtime.
  """
  link_time = os.lstat(path).st_mtime
  target_time = os.stat(path).st_mtime
  return max(link_time, target_time)
172 | |
173 | |
def IsTimeStale(output, inputs):
  """True if output is missing or older than any of inputs."""
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  return any(GetModifiedTime(i) > output_time for i in inputs)
183 | |
184 | |
def IsDeviceReady():
  """True if `adb get-state` reports an attached, ready Android device."""
  state = CheckOutput(['adb', 'get-state'])
  return state.strip() == 'device'
188 | |
189 | |
def CheckZipPath(name):
  """Rejects zip entry names that could escape the extraction directory.

  Raises for non-canonical paths (e.g. containing '..' or '.') and for
  absolute paths.
  """
  if name != os.path.normpath(name):
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)
195 | |
196 | |
def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
  """Extracts zip_path into path.

  Args:
    zip_path: archive to extract.
    path: destination directory; defaults to the current directory and is
      created if missing.
    no_clobber: raise if an extracted file would overwrite an existing path.
    pattern: optional fnmatch pattern; only matching file entries are
      extracted.

  The previous implementation validated and pattern-filtered entry names in
  a loop but then called z.extractall() with no member list, extracting
  *every* entry regardless of |pattern| (and without CheckZipPath
  validation for the filtered-out names).  Pass the vetted member list to
  extractall() instead.
  """
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  with zipfile.ZipFile(zip_path) as z:
    members = []
    for name in z.namelist():
      if name.endswith('/'):
        # Keep directory entries so empty directories are still created,
        # matching the old extractall() behavior.
        members.append(name)
        continue
      if pattern is not None and not fnmatch.fnmatch(name, pattern):
        continue
      CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      members.append(name)

    z.extractall(path=path, members=members)
219 | |
220 | |
def DoZip(inputs, output, base_dir):
  """Creates a zip at output containing inputs, stored relative to base_dir."""
  with zipfile.ZipFile(output, 'w') as outfile:
    for input_path in inputs:
      archive_name = os.path.relpath(input_path, base_dir)
      # Refuse entries that would escape base_dir (e.g. via '..').
      CheckZipPath(archive_name)
      outfile.write(input_path, archive_name)
226 | |
227 | |
def ZipDir(output, base_dir):
  """Zips every file under base_dir into output, paths relative to base_dir."""
  with zipfile.ZipFile(output, 'w') as outfile:
    for root, _, filenames in os.walk(base_dir):
      for filename in filenames:
        full_path = os.path.join(root, filename)
        archive_path = os.path.relpath(full_path, base_dir)
        # Sanity-check the archive path before storing it.
        CheckZipPath(archive_path)
        outfile.write(full_path, archive_path)
236 | |
237 | |
def MergeZips(output, inputs, exclude_patterns=None):
  """Merges the input zips into a single zip at output.

  On duplicate entry names, the first occurrence (in inputs order) wins.
  Entries matching any fnmatch pattern in exclude_patterns are skipped.
  """
  def _Excluded(name):
    for p in exclude_patterns or []:
      if fnmatch.fnmatch(name, p):
        return True
    return False

  added_names = set()
  with zipfile.ZipFile(output, 'w') as out_zip:
    for in_file in inputs:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        for name in in_zip.namelist():
          if name in added_names or _Excluded(name):
            continue
          out_zip.writestr(name, in_zip.read(name))
          added_names.add(name)
254 | |
255 | |
def PrintWarning(message):
  """Prints message to stdout with a 'WARNING: ' prefix."""
  # Single-argument parenthesized print works identically under Python 2.
  print('WARNING: ' + message)
258 | |
259 | |
def PrintBigWarning(message):
  """Like PrintWarning, but bracketed by eye-catching banner lines."""
  banner = '***** ' * 8
  print(banner)
  PrintWarning(message)
  print(banner)
264 | |
265 | |
def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph (a cycle would make the
  topological-sort loop below spin forever).

  Args:
    top: a list of the top level nodes
    deps_func: A function that takes a node and returns a set of its direct
      dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # Phase 1: walk the graph to collect every reachable node.
  all_deps = set(top)
  to_visit = list(top)
  while to_visit:
    node = to_visit.pop()
    unseen = deps_func(node).difference(all_deps)
    all_deps.update(unseen)
    to_visit.extend(unseen)

  # Phase 2: simple (quadratic) topological sort — repeatedly emit any node
  # all of whose dependencies have already been emitted.
  sorted_deps = []
  unsorted_deps = dict((node, deps_func(node)) for node in all_deps)
  while unsorted_deps:
    for node, node_deps in list(unsorted_deps.items()):
      if not node_deps.intersection(unsorted_deps):
        sorted_deps.append(node)
        del unsorted_deps[node]

  return sorted_deps
300 | |
301 | |
def GetPythonDependencies():
  """Gets the paths of imported non-system python modules.

  A path is assumed to be a "system" import if it is outside of chromium's
  src/. The paths will be relative to the current directory.
  """
  def _SourcePath(path):
    # Map a compiled .pyc file back to its .py source.
    if path.endswith('.pyc'):
      return path[:-1]
    return path

  module_paths = [m.__file__ for m in sys.modules.values()
                  if m is not None and hasattr(m, '__file__')]
  abs_paths = [os.path.abspath(p) for p in module_paths]
  in_src = [p for p in abs_paths if p.startswith(CHROMIUM_SRC)]
  relative = [os.path.relpath(_SourcePath(p)) for p in in_src]
  return sorted(set(relative))
323 | |
324 | |
def AddDepfileOption(parser):
  """Registers the --depfile option on an optparse-style parser."""
  parser.add_option(
      '--depfile',
      help='Path to depfile. This must be specified as the '
           'action\'s first output.')
329 | |
330 | |
def WriteDepfile(path, dependencies):
  """Writes a Makefile-style depfile: '<path>: <dep1> <dep2> ...\\n'."""
  with open(path, 'w') as depfile:
    depfile.write('%s: %s\n' % (path, ' '.join(dependencies)))
337 | |
338 | |
def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json.
  And then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  file_arg_re = re.compile(r'@FileArg\((.*?)\)')
  json_cache = {}  # Parsed json keyed by file path, so each file is read once.
  expanded_args = list(args)
  for index, arg in enumerate(args):
    match = file_arg_re.search(arg)
    if match is None:
      continue

    # A placeholder must be the final thing in the argument.
    if match.end() != len(arg):
      raise Exception('Unexpected characters after FileArg: ' + arg)

    keys = match.group(1).split(':')
    file_path = keys[0]
    if file_path not in json_cache:
      json_cache[file_path] = ReadJson(file_path)

    value = json_cache[file_path]
    for key in keys[1:]:
      value = value[key]

    expanded_args[index] = arg[:match.start()] + str(value)

  return expanded_args
376 | |
OLD | NEW |