| OLD | NEW |
| 1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import ast | 5 import ast |
| 6 import contextlib | 6 import contextlib |
| 7 import fnmatch | 7 import fnmatch |
| 8 import json | 8 import json |
| 9 import os | 9 import os |
| 10 import pipes | 10 import pipes |
| 11 import re | 11 import re |
| 12 import shlex | 12 import shlex |
| 13 import shutil | 13 import shutil |
| 14 import subprocess | 14 import subprocess |
| 15 import sys | 15 import sys |
| 16 import tempfile | 16 import tempfile |
| 17 import zipfile | 17 import zipfile |
| 18 | 18 |
| 19 | 19 |
| 20 CHROMIUM_SRC = os.path.normpath( | 20 CHROMIUM_SRC = os.path.normpath( |
| 21 os.path.join(os.path.dirname(__file__), | 21 os.path.join(os.path.dirname(__file__), |
| 22 os.pardir, os.pardir, os.pardir, os.pardir)) | 22 os.pardir, os.pardir, os.pardir, os.pardir)) |
| 23 COLORAMA_ROOT = os.path.join(CHROMIUM_SRC, | 23 COLORAMA_ROOT = os.path.join(CHROMIUM_SRC, |
| 24 'third_party', 'colorama', 'src') | 24 'third_party', 'colorama', 'src') |
| 25 # aapt should ignore OWNERS files in addition to the default ignore pattern. | 25 # aapt should ignore OWNERS files in addition to the default ignore pattern. |
| 26 AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' + | 26 AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' + |
| 27 '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp') | 27 '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp') |
| 28 HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0) |
| 28 | 29 |
| 29 | 30 |
| 30 @contextlib.contextmanager | 31 @contextlib.contextmanager |
| 31 def TempDir(): | 32 def TempDir(): |
| 32 dirname = tempfile.mkdtemp() | 33 dirname = tempfile.mkdtemp() |
| 33 try: | 34 try: |
| 34 yield dirname | 35 yield dirname |
| 35 finally: | 36 finally: |
| 36 shutil.rmtree(dirname) | 37 shutil.rmtree(dirname) |
| 37 | 38 |
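The TempDir helper above is a context manager, so the temporary directory is removed by shutil.rmtree even if the body raises. A minimal usage sketch; importing this file as build_utils and the scratch file name are assumptions for illustration:

    import os
    import build_utils  # assumed import name for the file under review

    with build_utils.TempDir() as temp_dir:
        # The directory exists only inside this block.
        scratch = os.path.join(temp_dir, 'scratch.txt')
        with open(scratch, 'w') as f:
            f.write('data')
    # On exit, temp_dir and everything under it has been deleted.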
| (...skipping 173 matching lines...) |
| 211 if no_clobber: | 212 if no_clobber: |
| 212 output_path = os.path.join(path, name) | 213 output_path = os.path.join(path, name) |
| 213 if os.path.exists(output_path): | 214 if os.path.exists(output_path): |
| 214 raise Exception( | 215 raise Exception( |
| 215 'Path already exists from zip: %s %s %s' | 216 'Path already exists from zip: %s %s %s' |
| 216 % (zip_path, name, output_path)) | 217 % (zip_path, name, output_path)) |
| 217 | 218 |
| 218 z.extractall(path=path) | 219 z.extractall(path=path) |
| 219 | 220 |
| 220 | 221 |
| 221 def DoZip(inputs, output, base_dir): | 222 def DoZip(inputs, output, base_dir=None): |
| 223 """Creates a zip file from a list of files. |
| 224 |
| 225 Args: |
| 226 inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples. |
| 227 output: Destination .zip file. |
| 228 base_dir: Prefix to strip from inputs. |
| 229 """ |
| 230 input_tuples = [] |
| 231 for tup in inputs: |
| 232 if isinstance(tup, basestring): |
| 233 tup = (os.path.relpath(tup, base_dir), tup) |
| 234 input_tuples.append(tup) |
| 235 |
| 236 # Sort by zip path to ensure stable zip ordering. |
| 237 input_tuples.sort(key=lambda tup: tup[0]) |
| 222 with zipfile.ZipFile(output, 'w') as outfile: | 238 with zipfile.ZipFile(output, 'w') as outfile: |
| 223 for f in inputs: | 239 for zip_path, fs_path in input_tuples: |
| 224 CheckZipPath(os.path.relpath(f, base_dir)) | 240 CheckZipPath(zip_path) |
| 225 outfile.write(f, os.path.relpath(f, base_dir)) | 241 zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP) |
| 242 with file(fs_path) as f: |
| 243 contents = f.read() |
| 244 outfile.writestr(zipinfo, contents) |
| 226 | 245 |
| 227 | 246 |
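As rewritten above, DoZip accepts either plain paths (made archive-relative via base_dir) or explicit (zip_path, fs_path) tuples, sorts entries by archive path, and stamps each entry with HERMETIC_TIMESTAMP, so the output no longer depends on filesystem mtimes or input ordering. A hedged usage sketch; the module name and the sample paths are assumptions:

    import build_utils  # assumed import name

    # Plain paths: archive names are the inputs relative to base_dir.
    build_utils.DoZip(['in/a.txt', 'in/sub/b.txt'], 'out1.zip', base_dir='in')

    # Tuples: callers choose the archive layout directly.
    build_utils.DoZip([('docs/a.txt', 'in/a.txt')], 'out2.zip')

    # Rebuilding out1.zip from unchanged inputs yields byte-identical output,
    # since entry order and timestamps are now fixed.

ZipDir now funnels through the same code path, so directory zips inherit the deterministic behaviour as well.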
| 228 def ZipDir(output, base_dir): | 247 def ZipDir(output, base_dir): |
| 229 with zipfile.ZipFile(output, 'w') as outfile: | 248 """Creates a zip file from a directory.""" |
| 230 for root, _, files in os.walk(base_dir): | 249 inputs = [] |
| 231 for f in files: | 250 for root, _, files in os.walk(base_dir): |
| 232 path = os.path.join(root, f) | 251 for f in files: |
| 233 archive_path = os.path.relpath(path, base_dir) | 252 inputs.append(os.path.join(root, f)) |
| 234 CheckZipPath(archive_path) | 253 DoZip(inputs, output, base_dir) |
| 235 outfile.write(path, archive_path) | |
| 236 | 254 |
| 237 | 255 |
| 238 def MatchesGlob(path, filters): | 256 def MatchesGlob(path, filters): |
| 239 """Returns whether the given path matches any of the given glob patterns.""" | 257 """Returns whether the given path matches any of the given glob patterns.""" |
| 240 return filters and any(fnmatch.fnmatch(path, f) for f in filters) | 258 return filters and any(fnmatch.fnmatch(path, f) for f in filters) |
| 241 | 259 |
| 242 | 260 |
| 243 def MergeZips(output, inputs, exclude_patterns=None): | 261 def MergeZips(output, inputs, exclude_patterns=None, path_transform=None): |
| 262 path_transform = path_transform or (lambda p, z: p) |
| 244 added_names = set() | 263 added_names = set() |
| 245 | 264 |
| 246 with zipfile.ZipFile(output, 'w') as out_zip: | 265 with zipfile.ZipFile(output, 'w') as out_zip: |
| 247 for in_file in inputs: | 266 for in_file in inputs: |
| 248 with zipfile.ZipFile(in_file, 'r') as in_zip: | 267 with zipfile.ZipFile(in_file, 'r') as in_zip: |
| 249 for name in in_zip.namelist(): | 268 for name in in_zip.namelist(): |
| 250 if not (name in added_names or MatchesGlob(name, exclude_patterns)): | 269 dst_name = path_transform(name, in_file) |
| 251 out_zip.writestr(name, in_zip.read(name)) | 270 already_added = dst_name in added_names |
| 252 added_names.add(name) | 271 if not already_added and not MatchesGlob(dst_name, exclude_patterns): |
| 272 zipinfo = zipfile.ZipInfo(filename=dst_name, |
| 273 date_time=HERMETIC_TIMESTAMP) |
| 274 out_zip.writestr(zipinfo, in_zip.read(name)) |
| 275 added_names.add(dst_name) |
| 253 | 276 |
| 254 | 277 |
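MergeZips gains an optional path_transform(name, input_zip) hook that can rename entries as they are copied, with exclude_patterns and the duplicate check applied to the transformed name, and merged entries are likewise written with HERMETIC_TIMESTAMP. A sketch under assumed names; the prefix-by-source transform below is purely illustrative:

    import os
    import posixpath
    import build_utils  # assumed import name

    def _prefix_by_source(name, source_zip):
        # Hypothetical transform: nest each entry under its source zip's basename.
        prefix = os.path.splitext(os.path.basename(source_zip))[0]
        return posixpath.join(prefix, name)

    build_utils.MergeZips('merged.zip',
                          ['libs_a.zip', 'libs_b.zip'],
                          exclude_patterns=['*/README*'],
                          path_transform=_prefix_by_source)

As before, the first occurrence of a name wins and later duplicates are skipped, only now keyed on the transformed destination name.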
| 255 def PrintWarning(message): | 278 def PrintWarning(message): |
| 256 print 'WARNING: ' + message | 279 print 'WARNING: ' + message |
| 257 | 280 |
| 258 | 281 |
| 259 def PrintBigWarning(message): | 282 def PrintBigWarning(message): |
| 260 print '***** ' * 8 | 283 print '***** ' * 8 |
| 261 PrintWarning(message) | 284 PrintWarning(message) |
| 262 print '***** ' * 8 | 285 print '***** ' * 8 |
| (...skipping 107 matching lines...) |
| 370 file_jsons[file_path] = ReadJson(file_path) | 393 file_jsons[file_path] = ReadJson(file_path) |
| 371 | 394 |
| 372 expansion = file_jsons[file_path] | 395 expansion = file_jsons[file_path] |
| 373 for k in lookup_path[1:]: | 396 for k in lookup_path[1:]: |
| 374 expansion = expansion[k] | 397 expansion = expansion[k] |
| 375 | 398 |
| 376 new_args[i] = arg[:match.start()] + str(expansion) | 399 new_args[i] = arg[:match.start()] + str(expansion) |
| 377 | 400 |
| 378 return new_args | 401 return new_args |
| 379 | 402 |
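The expansion tail above caches each JSON file it reads in file_jsons, walks the remaining lookup keys into the parsed data, and splices str(expansion) into the argument. The marker syntax that produces lookup_path is outside this hunk; the standalone sketch below only mirrors the visible lookup walk, with a hypothetical build.json of the form {"deps_info": {"jar_path": "out/Foo.jar"}}:

    import json

    with open('build.json') as f:          # hypothetical file name and contents
        expansion = json.load(f)
    for k in ['deps_info', 'jar_path']:    # stands in for lookup_path[1:]
        expansion = expansion[k]
    print(expansion)  # -> out/Foo.jar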