| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The LUCI Authors. All rights reserved. | 2 # Copyright 2013 The LUCI Authors. All rights reserved. |
| 3 # Use of this source code is governed under the Apache License, Version 2.0 | 3 # Use of this source code is governed under the Apache License, Version 2.0 |
| 4 # that can be found in the LICENSE file. | 4 # that can be found in the LICENSE file. |
| 5 | 5 |
| 6 """Archives a set of files or directories to an Isolate Server.""" | 6 """Archives a set of files or directories to an Isolate Server.""" |
| 7 | 7 |
| 8 __version__ = '0.4.8' | 8 __version__ = '0.4.9' |
| 9 | 9 |
| 10 import base64 | 10 import base64 |
| 11 import functools | 11 import functools |
| 12 import errno |
| 12 import logging | 13 import logging |
| 13 import optparse | 14 import optparse |
| 14 import os | 15 import os |
| 15 import re | 16 import re |
| 16 import signal | 17 import signal |
| 17 import sys | 18 import sys |
| 18 import tempfile | 19 import tempfile |
| 19 import threading | 20 import threading |
| 20 import time | 21 import time |
| 21 import types | 22 import types |
| 22 import urllib | |
| 23 import urlparse | |
| 24 import zlib | 23 import zlib |
| 25 | 24 |
| 26 from third_party import colorama | 25 from third_party import colorama |
| 27 from third_party.depot_tools import fix_encoding | 26 from third_party.depot_tools import fix_encoding |
| 28 from third_party.depot_tools import subcommand | 27 from third_party.depot_tools import subcommand |
| 29 | 28 |
| 30 from utils import file_path | 29 from utils import file_path |
| 31 from utils import fs | 30 from utils import fs |
| 32 from utils import logging_utils | 31 from utils import logging_utils |
| 33 from utils import lru | 32 from utils import lru |
| (...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
class Error(Exception):
  """Base exception for runtime failures raised by this module."""
| 112 | 111 |
| 113 | 112 |
class Aborted(Error):
  """Raised when an in-progress operation is aborted."""
| 117 | 116 |
| 118 | 117 |
class AlreadyExists(Error):
  """Raised when a file that is about to be created already exists on disk."""
def file_read(path, chunk_size=isolated_format.DISK_FILE_CHUNK, offset=0):
  """Yields the content of |path| in pieces of at most |chunk_size| bytes.

  Reading starts at byte |offset|; iteration stops at end of file.
  """
  with fs.open(path, 'rb') as stream:
    if offset:
      stream.seek(offset)
    # Prime the first read, then keep yielding until the file is exhausted.
    chunk = stream.read(chunk_size)
    while chunk:
      yield chunk
      chunk = stream.read(chunk_size)
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
def create_symlinks(base_directory, files):
  """Creates on-disk symlinks for every entry of |files| that declares one.

  Entries without an 'l' property are skipped. On Windows, symlink entries
  are skipped with a warning instead of being created.

  Raises:
    AlreadyExists: if the symlink destination path already exists.
  """
  symlink_entries = (
      (filepath, properties)
      for filepath, properties in files
      if 'l' in properties)
  for filepath, properties in symlink_entries:
    if sys.platform == 'win32':
      # TODO(maruel): Create symlink via the win32 api.
      logging.warning('Ignoring symlink %s', filepath)
      continue
    destination = os.path.join(base_directory, filepath)
    try:
      os.symlink(properties['l'], destination)  # pylint: disable=E1101
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise
      raise AlreadyExists('File %s already exists.' % destination)
| 226 | 233 |
| 227 | 234 |
| 228 def is_valid_file(path, size): | 235 def is_valid_file(path, size): |
| 229 """Determines if the given files appears valid. | 236 """Determines if the given files appears valid. |
| 230 | 237 |
| 231 Currently it just checks the file's size. | 238 Currently it just checks the file's size. |
| 232 """ | 239 """ |
| 233 if size == UNKNOWN_FILE_SIZE: | 240 if size == UNKNOWN_FILE_SIZE: |
| 234 return fs.isfile(path) | 241 return fs.isfile(path) |
| 235 actual_size = fs.stat(path).st_size | 242 actual_size = fs.stat(path).st_size |
| (...skipping 1724 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1960 last_update = time.time() | 1967 last_update = time.time() |
| 1961 with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT) as detector: | 1968 with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT) as detector: |
| 1962 while remaining: | 1969 while remaining: |
| 1963 detector.ping() | 1970 detector.ping() |
| 1964 | 1971 |
| 1965 # Wait for any item to finish fetching to cache. | 1972 # Wait for any item to finish fetching to cache. |
| 1966 digest = fetch_queue.wait(remaining) | 1973 digest = fetch_queue.wait(remaining) |
| 1967 | 1974 |
| 1968 # Link corresponding files to a fetched item in cache. | 1975 # Link corresponding files to a fetched item in cache. |
| 1969 for filepath, props in remaining.pop(digest): | 1976 for filepath, props in remaining.pop(digest): |
| 1970 cache.hardlink( | 1977 dest = os.path.join(outdir, filepath) |
| 1971 digest, os.path.join(outdir, filepath), props.get('m')) | 1978 if os.path.exists(dest): |
| 1979 raise AlreadyExists('File %s already exists' % dest) |
| 1980 cache.hardlink(digest, dest, props.get('m')) |
| 1972 | 1981 |
| 1973 # Report progress. | 1982 # Report progress. |
| 1974 duration = time.time() - last_update | 1983 duration = time.time() - last_update |
| 1975 if duration > DELAY_BETWEEN_UPDATES_IN_SECS: | 1984 if duration > DELAY_BETWEEN_UPDATES_IN_SECS: |
| 1976 msg = '%d files remaining...' % len(remaining) | 1985 msg = '%d files remaining...' % len(remaining) |
| 1977 print msg | 1986 print msg |
| 1978 logging.info(msg) | 1987 logging.info(msg) |
| 1979 last_update = time.time() | 1988 last_update = time.time() |
| 1980 | 1989 |
| 1981 # Cache could evict some items we just tried to fetch, it's a fatal error. | 1990 # Cache could evict some items we just tried to fetch, it's a fatal error. |
| (...skipping 316 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2298 dispatcher = subcommand.CommandDispatcher(__name__) | 2307 dispatcher = subcommand.CommandDispatcher(__name__) |
| 2299 return dispatcher.execute(OptionParserIsolateServer(), args) | 2308 return dispatcher.execute(OptionParserIsolateServer(), args) |
| 2300 | 2309 |
| 2301 | 2310 |
if __name__ == '__main__':
  # NOTE(review): presumably suppresses OS-level crash reporting dialogs so a
  # crashing child doesn't hang automation — confirm against subprocess42.
  subprocess42.inhibit_os_error_reporting()
  # depot_tools helper; presumably normalizes stdout/stderr encoding across
  # platforms before any output is produced.
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  # Enables ANSI color handling via colorama before any colored output.
  colorama.init()
  # Dispatch subcommands; exit with the command's return code.
  sys.exit(main(sys.argv[1:]))
| OLD | NEW |