| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The LUCI Authors. All rights reserved. | 2 # Copyright 2013 The LUCI Authors. All rights reserved. |
| 3 # Use of this source code is governed under the Apache License, Version 2.0 | 3 # Use of this source code is governed under the Apache License, Version 2.0 |
| 4 # that can be found in the LICENSE file. | 4 # that can be found in the LICENSE file. |
| 5 | 5 |
| 6 """Archives a set of files or directories to an Isolate Server.""" | 6 """Archives a set of files or directories to an Isolate Server.""" |
| 7 | 7 |
| 8 __version__ = '0.8.0' | 8 __version__ = '0.8.0' |
| 9 | 9 |
| 10 import base64 | |
| 11 import errno | 10 import errno |
| 12 import functools | 11 import functools |
| 13 import io | 12 import io |
| 14 import logging | 13 import logging |
| 15 import optparse | 14 import optparse |
| 16 import os | 15 import os |
| 17 import re | 16 import re |
| 18 import signal | 17 import signal |
| 19 import stat | 18 import stat |
| 20 import sys | 19 import sys |
| 21 import tarfile | 20 import tarfile |
| 22 import tempfile | 21 import tempfile |
| 23 import threading | |
| 24 import time | 22 import time |
| 25 import types | |
| 26 import zlib | 23 import zlib |
| 27 | 24 |
| 28 from third_party import colorama | 25 from third_party import colorama |
| 29 from third_party.depot_tools import fix_encoding | 26 from third_party.depot_tools import fix_encoding |
| 30 from third_party.depot_tools import subcommand | 27 from third_party.depot_tools import subcommand |
| 31 | 28 |
| 32 from libs import arfile | 29 from libs import arfile |
| 33 from utils import file_path | 30 from utils import file_path |
| 34 from utils import fs | 31 from utils import fs |
| 35 from utils import logging_utils | 32 from utils import logging_utils |
| (...skipping 305 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 341 os.symlink(properties['l'], outfile) # pylint: disable=E1101 | 338 os.symlink(properties['l'], outfile) # pylint: disable=E1101 |
| 342 except OSError as e: | 339 except OSError as e: |
| 343 if e.errno == errno.EEXIST: | 340 if e.errno == errno.EEXIST: |
| 344 raise AlreadyExists('File %s already exists.' % outfile) | 341 raise AlreadyExists('File %s already exists.' % outfile) |
| 345 raise | 342 raise |
| 346 | 343 |
| 347 | 344 |
| 348 def is_valid_file(path, size): | 345 def is_valid_file(path, size): |
| 349 """Determines if the given file appears valid. | 346 """Determines if the given file appears valid. |
| 350 | 347 |
| 351 Currently it just checks the file's size. | 348 Currently it just checks the file exists and its size matches the expectation. |
| 352 """ | 349 """ |
| 353 if size == UNKNOWN_FILE_SIZE: | 350 if size == UNKNOWN_FILE_SIZE: |
| 354 return fs.isfile(path) | 351 return fs.isfile(path) |
| 355 actual_size = fs.stat(path).st_size | 352 try: |
| 353 actual_size = fs.stat(path).st_size |
| 354 except OSError as e: |
| 355 logging.warning( |
| 356 'Can\'t read item %s, assuming it\'s invalid: %s', |
| 357 os.path.basename(path), e) |
| 358 return False |
| 356 if size != actual_size: | 359 if size != actual_size: |
| 357 logging.warning( | 360 logging.warning( |
| 358 'Found invalid item %s; %d != %d', | 361 'Found invalid item %s; %d != %d', |
| 359 os.path.basename(path), actual_size, size) | 362 os.path.basename(path), actual_size, size) |
| 360 return False | 363 return False |
| 361 return True | 364 return True |
| 362 | 365 |
| 363 | 366 |
| 364 class FileItem(Item): | 367 class FileItem(Item): |
| 365 """A file to push to Storage. | 368 """A file to push to Storage. |
| (...skipping 797 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1163 # call are manually verified. | 1166 # call are manually verified. |
| 1164 # | 1167 # |
| 1165 #with self._lock: | 1168 #with self._lock: |
| 1166 # for digest in self._lru: | 1169 # for digest in self._lru: |
| 1167 # if not isolated_format.is_valid_hash( | 1170 # if not isolated_format.is_valid_hash( |
| 1168 # self._path(digest), self.hash_algo): | 1171 # self._path(digest), self.hash_algo): |
| 1169 # self.evict(digest) | 1172 # self.evict(digest) |
| 1170 # logging.info('Deleted corrupted item: %s', digest) | 1173 # logging.info('Deleted corrupted item: %s', digest) |
| 1171 | 1174 |
| 1172 def touch(self, digest, size): | 1175 def touch(self, digest, size): |
| 1173 """Verifies an actual file is valid. | 1176 """Verifies an actual file is valid and bumps its LRU position. |
| 1177 |
| 1178 Returns False if the file is missing or invalid. Doesn't kick it from LRU |
| 1179 though (call 'evict' explicitly). |
| 1174 | 1180 |
| 1175 Note that it doesn't compute the hash so it could still be corrupted if the | 1181 Note that it doesn't compute the hash so it could still be corrupted if the |
| 1176 file size didn't change. | 1182 file size didn't change. |
| 1177 | 1183 |
| 1178 TODO(maruel): More stringent verification while keeping the check fast. | 1184 TODO(maruel): More stringent verification while keeping the check fast. |
| 1179 """ | 1185 """ |
| 1180 # Do the check outside the lock. | 1186 # Do the check outside the lock. |
| 1181 if not is_valid_file(self._path(digest), size): | 1187 if not is_valid_file(self._path(digest), size): |
| 1182 return False | 1188 return False |
| 1183 | 1189 |
| (...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1336 def _path(self, digest): | 1342 def _path(self, digest): |
| 1337 """Returns the path to one item.""" | 1343 """Returns the path to one item.""" |
| 1338 return os.path.join(self.cache_dir, digest) | 1344 return os.path.join(self.cache_dir, digest) |
| 1339 | 1345 |
| 1340 def _remove_lru_file(self, allow_protected): | 1346 def _remove_lru_file(self, allow_protected): |
| 1341 """Removes the least recently used file and returns its size.""" | 1347 """Removes the least recently used file and returns its size.""" |
| 1342 self._lock.assert_locked() | 1348 self._lock.assert_locked() |
| 1343 try: | 1349 try: |
| 1344 digest, (size, _) = self._lru.get_oldest() | 1350 digest, (size, _) = self._lru.get_oldest() |
| 1345 if not allow_protected and digest == self._protected: | 1351 if not allow_protected and digest == self._protected: |
| 1346 raise Error('Not enough space to map the whole isolated tree') | 1352 raise Error('Not enough space to fetch the whole isolated tree') |
| 1347 except KeyError: | 1353 except KeyError: |
| 1348 raise Error('Nothing to remove') | 1354 raise Error('Nothing to remove') |
| 1349 digest, (size, _) = self._lru.pop_oldest() | 1355 digest, (size, _) = self._lru.pop_oldest() |
| 1350 logging.debug("Removing LRU file %s", digest) | 1356 logging.debug('Removing LRU file %s', digest) |
| 1351 self._delete_file(digest, size) | 1357 self._delete_file(digest, size) |
| 1352 return size | 1358 return size |
| 1353 | 1359 |
| 1354 def _add(self, digest, size=UNKNOWN_FILE_SIZE): | 1360 def _add(self, digest, size=UNKNOWN_FILE_SIZE): |
| 1355 """Adds an item into LRU cache marking it as a newest one.""" | 1361 """Adds an item into LRU cache marking it as a newest one.""" |
| 1356 self._lock.assert_locked() | 1362 self._lock.assert_locked() |
| 1357 if size == UNKNOWN_FILE_SIZE: | 1363 if size == UNKNOWN_FILE_SIZE: |
| 1358 size = fs.stat(self._path(digest)).st_size | 1364 size = fs.stat(self._path(digest)).st_size |
| 1359 self._added.append(size) | 1365 self._added.append(size) |
| 1360 self._lru.add(digest, size) | 1366 self._lru.add(digest, size) |
| 1361 self._free_disk -= size | 1367 self._free_disk -= size |
| 1362 # Do a quicker version of self._trim(). It only enforces free disk space, | 1368 # Do a quicker version of self._trim(). It only enforces free disk space, |
| 1363 # not cache size limits. It doesn't actually look at real free disk space, | 1369 # not cache size limits. It doesn't actually look at real free disk space, |
| 1364 # only uses its cache values. self._trim() will be called later to enforce | 1370 # only uses its cache values. self._trim() will be called later to enforce |
| 1365 # real trimming but doing this quick version here makes it possible to map | 1371 # real trimming but doing this quick version here makes it possible to map |
| 1366 # an isolated that is larger than the current amount of free disk space when | 1372 # an isolated that is larger than the current amount of free disk space when |
| 1367 # the cache size is already large. | 1373 # the cache size is already large. |
| 1368 while ( | 1374 while ( |
| 1369 self.policies.min_free_space and | 1375 self.policies.min_free_space and |
| 1370 self._lru and | 1376 self._lru and |
| 1371 self._free_disk < self.policies.min_free_space): | 1377 self._free_disk < self.policies.min_free_space): |
| 1372 self._remove_lru_file(False) | 1378 self._remove_lru_file(False) |
| 1373 | 1379 |
| 1374 def _delete_file(self, digest, size=UNKNOWN_FILE_SIZE): | 1380 def _delete_file(self, digest, size=UNKNOWN_FILE_SIZE): |
| 1375 """Deletes cache file from the file system.""" | 1381 """Deletes cache file from the file system.""" |
| 1376 self._lock.assert_locked() | 1382 self._lock.assert_locked() |
| 1377 try: | 1383 try: |
| 1378 if size == UNKNOWN_FILE_SIZE: | 1384 if size == UNKNOWN_FILE_SIZE: |
| 1379 size = fs.stat(self._path(digest)).st_size | 1385 try: |
| 1386 size = fs.stat(self._path(digest)).st_size |
| 1387 except OSError: |
| 1388 size = 0 |
| 1380 file_path.try_remove(self._path(digest)) | 1389 file_path.try_remove(self._path(digest)) |
| 1381 self._evicted.append(size) | 1390 self._evicted.append(size) |
| 1382 self._free_disk += size | 1391 self._free_disk += size |
| 1383 except OSError as e: | 1392 except OSError as e: |
| 1384 logging.error('Error attempting to delete a file %s:\n%s' % (digest, e)) | 1393 if e.errno != errno.ENOENT: |
| 1394 logging.error('Error attempting to delete a file %s:\n%s' % (digest, e)) |
| 1385 | 1395 |
| 1386 | 1396 |
| 1387 class IsolatedBundle(object): | 1397 class IsolatedBundle(object): |
| 1388 """Fetched and parsed .isolated file with all dependencies.""" | 1398 """Fetched and parsed .isolated file with all dependencies.""" |
| 1389 | 1399 |
| 1390 def __init__(self): | 1400 def __init__(self): |
| 1391 self.command = [] | 1401 self.command = [] |
| 1392 self.files = {} | 1402 self.files = {} |
| 1393 self.read_only = None | 1403 self.read_only = None |
| 1394 self.relative_cwd = None | 1404 self.relative_cwd = None |
| (...skipping 623 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2018 return dispatcher.execute(OptionParserIsolateServer(), args) | 2028 return dispatcher.execute(OptionParserIsolateServer(), args) |
| 2019 | 2029 |
| 2020 | 2030 |
| 2021 if __name__ == '__main__': | 2031 if __name__ == '__main__': |
| 2022 subprocess42.inhibit_os_error_reporting() | 2032 subprocess42.inhibit_os_error_reporting() |
| 2023 fix_encoding.fix_encoding() | 2033 fix_encoding.fix_encoding() |
| 2024 tools.disable_buffering() | 2034 tools.disable_buffering() |
| 2025 colorama.init() | 2035 colorama.init() |
| 2026 file_path.enable_symlink() | 2036 file_path.enable_symlink() |
| 2027 sys.exit(main(sys.argv[1:])) | 2037 sys.exit(main(sys.argv[1:])) |
| OLD | NEW |