Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(223)

Side by Side Diff: client/isolateserver.py

Issue 2060983006: luci-py/isolateserver.py: Add archive support when downloading. (Closed) Base URL: https://github.com/luci/luci-py.git@master
Patch Set: Small fixes. Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The LUCI Authors. All rights reserved. 2 # Copyright 2013 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 """Archives a set of files or directories to an Isolate Server.""" 6 """Archives a set of files or directories to an Isolate Server."""
7 7
8 __version__ = '0.4.9' 8 __version__ = '0.4.9'
9 9
10 import base64 10 import base64
11 import functools 11 import functools
12 import errno 12 import errno
13 import logging 13 import logging
14 import optparse 14 import optparse
15 import os 15 import os
16 import re 16 import re
17 import stat
17 import signal 18 import signal
18 import sys 19 import sys
19 import tempfile 20 import tempfile
20 import threading 21 import threading
21 import time 22 import time
22 import types 23 import types
23 import zlib 24 import zlib
24 25
26 import cStringIO as StringIO
27
25 from third_party import colorama 28 from third_party import colorama
26 from third_party.depot_tools import fix_encoding 29 from third_party.depot_tools import fix_encoding
27 from third_party.depot_tools import subcommand 30 from third_party.depot_tools import subcommand
28 31
32 from libs import arfile
29 from utils import file_path 33 from utils import file_path
30 from utils import fs 34 from utils import fs
31 from utils import logging_utils 35 from utils import logging_utils
32 from utils import lru 36 from utils import lru
33 from utils import net 37 from utils import net
34 from utils import on_error 38 from utils import on_error
35 from utils import subprocess42 39 from utils import subprocess42
36 from utils import threading_utils 40 from utils import threading_utils
37 from utils import tools 41 from utils import tools
38 42
(...skipping 103 matching lines...) Expand 10 before | Expand all | Expand 10 after
142 """ 146 """
143 file_path.ensure_tree(os.path.dirname(path)) 147 file_path.ensure_tree(os.path.dirname(path))
144 total = 0 148 total = 0
145 with fs.open(path, 'wb') as f: 149 with fs.open(path, 'wb') as f:
146 for d in content_generator: 150 for d in content_generator:
147 total += len(d) 151 total += len(d)
148 f.write(d) 152 f.write(d)
149 return total 153 return total
150 154
151 155
def fileobj_path(fileobj):
  """Returns the file system path of |fileobj| as unicode, if it has one.

  Returns None when the object carries no 'name' attribute, or when the
  named path does not actually exist on the file system (for example an
  in-memory object that merely carries a 'name' attribute).
  """
  name = getattr(fileobj, 'name', None)
  if name is None:
    return

  # 'name' may be a str encoded with the file system encoding; normalize to
  # unicode so callers always get unicode paths.
  if not isinstance(name, unicode):
    name = name.decode(sys.getfilesystemencoding())

  # A file object can have a name even when nothing (or something else)
  # exists at that path; only trust the name if the path is really present.
  if fs.exists(name):
    return name
166
167
def fileobj_copy(dstfileobj, srcfileobj, amount=-1):
  """Copies data from |srcfileobj| into |dstfileobj| in chunks.

  Copies exactly |amount| bytes, or everything until EOF when |amount| is -1.
  Raises IOError when the source is exhausted before |amount| bytes could be
  copied.
  """
  copied = 0
  while copied != amount:
    if amount > 0:
      chunk_size = min(NET_IO_FILE_CHUNK, amount - copied)
    else:
      chunk_size = NET_IO_FILE_CHUNK
    chunk = srcfileobj.read(chunk_size)
    if not chunk:
      if amount == -1:
        break
      raise IOError('partial file, got %s, wanted %s' % (copied, amount))
    dstfileobj.write(chunk)
    copied += len(chunk)
181
182
def putfile(srcfileobj, dstpath, file_mode=None, limit=-1):
  """Creates the file at |dstpath| with the content of |srcfileobj|.

  When the source data is backed by an actual file and must stay read-only,
  a hardlink (with copy fallback) is used; otherwise the data is copied.

  Arguments:
    srcfileobj: file like object with the source data. It is closed before
        returning.
    dstpath: full path of the destination file to create.
    file_mode: if not None, mode to chmod |dstpath| to. 0 is a valid mode.
    limit: maximum number of bytes to copy when streaming; -1 copies
        everything.
  """
  srcpath = fileobj_path(srcfileobj)
  if srcpath:
    # The destination is read-only when no mode is requested or when the
    # requested mode has no write bit; only then is sharing the file node
    # via a hardlink safe. A writable destination must be a real copy,
    # otherwise the chmod below would also make the cached file writable
    # (permission bits live on the shared inode, not the directory entry).
    readonly = file_mode is None or not (
        file_mode & (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH))
    if readonly:
      # We can link the file.
      file_path.link_file(dstpath, srcpath, file_path.HARDLINK_WITH_FALLBACK)
    else:
      # We must copy the file.
      file_path.link_file(dstpath, srcpath, file_path.COPY)
  else:
    # No usable on-disk path; stream the data out.
    with fs.open(dstpath, 'wb') as dstfileobj:
      fileobj_copy(dstfileobj, srcfileobj, limit)

  # NOTE(review): closing here takes ownership of |srcfileobj| away from the
  # caller; reviewers suggested moving this responsibility to the call sites.
  srcfileobj.close()

  # file_mode of 0 is actually valid, so need explicit check.
  if file_mode is not None:
    fs.chmod(dstpath, file_mode)
204
205
def zip_compress(content_generator, level=7):
  """Reads chunks from |content_generator| and yields zip compressed chunks."""
  deflater = zlib.compressobj(level)
  for piece in content_generator:
    compressed_piece = deflater.compress(piece)
    if compressed_piece:
      yield compressed_piece
  # Emit whatever the compressor is still buffering internally.
  trailing = deflater.flush(zlib.Z_FINISH)
  if trailing:
    yield trailing
(...skipping 1054 matching lines...) Expand 10 before | Expand all | Expand 10 after
1216 """ 1270 """
1217 cache_dir = None 1271 cache_dir = None
1218 1272
  def __init__(self):
    # Guards the profiling state below.
    self._lock = threading_utils.LockWithAssert()
    # Profiling values. Subclasses append one entry per operation; the
    # write/getfileobj implementations store the size of the item involved.
    self._added = []
    self._initial_number_items = 0
    self._initial_size = 0
    self._evicted = []
    self._used = []
1227 1281
  def __contains__(self, digest):
    """Returns True when an item with this hash digest is in the cache."""
    raise NotImplementedError()

  def __enter__(self):
    """Context manager interface."""
    return self

  def __exit__(self, _exc_type, _exec_value, _traceback):
    """Context manager interface. Does not suppress exceptions."""
    return False
1238 1292
  @property
  def added(self):
    """Per-item sizes recorded by write(), as a copy."""
    return self._added[:]

  @property
  def evicted(self):
    """Per-item values recorded on eviction, as a copy."""
    return self._evicted[:]

  @property
  def used(self):
    """Per-item sizes recorded by getfileobj(), as a copy."""
    return self._used[:]

  @property
  def initial_number_items(self):
    """Number of items in the cache before this process used it; starts at 0,
    presumably updated by subclasses when loading state — TODO confirm."""
    return self._initial_number_items

  @property
  def initial_size(self):
    """Total size of the cache before this process used it; starts at 0,
    presumably updated by subclasses when loading state — TODO confirm."""
    return self._initial_size
  def cached_set(self):
    """Returns a set of all cached digests (always a new object)."""
    raise NotImplementedError()

  def cleanup(self):
    """Deletes any corrupted item from the cache and trims it if necessary."""
    raise NotImplementedError()

  def touch(self, digest, size):
    """Ensures item is not corrupted and updates its LRU position.

    Arguments:
      digest: hash digest of item to check.
      size: expected size of this item.

    Returns:
      True if item is in cache and not corrupted.
    """
    raise NotImplementedError()

  def evict(self, digest):
    """Removes item from cache if it's there."""
    raise NotImplementedError()

  def getfileobj(self, digest):
    """Returns a readable file like object.

    If file exists on the file system it will have a .name attribute with an
    absolute path to the file.

    Implementations raise CacheMiss when the item is not in the cache.
    """
    raise NotImplementedError()

  def write(self, digest, content):
    """Reads data from |content| generator and stores it in cache.

    Returns digest to simplify chaining.
    """
    raise NotImplementedError()
1293 1351
1294 def hardlink(self, digest, dest, file_mode):
1295 """Ensures file at |dest| has same content as cached |digest|.
1296
1297 If file_mode is provided, it is used to set the executable bit if
1298 applicable.
1299 """
1300 raise NotImplementedError()
1301
1302 1352
1303 class MemoryCache(LocalCache): 1353 class MemoryCache(LocalCache):
1304 """LocalCache implementation that stores everything in memory.""" 1354 """LocalCache implementation that stores everything in memory."""
1305 1355
1306 def __init__(self, file_mode_mask=0500): 1356 def __init__(self, file_mode_mask=0500):
1307 """Args: 1357 """Args:
1308 file_mode_mask: bit mask to AND file mode with. Default value will make 1358 file_mode_mask: bit mask to AND file mode with. Default value will make
1309 all mapped files to be read only. 1359 all mapped files to be read only.
1310 """ 1360 """
1311 super(MemoryCache, self).__init__() 1361 super(MemoryCache, self).__init__()
(...skipping 14 matching lines...) Expand all
  def touch(self, digest, size):
    """Returns True if the item is in the cache.

    An in-memory entry is assumed never corrupted, so |size| is not verified.
    """
    with self._lock:
      return digest in self._contents
1329 1379
1330 def evict(self, digest): 1380 def evict(self, digest):
1331 with self._lock: 1381 with self._lock:
1332 v = self._contents.pop(digest, None) 1382 v = self._contents.pop(digest, None)
1333 if v is not None: 1383 if v is not None:
1334 self._evicted.add(v) 1384 self._evicted.add(v)
1335 1385
1336 def read(self, digest): 1386 def getfileobj(self, digest):
1337 with self._lock: 1387 with self._lock:
M-A Ruel 2016/06/21 13:31:08 Nesting is not optimal; with self._lock: try:
mithro 2016/06/22 12:03:10 Done.
1338 try: 1388 try:
1339 return self._contents[digest] 1389 d = self._contents[digest]
1390 self._used.append(len(d))
1391 return StringIO.StringIO(d)
1340 except KeyError: 1392 except KeyError:
1341 raise CacheMiss(digest) 1393 raise CacheMiss(digest)
1342 1394
  def write(self, digest, content):
    """Stores the joined |content| under |digest|; returns |digest|."""
    # Assemble whole stream before taking the lock.
    data = ''.join(content)
    with self._lock:
      self._contents[digest] = data
      self._added.append(len(data))
    return digest
1350 1402
1351 def hardlink(self, digest, dest, file_mode):
1352 """Since data is kept in memory, there is no filenode to hardlink."""
1353 data = self.read(digest)
1354 file_write(dest, [data])
1355 if file_mode is not None:
1356 fs.chmod(dest, file_mode & self._file_mode_mask)
1357 with self._lock:
1358 self._linked.append(len(data))
1359
1360 1403
1361 class CachePolicies(object): 1404 class CachePolicies(object):
1362 def __init__(self, max_cache_size, min_free_space, max_items): 1405 def __init__(self, max_cache_size, min_free_space, max_items):
1363 """ 1406 """
1364 Arguments: 1407 Arguments:
1365 - max_cache_size: Trim if the cache gets larger than this value. If 0, the 1408 - max_cache_size: Trim if the cache gets larger than this value. If 0, the
1366 cache is effectively a leak. 1409 cache is effectively a leak.
1367 - min_free_space: Trim if disk free space becomes lower than this value. If 1410 - min_free_space: Trim if disk free space becomes lower than this value. If
1368 0, it unconditionally fill the disk. 1411 0, it unconditionally fill the disk.
1369 - max_items: Maximum number of items to keep in the cache. If 0, do not 1412 - max_items: Maximum number of items to keep in the cache. If 0, do not
(...skipping 111 matching lines...) Expand 10 before | Expand all | Expand 10 after
1481 self._protected.add(digest) 1524 self._protected.add(digest)
1482 return True 1525 return True
1483 1526
  def evict(self, digest):
    """Removes the item from the LRU bookkeeping and deletes its file."""
    with self._lock:
      # Do not check for 'digest in self._protected' since it could be because
      # the object is corrupted.
      self._lru.pop(digest)
      self._delete_file(digest, UNKNOWN_FILE_SIZE)
1490 1533
  def getfileobj(self, digest):
    """Opens the cached item from disk and returns the open file object.

    Records the item's tracked size in the profiling list. Raises CacheMiss
    when the file cannot be opened.
    """
    try:
      f = fs.open(self._path(digest), 'rb')
      with self._lock:
        # NOTE(review): if this LRU lookup raises something other than
        # IOError (e.g. the file exists on disk but is not tracked), |f| is
        # leaked and the error escapes as-is instead of CacheMiss — TODO
        # confirm this is intended.
        self._used.append(self._lru[digest])
      return f
    except IOError:
      raise CacheMiss(digest)
1497 1542
1498 def write(self, digest, content): 1543 def write(self, digest, content):
1499 assert content is not None 1544 assert content is not None
1500 with self._lock: 1545 with self._lock:
1501 self._protected.add(digest) 1546 self._protected.add(digest)
1502 path = self._path(digest) 1547 path = self._path(digest)
1503 # A stale broken file may remain. It is possible for the file to have write 1548 # A stale broken file may remain. It is possible for the file to have write
1504 # access bit removed which would cause the file_write() call to fail to open 1549 # access bit removed which would cause the file_write() call to fail to open
(...skipping 10 matching lines...) Expand all
1515 file_path.try_remove(path) 1560 file_path.try_remove(path)
1516 raise 1561 raise
1517 # Make the file read-only in the cache. This has a few side-effects since 1562 # Make the file read-only in the cache. This has a few side-effects since
1518 # the file node is modified, so every directory entries to this file becomes 1563 # the file node is modified, so every directory entries to this file becomes
1519 # read-only. It's fine here because it is a new file. 1564 # read-only. It's fine here because it is a new file.
1520 file_path.set_read_only(path, True) 1565 file_path.set_read_only(path, True)
1521 with self._lock: 1566 with self._lock:
1522 self._add(digest, size) 1567 self._add(digest, size)
1523 return digest 1568 return digest
1524 1569
1525 def hardlink(self, digest, dest, file_mode):
1526 """Hardlinks the file to |dest|.
1527
1528 Note that the file permission bits are on the file node, not the directory
1529 entry, so changing the access bit on any of the directory entries for the
1530 file node will affect them all.
1531 """
1532 path = self._path(digest)
1533 if not file_path.link_file(dest, path, file_path.HARDLINK_WITH_FALLBACK):
1534 # Report to the server that it failed with more details. We'll want to
1535 # squash them all.
1536 on_error.report('Failed to hardlink\n%s -> %s' % (path, dest))
1537
1538 if file_mode is not None:
1539 # Ignores all other bits.
1540 fs.chmod(dest, file_mode & 0500)
1541 with self._lock:
1542 self._linked.append(self._lru[digest])
1543
1544 def _load(self): 1570 def _load(self):
1545 """Loads state of the cache from json file.""" 1571 """Loads state of the cache from json file."""
1546 self._lock.assert_locked() 1572 self._lock.assert_locked()
1547 1573
1548 if not os.path.isdir(self.cache_dir): 1574 if not os.path.isdir(self.cache_dir):
1549 fs.makedirs(self.cache_dir) 1575 fs.makedirs(self.cache_dir)
1550 else: 1576 else:
1551 # Make sure the cache is read-only. 1577 # Make sure the cache is read-only.
1552 # TODO(maruel): Calculate the cost and optimize the performance 1578 # TODO(maruel): Calculate the cost and optimize the performance
1553 # accordingly. 1579 # accordingly.
(...skipping 210 matching lines...) Expand 10 before | Expand all | Expand 10 after
1764 pending[h] = isolated_file 1790 pending[h] = isolated_file
1765 fetch_queue.add(h, priority=threading_utils.PRIORITY_HIGH) 1791 fetch_queue.add(h, priority=threading_utils.PRIORITY_HIGH)
1766 1792
1767 # Start fetching root *.isolated file (single file, not the whole bundle). 1793 # Start fetching root *.isolated file (single file, not the whole bundle).
1768 retrieve_async(self.root) 1794 retrieve_async(self.root)
1769 1795
1770 while pending: 1796 while pending:
1771 # Wait until some *.isolated file is fetched, parse it. 1797 # Wait until some *.isolated file is fetched, parse it.
1772 item_hash = fetch_queue.wait(pending) 1798 item_hash = fetch_queue.wait(pending)
1773 item = pending.pop(item_hash) 1799 item = pending.pop(item_hash)
1774 item.load(fetch_queue.cache.read(item_hash)) 1800 item.load(fetch_queue.cache.getfileobj(item_hash).read())
M-A Ruel 2016/06/21 13:31:09 with fetch_queue.cache.getfileobj(item_hash) as f:
mithro 2016/06/22 12:03:10 Done
1775 1801
1776 # Start fetching included *.isolated files. 1802 # Start fetching included *.isolated files.
1777 for new_child in item.children: 1803 for new_child in item.children:
1778 retrieve_async(new_child) 1804 retrieve_async(new_child)
1779 1805
1780 # Always fetch *.isolated files in traversal order, waiting if necessary 1806 # Always fetch *.isolated files in traversal order, waiting if necessary
1781 # until next to-be-processed node loads. "Waiting" is done by yielding 1807 # until next to-be-processed node loads. "Waiting" is done by yielding
1782 # back to the outer loop, that waits until some *.isolated is loaded. 1808 # back to the outer loop, that waits until some *.isolated is loaded.
1783 for node in isolated_format.walk_includes(self.root): 1809 for node in isolated_format.walk_includes(self.root):
1784 if node not in processed: 1810 if node not in processed:
(...skipping 17 matching lines...) Expand all
1802 """Starts fetching files from |isolated| that are not yet being fetched. 1828 """Starts fetching files from |isolated| that are not yet being fetched.
1803 1829
1804 Modifies self.files. 1830 Modifies self.files.
1805 """ 1831 """
1806 logging.debug('fetch_files(%s)', isolated.obj_hash) 1832 logging.debug('fetch_files(%s)', isolated.obj_hash)
1807 for filepath, properties in isolated.data.get('files', {}).iteritems(): 1833 for filepath, properties in isolated.data.get('files', {}).iteritems():
1808 # Root isolated has priority on the files being mapped. In particular, 1834 # Root isolated has priority on the files being mapped. In particular,
1809 # overridden files must not be fetched. 1835 # overridden files must not be fetched.
1810 if filepath not in self.files: 1836 if filepath not in self.files:
1811 self.files[filepath] = properties 1837 self.files[filepath] = properties
1838
1839 # Make sure if the isolated is read only, the mode doesn't have write
1840 # bits.
1841 if 'm' in properties and self.read_only:
1842 properties['m'] &= ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
1843
1844 # Preemptively request hashed files.
1812 if 'h' in properties: 1845 if 'h' in properties:
1813 # Preemptively request files.
1814 logging.debug('fetching %s', filepath) 1846 logging.debug('fetching %s', filepath)
1815 fetch_queue.add( 1847 fetch_queue.add(
1816 properties['h'], properties['s'], threading_utils.PRIORITY_MED) 1848 properties['h'], properties['s'], threading_utils.PRIORITY_MED)
1817 1849
1818 def _update_self(self, node): 1850 def _update_self(self, node):
1819 """Extracts bundle global parameters from loaded *.isolated file. 1851 """Extracts bundle global parameters from loaded *.isolated file.
1820 1852
1821 Will be called with each loaded *.isolated file in order of traversal of 1853 Will be called with each loaded *.isolated file in order of traversal of
1822 isolated include graph (see isolated_format.walk_includes). 1854 isolated include graph (see isolated_format.walk_includes).
1823 """ 1855 """
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after
1965 logging.info('Retrieving remaining files (%d of them)...', 1997 logging.info('Retrieving remaining files (%d of them)...',
1966 fetch_queue.pending_count) 1998 fetch_queue.pending_count)
1967 last_update = time.time() 1999 last_update = time.time()
1968 with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT) as detector: 2000 with threading_utils.DeadlockDetector(DEADLOCK_TIMEOUT) as detector:
1969 while remaining: 2001 while remaining:
1970 detector.ping() 2002 detector.ping()
1971 2003
1972 # Wait for any item to finish fetching to cache. 2004 # Wait for any item to finish fetching to cache.
1973 digest = fetch_queue.wait(remaining) 2005 digest = fetch_queue.wait(remaining)
1974 2006
1975 # Link corresponding files to a fetched item in cache. 2007 # Create the files in the destination using item in cache as the
2008 # source.
1976 for filepath, props in remaining.pop(digest): 2009 for filepath, props in remaining.pop(digest):
1977 dest = os.path.join(outdir, filepath) 2010 fullpath = os.path.join(outdir, filepath)
1978 if os.path.exists(dest): 2011 srcfileobj = cache.getfileobj(digest)
M-A Ruel 2016/06/21 13:31:08 with cache.getfileobj(digest) as f:
mithro 2016/06/22 12:03:10 Done.
1979 raise AlreadyExists('File %s already exists' % dest) 2012
1980 cache.hardlink(digest, dest, props.get('m')) 2013 isarchive = props.get('a')
2014 if isarchive:
2015 basedir = os.path.dirname(fullpath)
2016
2017 if isarchive == 'ar':
2018 extractor = arfile.ArFileReader(srcfileobj, fullparse=False)
2019 else:
2020 raise isolated_format.IsolatedError(
2021 'Unknown archive format %r', isarchive)
2022
2023 for ai, ifd in extractor:
2024 fp = os.path.normpath(os.path.join(basedir, ai.name))
2025 file_path.ensure_tree(os.path.dirname(fp))
2026 putfile(ifd, fp, ai.mode, ai.size)
2027
2028 else:
2029 file_mode = props.get('m')
2030 if file_mode:
2031 # Ignore all bits apart from the user
2032 file_mode &= 0700
2033 putfile(srcfileobj, fullpath, file_mode)
1981 2034
1982 # Report progress. 2035 # Report progress.
1983 duration = time.time() - last_update 2036 duration = time.time() - last_update
1984 if duration > DELAY_BETWEEN_UPDATES_IN_SECS: 2037 if duration > DELAY_BETWEEN_UPDATES_IN_SECS:
1985 msg = '%d files remaining...' % len(remaining) 2038 msg = '%d files remaining...' % len(remaining)
1986 print msg 2039 print msg
1987 logging.info(msg) 2040 logging.info(msg)
1988 last_update = time.time() 2041 last_update = time.time()
1989 2042
1990 # Cache could evict some items we just tried to fetch, it's a fatal error. 2043 # Cache could evict some items we just tried to fetch, it's a fatal error.
(...skipping 316 matching lines...) Expand 10 before | Expand all | Expand 10 after
2307 dispatcher = subcommand.CommandDispatcher(__name__) 2360 dispatcher = subcommand.CommandDispatcher(__name__)
2308 return dispatcher.execute(OptionParserIsolateServer(), args) 2361 return dispatcher.execute(OptionParserIsolateServer(), args)
2309 2362
2310 2363
if __name__ == '__main__':
  # Process-wide setup must run before dispatching to any subcommand.
  subprocess42.inhibit_os_error_reporting()
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  sys.exit(main(sys.argv[1:]))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698