OLD | NEW |
(Empty) | |
| 1 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. |
| 4 |
| 5 # Monkeypatch IMapIterator so that Ctrl-C can kill everything properly. |
| 6 # Derived from https://gist.github.com/aljungberg/626518 |
| 7 import multiprocessing.pool |
| 8 from multiprocessing.pool import IMapIterator |
def wrapper(func):
  """Wraps a blocking wait-style method so a missing timeout becomes finite.

  multiprocessing's waits only stay interruptible by Ctrl-C when given an
  explicit timeout, so a None (or 0) timeout is replaced with an effectively
  infinite 1e100 seconds.
  """
  def patched(self, timeout=None):
    effective_timeout = timeout or 1e100
    return func(self, timeout=effective_timeout)
  return patched
# Replace the blocking iterator methods so that iterating over pool results
# (e.g. via imap/imap_unordered) always waits with a finite timeout, which
# keeps Ctrl-C responsive. __next__ is aliased for py3-style iteration.
IMapIterator.next = wrapper(IMapIterator.next)
IMapIterator.__next__ = IMapIterator.next
# TODO(iannucci): Monkeypatch all other 'wait' methods too.
| 16 |
| 17 |
| 18 import binascii |
| 19 import contextlib |
| 20 import functools |
| 21 import logging |
| 22 import signal |
| 23 import sys |
| 24 import tempfile |
| 25 import threading |
| 26 |
| 27 import subprocess2 |
| 28 |
| 29 |
# On Windows git is typically installed as a batch wrapper; elsewhere 'git'.
GIT_EXE = 'git.bat' if sys.platform.startswith('win') else 'git'
| 31 |
| 32 |
class BadCommitRefException(Exception):
  """Raised when one or more commitrefs fail to resolve to a commit."""

  def __init__(self, refs):
    message = 'one of %s does not seem to be a valid commitref.' % str(refs)
    super(BadCommitRefException, self).__init__(message)
| 38 |
| 39 |
def memoize_one(**kwargs):
  """Memoizes a single-argument pure function.

  Values of None are not cached.

  Kwargs:
    threadsafe (bool) - REQUIRED. Specifies whether to use locking around
      cache manipulation functions. This is a kwarg so that users of
      memoize_one are forced to explicitly and verbosely pick True or False.

  Adds four methods to the decorated function:
    * get(key, default=None) - Gets the value for this key from the cache.
    * set(key, value) - Sets the value for this key from the cache.
    * clear() - Drops the entire contents of the cache. Useful for unittests.
    * update(other) - Updates the contents of the cache from another dict.
  """
  assert 'threadsafe' in kwargs, 'Must specify threadsafe={True,False}'
  threadsafe = kwargs['threadsafe']

  if threadsafe:
    def locked(lock, fn):
      def locked_fn(*a, **kw):
        with lock:
          return fn(*a, **kw)
      return locked_fn
  else:
    def locked(_lock, fn):
      return fn

  def decorator(f):
    # The lock and cache live at decorator scope (not memoize_one scope) so
    # that a single memoize_one(...) result may decorate several independent
    # functions, each getting its own private lock and cache:
    #
    #   memoizer = memoize_one(threadsafe=True)
    #
    #   @memoizer
    #   def fn1(val): ...
    #
    #   @memoizer
    #   def fn2(val): ...
    lock = threading.Lock() if threadsafe else None
    cache = {}
    cache_get = locked(lock, cache.get)
    cache_set = locked(lock, cache.__setitem__)

    @functools.wraps(f)
    def memoized(arg):
      value = cache_get(arg)
      if value is None:
        value = f(arg)
        # None results are deliberately not cached, so failed/absent lookups
        # get recomputed on the next call.
        if value is not None:
          cache_set(arg, value)
      return value

    memoized.get = cache_get
    memoized.set = cache_set
    memoized.clear = locked(lock, cache.clear)
    memoized.update = locked(lock, cache.update)
    return memoized
  return decorator
| 99 |
| 100 |
| 101 def _ScopedPool_initer(orig, orig_args): # pragma: no cover |
| 102 """Initializer method for ScopedPool's subprocesses. |
| 103 |
| 104 This helps ScopedPool handle Ctrl-C's correctly. |
| 105 """ |
| 106 signal.signal(signal.SIGINT, signal.SIG_IGN) |
| 107 if orig: |
| 108 orig(*orig_args) |
| 109 |
| 110 |
@contextlib.contextmanager
def ScopedPool(*args, **kwargs):
  """Context manager yielding a multiprocessing pool that is always reaped.

  On a clean exit the pool is close()'d (queued work finishes); if the
  managed block raises, the pool is terminate()'d instead. Either way the
  pool is join()'d before the context exits.

  *args - Arguments to multiprocessing.pool

  Kwargs:
    kind ('threads', 'procs') - The type of underlying coprocess to use.
    **etc - Arguments to multiprocessing.pool
  """
  kind = kwargs.pop('kind', None)
  if kind == 'threads':
    pool = multiprocessing.pool.ThreadPool(*args, **kwargs)
  else:
    # Wrap any caller-provided initializer so every worker process first
    # ignores SIGINT (see _ScopedPool_initer); Ctrl-C is then handled solely
    # by the parent, which terminates the pool below.
    user_init = kwargs.get('initializer')
    user_initargs = kwargs.get('initargs', ())
    kwargs['initializer'] = _ScopedPool_initer
    kwargs['initargs'] = user_init, user_initargs
    pool = multiprocessing.pool.Pool(*args, **kwargs)

  try:
    yield pool
    pool.close()
  except:  # Bare except on purpose: also catch KeyboardInterrupt/SystemExit.
    pool.terminate()
    raise
  finally:
    pool.join()
| 138 |
| 139 |
class ProgressPrinter(object):
  """Threaded single-stat status message printer."""
  def __init__(self, fmt, enabled=None, stream=sys.stderr, period=0.5):
    """Create a ProgressPrinter.

    Use it as a context manager which produces a simple 'increment' method:

      with ProgressPrinter('(%%(count)d/%d)' % 1000) as inc:
        for i in xrange(1000):
          # do stuff
          if i % 10 == 0:
            inc(10)

    Args:
      fmt - String format with a single '%(count)d' where the counter value
        should go.
      enabled (bool) - If this is None, will default to True if
        logging.getLogger() is set to INFO or more verbose.
      stream (file-like) - The stream to print status messages to.
      period (float) - The time in seconds for the printer thread to wait
        between printing.
    """
    self.fmt = fmt
    if enabled is None:  # pragma: no cover
      self.enabled = logging.getLogger().isEnabledFor(logging.INFO)
    else:
      self.enabled = enabled

    self._count = 0  # Running total; only incremented via inc().
    self._dead = False  # Set True by __exit__ to stop the printer thread.
    self._dead_cond = threading.Condition()
    self._stream = stream
    self._thread = threading.Thread(target=self._run)
    self._period = period

  def _emit(self, s):
    # '\r' rewrites the current terminal line in place instead of scrolling.
    if self.enabled:
      self._stream.write('\r' + s)
      self._stream.flush()

  def _run(self):
    # Printer-thread body. The condition is held except while wait()ing, so
    # __exit__'s notifyAll() can only land while this thread is blocked in
    # wait(); the _dead flag is then re-checked before the next print.
    with self._dead_cond:
      while not self._dead:
        self._emit(self.fmt % {'count': self._count})
        # Wakes after self._period seconds, or sooner when notified.
        self._dead_cond.wait(self._period)
      # Final print with a trailing newline so later output starts cleanly.
      self._emit((self.fmt + '\n') % {'count': self._count})

  def inc(self, amount=1):
    # NOTE(review): '+=' is not atomic, so increments from multiple
    # concurrent callers could theoretically be lost; looks intended for a
    # single incrementing caller — confirm before sharing inc() across
    # threads.
    self._count += amount

  def __enter__(self):
    self._thread.start()
    return self.inc

  def __exit__(self, _exc_type, _exc_value, _traceback):
    # Flag the printer thread to stop, wake it, then wait for its final
    # print before returning.
    self._dead = True
    with self._dead_cond:
      self._dead_cond.notifyAll()
    self._thread.join()
    del self._thread
| 200 |
| 201 |
def parse_commitrefs(*commitrefs):
  """Returns binary encoded commit hashes for one or more commitrefs.

  A commitref is anything which can resolve to a commit. Popular examples:
    * 'HEAD'
    * 'origin/master'
    * 'cool_branch~2'

  Raises BadCommitRefException if any commitref fails to resolve.
  """
  try:
    # A list comprehension (rather than map()) guarantees the conversion is
    # fully evaluated inside this try block; a lazy map object would let a
    # CalledProcessError raised during later iteration escape the except
    # clause on Python 3. The py2 return type (a list) is unchanged.
    return [binascii.unhexlify(h) for h in hashes(*commitrefs)]
  except subprocess2.CalledProcessError:
    raise BadCommitRefException(commitrefs)
| 214 |
| 215 |
def run(*cmd, **kwargs):
  """Runs a git command; returns its stdout as a string.

  The fully-assembled command line is logged at DEBUG level before running.

  kwargs
    autostrip (bool) - strip() the output. Defaults to True.
  """
  autostrip = kwargs.pop('autostrip', True)
  full_cmd = (GIT_EXE,) + cmd
  logging.debug('Running %s', ' '.join(repr(tok) for tok in full_cmd))
  output = subprocess2.check_output(full_cmd, stderr=subprocess2.PIPE, **kwargs)
  if autostrip:
    output = (output or '').strip()
  return output
| 232 |
| 233 |
def hashes(*reflike):
  """Returns the `git rev-parse` output lines for the given refs — one hex
  hash per input ref, in argument order."""
  return run('rev-parse', *reflike).splitlines()
| 236 |
| 237 |
def intern_f(f, kind='blob'):
  """Interns a file object into the git object store.

  Args:
    f (file-like object) - The file-like object to intern. It is always
      closed, even when the underlying git command fails.
    kind (git object type) - One of 'blob', 'commit', 'tree', 'tag'.

  Returns the git hash of the interned object (hex encoded).
  """
  try:
    return run('hash-object', '-t', kind, '-w', '--stdin', stdin=f)
  finally:
    # Close in a finally block so an exception from run() cannot leak the
    # caller's file handle (previously f was only closed on success).
    f.close()
| 250 |
| 251 |
def tree(treeref, recurse=False):
  """Returns a dict representation of a git tree object.

  Args:
    treeref (str) - a git ref which resolves to a tree (commits count as
      trees).
    recurse (bool) - include all of the tree's descendants too. File names
      will take the form of 'some/path/to/file'.

  Return format:
    { 'file_name': (mode, type, ref) }

    mode is the octal file mode as a string, as printed by `git ls-tree`:
      * 0040000 - Directory
      * 0100644 - Regular non-executable file
      * 0100664 - Regular non-executable group-writeable file
      * 0100755 - Regular executable file
      * 0120000 - Symbolic link
      * 0160000 - Gitlink

    type is a string where it's one of 'blob', 'commit', 'tree', 'tag'.

    ref is the hex encoded hash of the entry.

  Returns None if treeref does not resolve to a tree.
  """
  ret = {}
  opts = ['ls-tree', '--full-tree']
  if recurse:
    opts.append('-r')
  opts.append(treeref)
  try:
    # ls-tree output: '<mode> <type> <hash>\t<name>'. maxsplit=3 keeps the
    # whole remainder as the name, so names containing whitespace survive.
    for line in run(*opts).splitlines():
      mode, typ, ref, name = line.split(None, 3)
      ret[name] = (mode, typ, ref)
  except subprocess2.CalledProcessError:
    # Bad treeref (or not a tree): signal with None rather than raising.
    return None
  return ret
| 287 |
| 288 |
def mktree(treedict):
  """Makes a git tree object and returns its hash.

  See |tree()| for the values of mode, type, and ref.

  Args:
    treedict - { name: (mode, type, ref) }
  """
  with tempfile.TemporaryFile() as f:
    # Feed `git mktree -z` NUL-terminated '<mode> <type> <ref>\t<name>'
    # records. items() replaces py2-only iteritems(): identical iteration on
    # Python 2, and the function survives a Python 3 migration.
    for name, (mode, typ, ref) in treedict.items():
      f.write('%s %s %s\t%s\0' % (mode, typ, ref, name))
    f.seek(0)
    return run('mktree', '-z', stdin=f)
OLD | NEW |