import ast
import collections
import contextlib
import copy
import itertools
import logging
import os
import subprocess
import sys
import tempfile

sys.path.append(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), 'third_party'))

import dateutil.parser


class UncleanFilesystemError(Exception):
  pass


class InconsistentDependencyGraphError(Exception):
  pass


class PackageContext(object):
  """Contains information about where the root package and its dependency
  checkouts live."""

  def __init__(self, root_dir, package_dir):
    self.root_dir = root_dir
    self.package_dir = package_dir

  @classmethod
  def from_pyl_path(cls, pyl_path):
    root_dir = os.path.dirname(pyl_path)
    return cls(root_dir, os.path.join(root_dir, '.recipe_deps'))
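  # Illustrative sketch (hypothetical paths): for a spec file at
  # '/path/to/repo/recipe_package.pyl', from_pyl_path() yields
  #   root_dir    == '/path/to/repo'
  #   package_dir == '/path/to/repo/.recipe_deps'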


class RepoSpec(object):
  def checkout(self, context):
    """Fetches the specified package and returns the path of the package root
    (the directory that contains recipe_package.pyl).
    """
    raise NotImplementedError()


class GitRepoSpec(RepoSpec):
  def __init__(self, id, repo, branch, revision, path):
    self.id = id
    self.repo = repo
    self.branch = branch
    self.revision = revision
    self.path = path

  def checkout(self, context):
    package_dir = context.package_dir
    dep_dir = os.path.join(package_dir, self.id)
    logging.info('Freshening repository %s' % dep_dir)

    if os.path.exists(os.path.join(package_dir, '.dont_mess_with_this')):
      logging.warn('Skipping checkout of %s because of .dont_mess_with_this'
                   % self.id)
      return os.path.join(dep_dir, self.path)

    if not os.path.isdir(dep_dir):
      _run_cmd(['git', 'clone', self.repo, dep_dir])
    elif not os.path.isdir(os.path.join(dep_dir, '.git')):
      raise UncleanFilesystemError('%s exists but is not a git repo' % dep_dir)

    with in_directory(dep_dir):
      _run_cmd(['git', 'fetch'])
      _run_cmd(['git', 'checkout', '-q', self.revision])

    return os.path.join(dep_dir, self.path)

  def dump(self):
    return {
        'repo': self.repo,
        'branch': self.branch,
        'revision': self.revision,
        'path': self.path,
    }

  def updates(self, context):
    """Returns a list of all updates to the branch since the revision this
    repo spec refers to, paired with their commit timestamps; i.e.
    (timestamp, GitRepoSpec).

    Although timestamps are not completely reliable, they are the best tool we
    have to approximate global coherence.
    """
    lines = filter(bool, self._raw_updates(context).strip().split('\n'))
    return [
        (_parse_date(date),
         GitRepoSpec(self.id, self.repo, self.branch, rev, self.path))
        for date, rev in map(str.split, lines)
    ]

  def root_dir(self, context):
    return os.path.join(context.package_dir, self.id, self.path)
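  # For a context built via PackageContext.from_pyl_path, root_dir() resolves
  # to <root>/.recipe_deps/<dep id>/<path> (illustrative shape only, not a
  # real checkout).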

  def _raw_updates(self, context):
    self.checkout(context)
    git = subprocess.Popen(
        ['git', 'log',
         '%s..origin/%s' % (self.revision, self.branch),
         '--pretty=%aI %H',
         '--reverse'],
        stdout=subprocess.PIPE,
        cwd=os.path.join(context.package_dir, self.id))
    (stdout, _) = git.communicate()
    return stdout
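  # _raw_updates() returns one commit per line in '%aI %H' form, e.g.
  # (hash abbreviated for illustration):
  #   2015-06-30T10:15:20-07:00 0f3c1a2b...
  # updates() above splits each line into its (date, revision) pair.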

  def _components(self):
    return (self.id, self.repo, self.revision, self.path)

  def __eq__(self, other):
    return self._components() == other._components()

  def __ne__(self, other):
    return not self.__eq__(other)


class RootRepoSpec(RepoSpec):
  def __init__(self, pyl_path):
    self.pyl_path = pyl_path

  def checkout(self, context):
    # We assume this is already checked out.
    return context.root_dir

  def root_dir(self, context):
    return context.root_dir


class Package(object):
  def __init__(self, repo, deps, root_dir):
    self.repo = repo
    self.deps = deps
    self.root_dir = root_dir

  @property
  def recipe_dirs(self):
    return [os.path.join(self.root_dir, 'recipes')]

  @property
  def module_dirs(self):
    return [os.path.join(self.root_dir, 'recipe_modules')]

  def find_dep(self, dep_name):
    return self.deps[dep_name]

  def module_path(self, module_name):
    return os.path.join(self.root_dir, 'recipe_modules', module_name)


class PackageSpec(object):
  def __init__(self, id, deps):
    self.id = id
    self.deps = deps

  @classmethod
  def load_pyl(cls, pyl_path):
    with open(pyl_path, 'r') as fh:
      pyl_spec = ast.literal_eval(fh.read())
    return cls.load(pyl_spec)

  @classmethod
  def load(cls, spec):
    assert spec['api_version'] == 0
    deps = {
        dep: GitRepoSpec(dep,
                         dep_dict['repo'],
                         dep_dict['branch'],
                         dep_dict['revision'],
                         dep_dict['path'])
        for dep, dep_dict in spec['deps'].iteritems()
    }

    return cls(spec['id'], deps)
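  # Illustrative shape of a recipe_package.pyl accepted by load_pyl()/load();
  # the id, URL, and revision values here are made up:
  #   {
  #     'api_version': 0,
  #     'id': 'my_package',
  #     'deps': {
  #       'recipe_engine': {
  #         'repo': 'https://example.com/recipe_engine.git',
  #         'branch': 'master',
  #         'revision': 'deadbeef',
  #         'path': '',  # subdirectory holding the dep's recipe_package.pyl
  #       },
  #     },
  #   }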

  def dump(self):
    return {
        'api_version': 0,
        'id': self.id,
        'deps': {dep_id: dep.dump() for dep_id, dep in self.deps.iteritems()},
    }

  def updates(self, context):
    dep_updates = _merge([
        [(date, dep, update) for date, update in repo.updates(context)]
        for dep, repo in self.deps.iteritems()
    ])

    deps_so_far = self.deps
    ret_updates = []
    for (date, dep_id, dep) in dep_updates:
      deps_so_far = _updated(deps_so_far, {dep_id: dep})
      ret_updates.append((date, PackageSpec(self.id, deps_so_far)))
    return ret_updates
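  # Illustrative ordering: if dep 'a' updates at t1 and t3 and dep 'b'
  # updates at t2, updates() yields specs pinning {a@t1, b@original} at t1,
  # {a@t1, b@t2} at t2, and {a@t3, b@t2} at t3.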

  def iterate_consistent_updates(self, context):
    root_spec = RootRepoSpec(
        os.path.join(context.root_dir, 'recipe_package.pyl'))
    for date, spec in self.updates(context):
      consistent_spec = True
      try:
        package_deps = PackageDeps(context)
        package_deps._create_from_spec(root_spec, spec)
      except InconsistentDependencyGraphError:
        # Skip inconsistent graphs, which are blocked on dependency rolls.
        consistent_spec = False
      if consistent_spec:
        yield date, spec

  def __eq__(self, other):
    return self.id == other.id and self.deps == other.deps

  def __ne__(self, other):
    return not self.__eq__(other)


class PackageDeps(object):
  """An object containing all the transitive dependencies of the root package.
  """

  def __init__(self, context):
    self._context = context
    self._repos = {}

  @classmethod
  def create(cls, pyl_path):
    context = PackageContext.from_pyl_path(pyl_path)

    package_deps = cls(context)
    root_package = package_deps._create_package(RootRepoSpec(pyl_path))
    return package_deps
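  # Minimal usage sketch (hypothetical path; assumes the deps listed in the
  # pyl file can be fetched):
  #   deps = PackageDeps.create('/path/to/repo/recipe_package.pyl')
  #   for d in deps.all_module_dirs:
  #     print d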

  def _create_package(self, repo_spec):
    package_root = repo_spec.checkout(self._context)
    pyl_path = os.path.join(package_root, 'recipe_package.pyl')

    package_spec = PackageSpec.load_pyl(pyl_path)
    return self._create_from_spec(repo_spec, package_spec)

  def _create_from_spec(self, repo_spec, package_spec):
    deps = {}
    for dep, dep_repo in sorted(package_spec.deps.items()):
      deps[dep] = self._create_package(dep_repo)

    if (package_spec.id in self._repos and
        not repo_spec == self._repos[package_spec.id].repo):
      raise InconsistentDependencyGraphError(
          'Package specs do not match: %s vs %s' %
          (repo_spec, self._repos[package_spec.id].repo))

    package = Package(repo_spec, deps, repo_spec.root_dir(self._context))
    self._repos[package_spec.id] = package
    return package

  # TODO(luqui): Remove this, so all accesses to packages are done
  # via other packages with properly scoped deps.
  def get_package(self, package_id):
    return self._repos[package_id]

  @property
  def all_recipe_dirs(self):
    for repo in self._repos.values():
      for subdir in repo.recipe_dirs:
        yield subdir

  @property
  def all_module_dirs(self):
    for repo in self._repos.values():
      for subdir in repo.module_dirs:
        yield subdir


def _run_cmd(cmd):
  logging.info('%s', cmd)
  subprocess.check_call(cmd)


def _parse_date(datestr):
  """Parses an ISO-8601 date string into a datetime object.

  >>> (_parse_date('2015-06-30T10:15:20-00:00')
  ...  <= _parse_date('2015-06-30T11:20:31-00:00'))
  True
  >>> (_parse_date('2015-06-30T11:33:52-07:00')
  ...  <= _parse_date('2015-06-30T11:33:52-08:00'))
  True
  >>> (_parse_date('2015-06-30T11:33:52-07:00')
  ...  <= _parse_date('2015-06-30T11:33:52-06:00'))
  False
  """
  return dateutil.parser.parse(datestr)


def _merge2(xs, ys, compare=lambda x, y: x <= y):
  """Merges two sorted iterables, preserving sort order.

  >>> list(_merge2([1, 3, 6], [2, 4, 5]))
  [1, 2, 3, 4, 5, 6]
  >>> list(_merge2([1, 2, 3], []))
  [1, 2, 3]
  >>> list(_merge2([], [4, 5, 6]))
  [4, 5, 6]
  >>> list(_merge2([], []))
  []
  >>> list(_merge2([4, 2], [3, 1], compare=lambda x, y: x >= y))
  [4, 3, 2, 1]

  The merge is left-biased and preserves order within each argument.

  >>> list(_merge2([1, 4], [3, 2], compare=lambda x, y: True))
  [1, 4, 3, 2]
  """
  xs = iter(xs)
  ys = iter(ys)
  x = None
  y = None
  try:
    x = (xs.next(),)
    y = (ys.next(),)

    while True:
      if compare(x[0], y[0]):
        yield x[0]
        x = None
        x = (xs.next(),)
      else:
        yield y[0]
        y = None
        y = (ys.next(),)
  except StopIteration:
    if x: yield x[0]
    for x in xs: yield x
    if y: yield y[0]
    for y in ys: yield y


def _merge(xss, compare=lambda x, y: x <= y):
  """Merges a sequence of sorted iterables in sorted order.

  >>> list(_merge([ [1,5], [2,5,6], [], [0,7] ]))
  [0, 1, 2, 5, 5, 6, 7]
  >>> list(_merge([ [1,2,3] ]))
  [1, 2, 3]
  >>> list(_merge([]))
  []
  """
  return reduce(lambda xs, ys: _merge2(xs, ys, compare=compare), xss, [])


def _updated(d, updates):
  """Updates a dictionary without mutation.

  >>> d = { 'x': 1, 'y': 2 }
  >>> sorted(_updated(d, { 'y': 3, 'z': 4 }).items())
  [('x', 1), ('y', 3), ('z', 4)]
  >>> sorted(d.items())
  [('x', 1), ('y', 2)]
  """
  d = copy.copy(d)
  d.update(updates)
  return d


@contextlib.contextmanager
def in_directory(target_dir):
  cwd = os.getcwd()
  os.chdir(target_dir)
  try:
    yield
  finally:
    os.chdir(cwd)
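
# in_directory usage mirrors GitRepoSpec.checkout above, e.g.:
#   with in_directory(dep_dir):
#     _run_cmd(['git', 'fetch'])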