Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 import ast | |
| 2 import collections | |
| 3 import contextlib | |
| 4 import copy | |
| 5 import functools | |
| 6 import itertools | |
| 7 import logging | |
| 8 import os | |
| 9 import subprocess | |
| 10 import sys | |
| 11 import tempfile | |
| 12 | |
| 13 sys.path.append( | |
|
luqui
2015/08/07 19:30:36
import third_party.google instead
luqui
2015/08/20 22:45:24
Done
| |
| 14 os.path.join(os.path.dirname(os.path.abspath(__file__)), 'third_party')) | |
| 15 | |
| 16 import dateutil.parser | |
| 17 | |
| 18 from google import protobuf | |
| 19 from recipe_engine import package_pb2 | |
| 20 | |
class UncleanFilesystemError(Exception):
  """Raised when an on-disk checkout is missing, not a git repo, or dirty."""
| 23 | |
| 24 | |
class InconsistentDependencyGraphError(Exception):
  """Raised when the same project is reached via two conflicting repo specs."""
| 27 | |
| 28 | |
class ProtoFile(object):
  """Wraps a package_pb2.Package stored as a text proto on disk.

  Centralizes reading, parsing, serializing and writing of the recipes
  config file so the text-proto details live in one place.
  """

  def __init__(self, path):
    self._path = path

  @property
  def path(self):
    """The canonical (symlink-resolved) location of the config file."""
    return os.path.realpath(self._path)

  def read_text(self):
    """Returns the raw text contents of the file."""
    with open(self._path, 'r') as fh:
      return fh.read()

  def read(self):
    """Parses the file into a package_pb2.Package message."""
    buf = package_pb2.Package()
    protobuf.text_format.Merge(self.read_text(), buf)
    return buf

  def to_text(self, buf):
    """Serializes a message into its canonical text-proto string."""
    return protobuf.text_format.MessageToString(buf)

  def write(self, buf):
    """Writes the message to disk as a text proto."""
    with open(self._path, 'w') as fh:
      fh.write(self.to_text(buf))
| 53 | |
| 54 | |
class PackageContext(object):
  """Contains information about where the root package and its dependency
  checkouts live.

  - recipes_dir is the location of recipes/ and recipe_modules/ which contain
    the actual recipes of the root package.
  - package_dir is where dependency checkouts live, e.g.
    package_dir/recipe_engine/recipes/...
  - repo_root is the root of the repository containing the root package.
  """

  def __init__(self, recipes_dir, package_dir, repo_root):
    self.recipes_dir = recipes_dir
    self.package_dir = package_dir
    self.repo_root = repo_root

  @classmethod
  def from_proto_file(cls, proto_file):
    """Derives a PackageContext from a recipes.cfg ProtoFile (or its path).

    The config file must live at infra/config/recipes.cfg relative to the
    repository root; repo_root is computed from that invariant.
    NOTE(review): the location is actually configurable via luci-cfg --
    TODO confirm this assumption holds for all consumers.
    """
    if isinstance(proto_file, basestring):
      proto_file = ProtoFile(proto_file)
    proto_path = proto_file.path

    repo_root = os.path.dirname(os.path.dirname(os.path.dirname(proto_path)))
    expected_path = os.path.join(repo_root, 'infra', 'config', 'recipes.cfg')
    assert proto_path == expected_path, (
        'Recipes config must be in infra/config/recipes.cfg from root of repo\n'
        ' Expected location: %s\n'
        ' Actual location: %s\n'
        % (expected_path, proto_path))

    buf = proto_file.read()

    recipes_path = buf.recipes_path
    if sys.platform.startswith('win'):
      # BUG FIX: str.replace returns a new string; the original discarded the
      # result, so the separator fixup was a no-op on Windows.  Also use
      # os.sep rather than a hard-coded backslash.
      recipes_path = recipes_path.replace('/', os.sep)

    return cls(os.path.join(repo_root, recipes_path),
               os.path.join(repo_root, recipes_path, '.recipe_deps'),
               repo_root)
| 94 | |
| 95 | |
@functools.total_ordering
class RepoUpdate(object):
  """A single package update, ordered by (project id, revision).

  Wraps a spec object (e.g. GitRepoSpec or PackageSpec) exposing .id and
  .revision; functools.total_ordering derives <, >, >= from __eq__/__le__.
  """

  def __init__(self, spec):
    self.spec = spec

  @property
  def id(self):
    return self.spec.id

  def _key(self):
    # The comparison key: identity plus pinned revision.
    return (self.id, self.spec.revision)

  def __eq__(self, other):
    return self._key() == other._key()

  def __ne__(self, other):
    # BUG FIX: Python 2 does not derive != from == (and total_ordering does
    # not supply it), so without this two equal updates compared unequal via
    # identity.  Matches the __ne__ convention used by GitRepoSpec and
    # PackageSpec elsewhere in this file.
    return not self.__eq__(other)

  def __le__(self, other):
    return self._key() <= other._key()
| 110 | |
| 111 | |
class RepoSpec(object):
  """Abstract description of where a dependency package lives.

  Concrete subclasses (git checkout, root repo) know how to materialize
  and validate the package on disk.
  """

  def checkout(self, context):
    """Fetches the specified package and returns the path of the package root
    (the directory that contains recipes and recipe_modules).
    """
    raise NotImplementedError()
| 118 | |
| 119 | |
class GitRepoSpec(RepoSpec):
  """A RepoSpec for a dependency pinned to a git (repo, branch, revision)."""

  def __init__(self, id, repo, branch, revision, path):
    self.id = id              # project id of the dependency
    self.repo = repo          # git URL to clone from
    self.branch = branch      # branch scanned for newer revisions
    self.revision = revision  # pinned commit to reset to
    self.path = path          # subdirectory of the repo holding the package

  def checkout(self, context):
    """Clones (or freshens) the checkout and hard-resets to the pinned
    revision.  Raises UncleanFilesystemError if the target directory exists
    but is not a git repo.
    """
    package_dir = context.package_dir
    dep_dir = os.path.join(package_dir, self.id)
    logging.info('Freshening repository %s' % dep_dir)

    if not os.path.isdir(dep_dir):
      _run_cmd(['git', 'clone', self.repo, dep_dir])
    elif not os.path.isdir(os.path.join(dep_dir, '.git')):
      raise UncleanFilesystemError('%s exists but is not a git repo' % dep_dir)

    with _in_directory(dep_dir):
      _run_cmd(['git', 'fetch'])
      _run_cmd(['git', 'reset', '--hard', self.revision])

  def check_checkout(self, context):
    """Validates an existing checkout without touching the network.

    Raises UncleanFilesystemError if the checkout is missing, not a git
    repo, or has local modifications (non-empty `git status --porcelain`).
    """
    dep_dir = os.path.join(context.package_dir, self.id)
    if not os.path.isdir(dep_dir):
      raise UncleanFilesystemError('Dependency %s does not exist' %
                                   dep_dir)
    elif not os.path.isdir(os.path.join(dep_dir, '.git')):
      raise UncleanFilesystemError('Dependency %s is not a git repo' %
                                   dep_dir)

    with _in_directory(dep_dir):
      git_status_command = ['git', 'status', '--porcelain']
      logging.info('%s', git_status_command)
      output = subprocess.check_output(git_status_command)
      if output:
        raise UncleanFilesystemError('Dependency %s is unclean:\n%s' %
                                     (dep_dir, output))

  def repo_root(self, context):
    """Returns the package root inside the checkout (self.path may select a
    subdirectory of the repo)."""
    return os.path.join(context.package_dir, self.id, self.path)

  def dump(self):
    """Serializes this spec to a package_pb2.DepSpec message."""
    buf = package_pb2.DepSpec(
        project_id=self.id,
        url=self.repo,
        branch=self.branch,
        revision=self.revision)
    if self.path:
      buf.path_override = self.path
    return buf

  def updates(self, context):
    """Returns a RepoUpdate for each commit on the branch after self.revision,
    oldest first.

    (An earlier version of this docstring claimed (timestamp, GitRepoSpec)
    pairs; the code returns plain RepoUpdate objects.)
    """
    lines = filter(bool, self._raw_updates(context).strip().split('\n'))
    return [ RepoUpdate(
        GitRepoSpec(self.id, self.repo, self.branch, rev, self.path))
        for rev in lines ]

  def _raw_updates(self, context):
    """Returns `git log` output: one commit hash per line, oldest first.

    Implicitly freshens the checkout first so origin/<branch> is current.
    'origin' is assumed because these dependency checkouts are created by
    checkout() above and so always have that remote name.
    """
    self.checkout(context)
    # XXX(luqui): Should this just focus on the recipes subtree rather than
    # the whole repo?
    git = subprocess.Popen(['git', 'log',
                            '%s..origin/%s' % (self.revision, self.branch),
                            '--pretty=%H',
                            '--reverse'],
                           stdout=subprocess.PIPE,
                           cwd=os.path.join(context.package_dir, self.id))
    (stdout, _) = git.communicate()
    return stdout

  def _components(self):
    # NOTE(review): branch is absent from the equality key, so two specs at
    # the same revision on different branches compare equal -- presumably
    # intentional (the revision pins the content); confirm before changing.
    return (self.id, self.repo, self.revision, self.path)

  def __eq__(self, other):
    return self._components() == other._components()

  def __ne__(self, other):
    return not self.__eq__(other)
| 207 | |
| 208 | |
class RootRepoSpec(RepoSpec):
  """RepoSpec for the root package, which the user has already checked out."""

  def __init__(self):
    pass

  def checkout(self, context):
    """No-op: the root repo is assumed to already be checked out."""
    pass

  def check_checkout(self, context):
    """No-op: the root repo is not validated."""
    pass

  def repo_root(self, context):
    return context.repo_root
| 222 | |
| 223 | |
class Package(object):
  """A fully-resolved package: its repo spec, its resolved deps, and layout.

  repo is the RepoSpec this package was created from; deps maps project id
  to the already-constructed dependency Package; recipes_dir is the
  directory containing recipes/ and recipe_modules/.
  """

  def __init__(self, repo, deps, recipes_dir):
    self.repo = repo
    self.deps = deps
    self.recipes_dir = recipes_dir

  @property
  def recipe_dirs(self):
    """Directories holding this package's recipes."""
    return [os.path.join(self.recipes_dir, 'recipes')]

  @property
  def module_dirs(self):
    """Directories holding this package's recipe modules."""
    return [os.path.join(self.recipes_dir, 'recipe_modules')]

  def find_dep(self, dep_name):
    """Looks up a direct dependency by project id (KeyError if absent)."""
    return self.deps[dep_name]

  def module_path(self, module_name):
    """Returns the directory of a single recipe module in this package."""
    return os.path.join(self.recipes_dir, 'recipe_modules', module_name)
| 243 | |
| 244 | |
class PackageSpec(object):
  """The parsed contents of a recipes.cfg: project id, recipes path, deps."""

  API_VERSION = 1

  def __init__(self, project_id, recipes_path, deps):
    self._project_id = project_id
    self._recipes_path = recipes_path
    self._deps = deps

  @classmethod
  def load_proto(cls, proto_file):
    """Reads a ProtoFile and builds the corresponding PackageSpec.

    Asserts that the config's api_version matches API_VERSION.
    """
    buf = proto_file.read()
    assert buf.api_version == cls.API_VERSION

    deps = {}
    for dep in buf.deps:
      deps[dep.project_id] = GitRepoSpec(dep.project_id,
                                         dep.url,
                                         dep.branch,
                                         dep.revision,
                                         dep.path_override)
    return cls(buf.project_id, buf.recipes_path, deps)

  @property
  def project_id(self):
    return self._project_id

  @property
  def recipes_path(self):
    return self._recipes_path

  @property
  def deps(self):
    return self._deps

  def dump(self):
    """Serializes to a package_pb2.Package message, deps sorted by id."""
    dumped_deps = [ self._deps[dep_id].dump()
                    for dep_id in sorted(self._deps) ]
    return package_pb2.Package(
        api_version=self.API_VERSION,
        project_id=self._project_id,
        recipes_path=self._recipes_path,
        deps=dumped_deps)

  def updates(self, context):
    """Returns the merged stream of dependency updates as RepoUpdates.

    Each element wraps a PackageSpec identical to this one except with the
    deps advanced through the corresponding upstream commit, in merge order.
    """
    dep_updates = _merge([
        self._deps[dep].updates(context) for dep in sorted(self._deps) ])

    deps_so_far = self._deps
    result = []
    for update in dep_updates:
      deps_so_far = _updated(deps_so_far, { update.id: update.spec })
      result.append(RepoUpdate(PackageSpec(
          self._project_id, self._recipes_path, deps_so_far)))
    return result

  def iterate_consistent_updates(self, context):
    """Yields only those updates whose full dependency graph is consistent.

    Updates producing an inconsistent graph (blocked on a dependency roll)
    are silently skipped.
    """
    root_spec = RootRepoSpec()
    for update in self.updates(context):
      try:
        package_deps = PackageDeps(context)
        package_deps._create_from_spec(root_spec, update.spec, fetch=True)
      except InconsistentDependencyGraphError:
        # Skip inconsistent graphs, which are blocked on dependency rolls.
        continue
      yield update

  def __eq__(self, other):
    return ((self._project_id, self._recipes_path, self._deps) ==
            (other._project_id, other._recipes_path, other._deps))

  def __ne__(self, other):
    return not self.__eq__(other)
| 318 | |
| 319 | |
class PackageDeps(object):
  """An object containing all the transitive dependencies of the root package.
  """
  def __init__(self, context):
    self._context = context
    self._repos = {}  # project id -> Package, filled in during construction

  @classmethod
  def create(cls, proto_file, fetch=False):
    """Builds the full dependency graph rooted at a recipes.cfg.

    proto_file: the root repo's infra/config/recipes.cfg, as a path string
      or a ProtoFile.
    fetch: when True, dependency checkouts are cloned/reset as needed; when
      False, existing checkouts are only validated (with a warning on
      failure).
    """
    if isinstance(proto_file, basestring):
      proto_file = ProtoFile(proto_file)
    context = PackageContext.from_proto_file(proto_file)
    package_deps = cls(context)

    root_package = package_deps._create_package(RootRepoSpec(), fetch)
    return package_deps

  def _create_package(self, repo_spec, fetch):
    """Materializes (or validates) repo_spec on disk, then loads the package
    described by its recipes.cfg."""
    if fetch:
      repo_spec.checkout(self._context)
    else:
      try:
        repo_spec.check_checkout(self._context)
      except UncleanFilesystemError as e:
        # Deliberately best-effort: warn and continue; reading the config
        # below may still succeed against the stale checkout.
        logging.warn(
            'Unclean environment. You probably need to run "recipes.py fetch"\n'
            '%s' % e.message)

    proto_path = os.path.join(repo_spec.repo_root(self._context),
                              'infra', 'config', 'recipes.cfg')
    package_spec = PackageSpec.load_proto(ProtoFile(proto_path))

    return self._create_from_spec(repo_spec, package_spec, fetch)

  def _create_from_spec(self, repo_spec, package_spec, fetch):
    """Recursively creates Packages for package_spec's deps, then this one.

    Raises InconsistentDependencyGraphError if the same project id was
    already reached via a different repo spec.
    NOTE(review): there is no cycle breaker here -- a dependency cycle
    would recurse without bound; TODO confirm/guard.
    """
    deps = {}
    for dep, dep_repo in sorted(package_spec.deps.items()):
      deps[dep] = self._create_package(dep_repo, fetch)

    if (package_spec.project_id in self._repos and
        not repo_spec == self._repos[package_spec.project_id].repo):
      raise InconsistentDependencyGraphError(
          'Package specs do not match: %s vs %s' %
          (repo_spec, self._repos[package_spec.project_id].repo))

    package = Package(
        repo_spec, deps,
        os.path.join(repo_spec.repo_root(self._context),
                     package_spec.recipes_path))

    self._repos[package_spec.project_id] = package
    return package

  # TODO(luqui): Remove this, so all accesses to packages are done
  # via other packages with properly scoped deps.
  def get_package(self, package_id):
    return self._repos[package_id]

  @property
  def all_recipe_dirs(self):
    """Yields every recipes/ directory across all loaded packages."""
    for repo in self._repos.values():
      for subdir in repo.recipe_dirs:
        yield str(subdir)

  @property
  def all_module_dirs(self):
    """Yields every recipe_modules/ directory across all loaded packages."""
    for repo in self._repos.values():
      for subdir in repo.module_dirs:
        yield str(subdir)
| 389 | |
| 390 | |
| 391 def _run_cmd(cmd): | |
| 392 logging.info('%s', cmd) | |
| 393 subprocess.check_call(cmd) | |
| 394 | |
| 395 | |
def _parse_date(datestr):
  """Parses an ISO-8601 date string into a (possibly tz-aware) datetime.

  Delegates to the bundled third_party dateutil parser.

  >>> ( _parse_date('2015-06-30T10:15:20-00:00')
  ...     <= _parse_date('2015-06-30T11:20:31-00:00'))
  True
  >>> ( _parse_date('2015-06-30T11:33:52-07:00')
  ...     <= _parse_date('2015-06-30T11:33:52-08:00'))
  True
  >>> ( _parse_date('2015-06-30T11:33:52-07:00')
  ...     <= _parse_date('2015-06-30T11:33:52-06:00'))
  False
  """
  return dateutil.parser.parse(datestr)
| 410 | |
| 411 | |
| 412 def _merge2(xs, ys, compare=lambda x, y: x <= y): | |
|
iannucci
2015/08/06 23:57:13
maybe just
return sorted(xs + ys)
luqui
2015/08/20 22:45:24
Will not work, since we need the order in xs and y
| |
| 413 """Merges two sorted iterables, preserving sort order. | |
| 414 | |
| 415 >>> list(_merge2([1, 3, 6], [2, 4, 5])) | |
| 416 [1, 2, 3, 4, 5, 6] | |
| 417 >>> list(_merge2([1, 2, 3], [])) | |
| 418 [1, 2, 3] | |
| 419 >>> list(_merge2([], [4, 5, 6])) | |
| 420 [4, 5, 6] | |
| 421 >>> list(_merge2([], [])) | |
| 422 [] | |
| 423 >>> list(_merge2([4, 2], [3, 1], compare=lambda x, y: x >= y)) | |
| 424 [4, 3, 2, 1] | |
| 425 | |
| 426 The merge is left-biased and preserves order within each argument. | |
| 427 | |
| 428 >>> list(_merge2([1, 4], [3, 2], compare=lambda x, y: True)) | |
| 429 [1, 4, 3, 2] | |
| 430 """ | |
| 431 | |
|
iannucci
2015/08/06 23:57:12
not_a_thing = object()
luqui
2015/08/20 22:45:24
Done
| |
| 432 xs = iter(xs) | |
| 433 ys = iter(ys) | |
| 434 x = None | |
| 435 y = None | |
| 436 try: | |
| 437 x = (xs.next(),) | |
| 438 y = (ys.next(),) | |
| 439 | |
| 440 while True: | |
| 441 if compare(x[0], y[0]): | |
| 442 yield x[0] | |
| 443 x = None | |
| 444 x = (xs.next(),) | |
| 445 else: | |
| 446 yield y[0] | |
| 447 y = None | |
| 448 y = (ys.next(),) | |
| 449 except StopIteration: | |
| 450 if x: yield x[0] | |
| 451 for x in xs: yield x | |
| 452 if y: yield y[0] | |
| 453 for y in ys: yield y | |
| 454 | |
| 455 | |
| 456 def _merge(xss, compare=lambda x, y: x <= y): | |
| 457 """Merges a sequence of sorted iterables in sorted order. | |
| 458 | |
| 459 >>> list(_merge([ [1,5], [2,5,6], [], [0,7] ])) | |
| 460 [0, 1, 2, 5, 5, 6, 7] | |
| 461 >>> list(_merge([ [1,2,3] ])) | |
| 462 [1, 2, 3] | |
| 463 >>> list(_merge([])) | |
| 464 [] | |
| 465 """ | |
| 466 return reduce(lambda xs, ys: _merge2(xs, ys, compare=compare), xss, []) | |
| 467 | |
| 468 | |
| 469 def _updated(d, updates): | |
| 470 """Updates a dictionary without mutation. | |
| 471 | |
| 472 >>> d = { 'x': 1, 'y': 2 } | |
| 473 >>> sorted(_updated(d, { 'y': 3, 'z': 4 }).items()) | |
| 474 [('x', 1), ('y', 3), ('z', 4)] | |
| 475 >>> sorted(d.items()) | |
| 476 [('x', 1), ('y', 2)] | |
| 477 """ | |
| 478 | |
| 479 d = copy.copy(d) | |
| 480 d.update(updates) | |
| 481 return d | |
| 482 | |
| 483 | |
| 484 @contextlib.contextmanager | |
| 485 def _in_directory(target_dir): | |
| 486 cwd = os.getcwd() | |
| 487 os.chdir(target_dir) | |
| 488 try: | |
| 489 yield | |
| 490 finally: | |
| 491 os.chdir(cwd) | |
| OLD | NEW |