OLD | NEW |
---|---|
1 # Copyright 2015 The LUCI Authors. All rights reserved. | 1 # Copyright 2015 The LUCI Authors. All rights reserved. |
2 # Use of this source code is governed under the Apache License, Version 2.0 | 2 # Use of this source code is governed under the Apache License, Version 2.0 |
3 # that can be found in the LICENSE file. | 3 # that can be found in the LICENSE file. |
4 | 4 |
5 import copy | 5 import copy |
6 import difflib | 6 import difflib |
7 import json | 7 import json |
8 import logging | 8 import logging |
9 import operator | 9 import operator |
10 import os | 10 import os |
(...skipping 42 matching lines...) | |
53 return os.path.dirname( # <repo root> | 53 return os.path.dirname( # <repo root> |
54 os.path.dirname( # infra | 54 os.path.dirname( # infra |
55 os.path.dirname( # config | 55 os.path.dirname( # config |
56 os.path.abspath(recipes_cfg)))) # recipes.cfg | 56 os.path.abspath(recipes_cfg)))) # recipes.cfg |
57 | 57 |
58 | 58 |
59 class ProtoFile(object): | 59 class ProtoFile(object): |
60 """A collection of functions operating on a proto path. | 60 """A collection of functions operating on a proto path. |
61 | 61 |
62 This is an object so that it can be mocked in the tests. | 62 This is an object so that it can be mocked in the tests. |
63 | |
64 Proto files read will always be upconverted to the current proto in | |
65 package.proto, and will be written back in their original format. | |
63 """ | 66 """ |
67 API_VERSIONS = (1, 2) | |
68 | |
64 def __init__(self, path): | 69 def __init__(self, path): |
65 self._path = path | 70 self._path = path |
66 | 71 |
67 @property | 72 @property |
68 def path(self): | 73 def path(self): |
69 return os.path.realpath(self._path) | 74 return os.path.realpath(self._path) |
70 | 75 |
71 def read_raw(self): | 76 def read_raw(self): |
72 with open(self._path, 'r') as fh: | 77 with open(self._path, 'r') as fh: |
73 return fh.read() | 78 return fh.read() |
74 | 79 |
75 def read(self): | 80 def read(self): |
76 text = self.read_raw() | 81 obj = json.loads(self.read_raw()) |
82 | |
83 vers = obj.get('api_version') | |
84 assert vers in self.API_VERSIONS, ( | |
85 'expected %r to be in %r' % (vers, self.API_VERSIONS) | |
nodir (2017/03/28 03:36:13): The assertion message should read "to be one of".
iannucci (2017/03/28 04:06:18): Done.
| |
86 ) | |
87 | |
88 # upconvert old deps-as-a-list to deps-as-a-dict | |
89 if 'deps' in obj and obj.get('api_version', 1) == 1: | |
nodir (2017/03/28 03:36:13): Use the `vers` variable here instead of calling `obj.get('api_version', 1)` again.
iannucci (2017/03/28 04:06:18): Done.
| |
90 obj['deps'] = {d.pop('project_id'): d for d in obj['deps']} | |
91 | |
77 buf = package_pb2.Package() | 92 buf = package_pb2.Package() |
78 json_format.Parse(text, buf, ignore_unknown_fields=True) | 93 json_format.ParseDict(obj, buf, ignore_unknown_fields=True) |
79 return buf | 94 return buf |
80 | 95 |
81 def to_raw(self, buf): | 96 def to_raw(self, buf): |
82 obj = json_format.MessageToDict(buf, preserving_proto_field_name=True) | 97 obj = json_format.MessageToDict(buf, preserving_proto_field_name=True) |
98 | |
99 # downconvert if api_version is 1 | |
100 if 'deps' in obj and obj.get('api_version', 1) < 2: | |
nodir (2017/03/28 03:36:13): Use the `vers` variable here too?
iannucci (2017/03/28 04:06:18): Done.
| |
101 deps = [] | |
102 for pid, d in sorted(obj['deps'].iteritems()): | |
103 d['project_id'] = pid | |
104 deps.append(d) | |
105 obj['deps'] = deps | |
106 | |
83 return json.dumps(obj, indent=2, sort_keys=True).replace(' \n', '\n') | 107 return json.dumps(obj, indent=2, sort_keys=True).replace(' \n', '\n') |
84 | 108 |
85 def write(self, buf): | 109 def write(self, buf): |
86 with open(self._path, 'w') as fh: | 110 with open(self._path, 'w') as fh: |
87 fh.write(self.to_raw(buf)) | 111 fh.write(self.to_raw(buf)) |
88 | 112 |
89 | 113 |
90 class PackageContext(object): | 114 class PackageContext(object): |
91 """Contains information about where the root package and its dependency | 115 """Contains information about where the root package and its dependency |
92 checkouts live. | 116 checkouts live. |
(...skipping 105 matching lines...) | |
198 checkout_dir = self._dep_dir(context) | 222 checkout_dir = self._dep_dir(context) |
199 self.backend.checkout( | 223 self.backend.checkout( |
200 self.repo, self.revision, checkout_dir, context.allow_fetch) | 224 self.repo, self.revision, checkout_dir, context.allow_fetch) |
201 cleanup_pyc(checkout_dir) | 225 cleanup_pyc(checkout_dir) |
202 | 226 |
203 def repo_root(self, context): | 227 def repo_root(self, context): |
204 return os.path.join(self._dep_dir(context), self.path) | 228 return os.path.join(self._dep_dir(context), self.path) |
205 | 229 |
206 def dump(self): | 230 def dump(self): |
207 buf = package_pb2.DepSpec( | 231 buf = package_pb2.DepSpec( |
208 project_id=self.project_id, | |
209 url=self.repo, | 232 url=self.repo, |
210 branch=self.branch, | 233 branch=self.branch, |
211 revision=self.revision) | 234 revision=self.revision) |
212 if self.path: | 235 if self.path: |
213 buf.path_override = self.path | 236 buf.path_override = self.path |
214 | 237 |
215 # Only dump repo_type if it's different from default. This preserves | 238 # Only dump repo_type if it's different from default. This preserves |
216 # compatibility e.g. with recipes.py bootstrap scripts in client repos | 239 # compatibility e.g. with recipes.py bootstrap scripts in client repos |
217 # which may not handle repo_type correctly. | 240 # which may not handle repo_type correctly. |
218 # TODO(phajdan.jr): programmatically extract the default value. | 241 # TODO(phajdan.jr): programmatically extract the default value. |
(...skipping 104 matching lines...) | |
323 Requires a good checkout.""" | 346 Requires a good checkout.""" |
324 return ProtoFile(InfraRepoConfig().to_recipes_cfg(self.path)) | 347 return ProtoFile(InfraRepoConfig().to_recipes_cfg(self.path)) |
325 | 348 |
326 def updates(self, _context, _other_revision=None): | 349 def updates(self, _context, _other_revision=None): |
327 """Returns (empty) list of potential updates for this spec.""" | 350 """Returns (empty) list of potential updates for this spec.""" |
328 return [] | 351 return [] |
329 | 352 |
330 def dump(self): | 353 def dump(self): |
331 """Returns the package.proto DepSpec form of this RepoSpec.""" | 354 """Returns the package.proto DepSpec form of this RepoSpec.""" |
332 return package_pb2.DepSpec( | 355 return package_pb2.DepSpec( |
333 project_id=self.project_id, | |
334 url="file://"+self.path) | 356 url="file://"+self.path) |
335 | 357 |
336 def __eq__(self, other): | 358 def __eq__(self, other): |
337 if not isinstance(other, type(self)): | 359 if not isinstance(other, type(self)): |
338 return False | 360 return False |
339 return self.path == other.path | 361 return self.path == other.path |
340 | 362 |
341 | 363 |
342 class RootRepoSpec(RepoSpec): | 364 class RootRepoSpec(RepoSpec): |
343 def __init__(self, proto_file): | 365 def __init__(self, proto_file): |
(...skipping 111 matching lines...) | |
455 # Prevent rolling backwards. | 477 # Prevent rolling backwards. |
456 more_recent_revision = other_spec.get_more_recent_revision( | 478 more_recent_revision = other_spec.get_more_recent_revision( |
457 self._context, current_revision, other_spec.revision) | 479 self._context, current_revision, other_spec.revision) |
458 if more_recent_revision != other_spec.revision: | 480 if more_recent_revision != other_spec.revision: |
459 return False | 481 return False |
460 | 482 |
461 self._updates[other_spec.project_id] = other_spec | 483 self._updates[other_spec.project_id] = other_spec |
462 | 484 |
463 def get_rolled_spec(self): | 485 def get_rolled_spec(self): |
464 """Returns a PackageSpec with all the deps updates from this roll.""" | 486 """Returns a PackageSpec with all the deps updates from this roll.""" |
465 # TODO(phajdan.jr): does this preserve comments? should it? | |
466 new_deps = _updated( | 487 new_deps = _updated( |
467 self._package_spec.deps, | 488 self._package_spec.deps, |
468 { project_id: spec for project_id, spec in | 489 { project_id: spec for project_id, spec in |
469 self._updates.iteritems() }) | 490 self._updates.iteritems() }) |
470 return PackageSpec( | 491 return PackageSpec( |
492 self._package_spec.api_version, | |
471 self._package_spec.project_id, | 493 self._package_spec.project_id, |
472 self._package_spec.recipes_path, | 494 self._package_spec.recipes_path, |
473 new_deps) | 495 new_deps) |
474 | 496 |
475 def get_commit_infos(self): | 497 def get_commit_infos(self): |
476 """Returns a mapping project_id -> list of commits from that repo | 498 """Returns a mapping project_id -> list of commits from that repo |
477 that are getting pulled by this roll. | 499 that are getting pulled by this roll. |
478 """ | 500 """ |
479 commit_infos = {} | 501 commit_infos = {} |
480 | 502 |
(...skipping 11 matching lines...) | |
492 | 514 |
493 def get_diff(self): | 515 def get_diff(self): |
494 """Returns a unified diff between original package spec and one after roll. | 516 """Returns a unified diff between original package spec and one after roll. |
495 """ | 517 """ |
496 orig = str(self._package_spec.dump()).splitlines() | 518 orig = str(self._package_spec.dump()).splitlines() |
497 new = str(self.get_rolled_spec().dump()).splitlines() | 519 new = str(self.get_rolled_spec().dump()).splitlines() |
498 return '\n'.join(difflib.unified_diff(orig, new, lineterm='')) | 520 return '\n'.join(difflib.unified_diff(orig, new, lineterm='')) |
499 | 521 |
500 | 522 |
501 class PackageSpec(object): | 523 class PackageSpec(object): |
502 API_VERSION = 1 | 524 def __init__(self, api_version, project_id, recipes_path, deps): |
503 | 525 self._api_version = api_version |
504 def __init__(self, project_id, recipes_path, deps): | |
505 self._project_id = project_id | 526 self._project_id = project_id |
506 self._recipes_path = recipes_path | 527 self._recipes_path = recipes_path |
507 self._deps = deps | 528 self._deps = deps |
508 | 529 |
509 def __repr__(self): | 530 def __repr__(self): |
510 return 'PackageSpec(%s, %s, %r)' % (self._project_id, self._recipes_path, | 531 return 'PackageSpec(%s, %s, %r)' % (self._project_id, self._recipes_path, |
511 self._deps) | 532 self._deps) |
512 | 533 |
513 @classmethod | 534 @classmethod |
514 def load_proto(cls, proto_file): | 535 def load_proto(cls, proto_file): |
515 buf = proto_file.read() | 536 buf = proto_file.read() |
516 assert buf.api_version == cls.API_VERSION | |
517 | 537 |
518 deps = { str(dep.project_id): cls.spec_for_dep(dep) | 538 deps = { pid: cls.spec_for_dep(pid, dep) |
519 for dep in buf.deps } | 539 for pid, dep in buf.deps.iteritems() } |
520 return cls(str(buf.project_id), str(buf.recipes_path), deps) | 540 return cls(buf.api_version, str(buf.project_id), str(buf.recipes_path), |
541 deps) | |
521 | 542 |
522 @classmethod | 543 @classmethod |
523 def spec_for_dep(cls, dep): | 544 def spec_for_dep(cls, project_id, dep): |
524 """Returns a RepoSpec for the given dependency protobuf.""" | 545 """Returns a RepoSpec for the given dependency protobuf.""" |
525 url = str(dep.url) | 546 url = str(dep.url) |
526 if url.startswith("file://"): | 547 if url.startswith("file://"): |
527 return PathRepoSpec(str(dep.project_id), url[len("file://"):]) | 548 return PathRepoSpec(str(project_id), url[len("file://"):]) |
528 | 549 |
529 if dep.repo_type in (package_pb2.DepSpec.GIT, package_pb2.DepSpec.GITILES): | 550 if dep.repo_type in (package_pb2.DepSpec.GIT, package_pb2.DepSpec.GITILES): |
530 if dep.repo_type == package_pb2.DepSpec.GIT: | 551 if dep.repo_type == package_pb2.DepSpec.GIT: |
531 backend = fetch.GitBackend() | 552 backend = fetch.GitBackend() |
532 elif dep.repo_type == package_pb2.DepSpec.GITILES: | 553 elif dep.repo_type == package_pb2.DepSpec.GITILES: |
533 backend = fetch.GitilesBackend() | 554 backend = fetch.GitilesBackend() |
534 return GitRepoSpec(str(dep.project_id), | 555 return GitRepoSpec(str(project_id), |
535 url, | 556 url, |
536 str(dep.branch), | 557 str(dep.branch), |
537 str(dep.revision), | 558 str(dep.revision), |
538 str(dep.path_override), | 559 str(dep.path_override), |
539 backend) | 560 backend) |
540 | 561 |
541 assert False, 'Unexpected repo type: %s' % dep | 562 assert False, 'Unexpected repo type: %s' % dep |
542 | 563 |
543 @property | 564 @property |
544 def project_id(self): | 565 def project_id(self): |
545 return self._project_id | 566 return self._project_id |
546 | 567 |
547 @property | 568 @property |
548 def recipes_path(self): | 569 def recipes_path(self): |
549 return self._recipes_path | 570 return self._recipes_path |
550 | 571 |
551 @property | 572 @property |
552 def deps(self): | 573 def deps(self): |
553 return self._deps | 574 return self._deps |
554 | 575 |
576 @property | |
577 def api_version(self): | |
578 return self._api_version | |
579 | |
555 def dump(self): | 580 def dump(self): |
556 return package_pb2.Package( | 581 return package_pb2.Package( |
557 api_version=self.API_VERSION, | 582 api_version=self._api_version, |
558 project_id=self._project_id, | 583 project_id=self._project_id, |
559 recipes_path=self._recipes_path, | 584 recipes_path=self._recipes_path, |
560 deps=[ self._deps[dep].dump() for dep in sorted(self._deps.keys()) ]) | 585 deps={k: v.dump() for k, v in self._deps.iteritems()}) |
561 | 586 |
562 def roll_candidates(self, root_spec, context): | 587 def roll_candidates(self, root_spec, context): |
563 """Returns list of consistent roll candidates, and rejected roll candidates. | 588 """Returns list of consistent roll candidates, and rejected roll candidates. |
564 | 589 |
565 The first one is sorted by score, descending. The more commits are pulled by | 590 The first one is sorted by score, descending. The more commits are pulled by |
566 the roll, the higher score. | 591 the roll, the higher score. |
567 | 592 |
568 Second list is included to distinguish between a situation where there are | 593 Second list is included to distinguish between a situation where there are |
569 no roll candidates from one where there are updates but they're not | 594 no roll candidates from one where there are updates but they're not |
570 consistent. | 595 consistent. |
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
692 >>> d = { 'x': 1, 'y': 2 } | 717 >>> d = { 'x': 1, 'y': 2 } |
693 >>> sorted(_updated(d, { 'y': 3, 'z': 4 }).items()) | 718 >>> sorted(_updated(d, { 'y': 3, 'z': 4 }).items()) |
694 [('x', 1), ('y', 3), ('z', 4)] | 719 [('x', 1), ('y', 3), ('z', 4)] |
695 >>> sorted(d.items()) | 720 >>> sorted(d.items()) |
696 [('x', 1), ('y', 2)] | 721 [('x', 1), ('y', 2)] |
697 """ | 722 """ |
698 | 723 |
699 d = copy.copy(d) | 724 d = copy.copy(d) |
700 d.update(updates) | 725 d.update(updates) |
701 return d | 726 return d |
OLD | NEW |