OLD | NEW |
1 # Copyright 2016 The LUCI Authors. All rights reserved. | 1 # Copyright 2016 The LUCI Authors. All rights reserved. |
2 # Use of this source code is governed under the Apache License, Version 2.0 | 2 # Use of this source code is governed under the Apache License, Version 2.0 |
3 # that can be found in the LICENSE file. | 3 # that can be found in the LICENSE file. |
4 | 4 |
5 import calendar | 5 import calendar |
6 import httplib | 6 import httplib |
7 import json | 7 import json |
8 import logging | 8 import logging |
9 import os | 9 import os |
10 import re | 10 import re |
(...skipping 13 matching lines...) Expand all Loading... |
24 from . import env | 24 from . import env |
25 from . import requests_ssl | 25 from . import requests_ssl |
26 from .requests_ssl import requests | 26 from .requests_ssl import requests |
27 | 27 |
28 import subprocess42 | 28 import subprocess42 |
29 from google.protobuf import json_format | 29 from google.protobuf import json_format |
30 | 30 |
31 LOGGER = logging.getLogger(__name__) | 31 LOGGER = logging.getLogger(__name__) |
32 | 32 |
33 | 33 |
34 def has_interesting_changes(spec, changed_files): | |
35 # TODO(iannucci): analyze bundle_extra_paths.txt too. | |
36 return ( | |
37 'infra/config/recipes.cfg' in changed_files or | |
38 any(f.startswith(spec.recipes_path) for f in changed_files) | |
39 ) | |
40 | |
41 | |
42 class FetchError(Exception): | 34 class FetchError(Exception): |
43 pass | 35 pass |
44 | 36 |
45 | 37 |
46 class FetchNotAllowedError(FetchError): | 38 class FetchNotAllowedError(FetchError): |
47 pass | 39 pass |
48 | 40 |
49 | 41 |
50 class UnresolvedRefspec(Exception): | 42 class UnresolvedRefspec(Exception): |
51 pass | 43 pass |
52 | 44 |
53 | 45 |
54 # revision (str): the revision of this commit (i.e. hash) | 46 # revision (str): the revision of this commit (i.e. hash) |
55 # author_email (str|None): the email of the author of this commit | 47 # author_email (str|None): the email of the author of this commit |
56 # commit_timestamp (int): the unix commit timestamp for this commit | 48 # commit_timestamp (int): the unix commit timestamp for this commit |
57 # message_lines (tuple(str)): the message of this commit | 49 # message_lines (tuple(str)): the message of this commit |
58 # spec (package_pb2.Package): the parsed infra/config/recipes.cfg file or None. | 50 # spec (package_pb2.Package): the parsed infra/config/recipes.cfg file or None. |
59 # roll_candidate (bool): if this commit contains changes which are known to | |
60 # affect the behavior of the recipes (i.e. modifications within recipe_path | |
61 # and/or modifications to recipes.cfg) | |
62 CommitMetadata = namedtuple( | 51 CommitMetadata = namedtuple( |
63 '_CommitMetadata', | 52 '_CommitMetadata', |
64 'revision author_email commit_timestamp message_lines spec roll_candidate') | 53 'revision author_email commit_timestamp message_lines spec') |
65 | 54 |
66 | 55 |
67 class Backend(object): | 56 class Backend(object): |
68 @staticmethod | 57 @staticmethod |
69 def class_for_type(repo_type): | 58 def class_for_type(repo_type): |
70 """ | 59 """ |
71 Args: | 60 Args: |
72 repo_type (package_pb2.DepSpec.RepoType) | 61 repo_type (package_pb2.DepSpec.RepoType) |
73 | 62 |
74 Returns Backend (class): Returns the Backend appropriate for the | 63 Returns Backend (class): Returns the Backend appropriate for the |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
118 _GIT_METADATA_CACHE = {} | 107 _GIT_METADATA_CACHE = {} |
119 | 108 |
120 # This matches git commit hashes. | 109 # This matches git commit hashes. |
121 _COMMIT_RE = re.compile(r'^[a-fA-F0-9]{40}$') | 110 _COMMIT_RE = re.compile(r'^[a-fA-F0-9]{40}$') |
122 | 111 |
123 def commit_metadata(self, refspec): | 112 def commit_metadata(self, refspec): |
124 """Cached version of _commit_metadata_impl. | 113 """Cached version of _commit_metadata_impl. |
125 | 114 |
126 The refspec will be resolved if it's not absolute. | 115 The refspec will be resolved if it's not absolute. |
127 | 116 |
128 Returns (CommitMetadata). | 117 Returns { |
| 118 'author': '<author name>', |
| 119 'message': '<commit message>', |
| 120 'spec': package_pb2.Package or None, # the parsed recipes.cfg file. |
| 121 } |
129 """ | 122 """ |
130 revision = self.resolve_refspec(refspec) | 123 revision = self.resolve_refspec(refspec) |
131 cache = self._GIT_METADATA_CACHE.setdefault(self.repo_url, {}) | 124 cache = self._GIT_METADATA_CACHE.setdefault(self.repo_url, {}) |
132 if revision not in cache: | 125 if revision not in cache: |
133 cache[revision] = self._commit_metadata_impl(revision) | 126 cache[revision] = self._commit_metadata_impl(revision) |
134 return cache[revision] | 127 return cache[revision] |
135 | 128 |
136 @classmethod | 129 @classmethod |
137 def is_resolved_revision(cls, revision): | 130 def is_resolved_revision(cls, revision): |
138 return cls._COMMIT_RE.match(revision) | 131 return cls._COMMIT_RE.match(revision) |
139 | 132 |
140 @classmethod | 133 @classmethod |
141 def assert_resolved(cls, revision): | 134 def assert_resolved(cls, revision): |
142 if not cls.is_resolved_revision(revision): | 135 if not cls.is_resolved_revision(revision): |
143 raise UnresolvedRefspec('unresolved refspec %r' % revision) | 136 raise UnresolvedRefspec('unresolved refspec %r' % revision) |
144 | 137 |
145 def resolve_refspec(self, refspec): | 138 def resolve_refspec(self, refspec): |
146 if self.is_resolved_revision(refspec): | 139 if self.is_resolved_revision(refspec): |
147 return refspec | 140 return refspec |
148 return self._resolve_refspec_impl(refspec) | 141 return self._resolve_refspec_impl(refspec) |
149 | 142 |
150 def updates(self, revision, other_revision): | 143 def updates(self, revision, other_revision, paths): |
151 """Returns a list of revisions |revision| through |other_revision| | 144 """Returns a list of revisions |revision| through |other_revision| |
152 (inclusive). | 145 (inclusive). |
153 | 146 |
| 147 If |paths| is a non-empty list, the history is scoped just to these paths. |
| 148 |
154 Returns list(CommitMetadata) - The commit metadata in the range | 149 Returns list(CommitMetadata) - The commit metadata in the range |
155 (revision,other_revision]. | 150 (revision,other_revision]. |
156 """ | 151 """ |
157 self.assert_resolved(revision) | 152 self.assert_resolved(revision) |
158 self.assert_resolved(other_revision) | 153 self.assert_resolved(other_revision) |
159 return self._updates_impl(revision, other_revision) | 154 return self._updates_impl(revision, other_revision, paths) |
160 | 155 |
161 ### direct overrides. These are public methods which must be overridden. | 156 ### direct overrides. These are public methods which must be overridden. |
162 | 157 |
163 @property | 158 @property |
164 def repo_type(self): | 159 def repo_type(self): |
165 """Returns package_pb2.DepSpec.RepoType.""" | 160 """Returns package_pb2.DepSpec.RepoType.""" |
166 raise NotImplementedError() | 161 raise NotImplementedError() |
167 | 162 |
168 def fetch(self, refspec): | 163 def fetch(self, refspec): |
169 """Does a fetch for the provided refspec (e.g. get all data from remote), if | 164 """Does a fetch for the provided refspec (e.g. get all data from remote), if |
(...skipping 13 matching lines...) Expand all Loading... |
183 remote git repo, e.g. 'refs/heads/master', 'deadbeef...face', etc. | 178 remote git repo, e.g. 'refs/heads/master', 'deadbeef...face', etc. |
184 """ | 179 """ |
185 # TODO(iannucci): Alter the contract for this method so that it only checks | 180 # TODO(iannucci): Alter the contract for this method so that it only checks |
186 # out the files referred to according to the rules that the bundle | 181 # out the files referred to according to the rules that the bundle |
187 # subcommand uses. | 182 # subcommand uses. |
188 raise NotImplementedError() | 183 raise NotImplementedError() |
189 | 184 |
190 ### private overrides. Override these in the implementations, but don't call | 185 ### private overrides. Override these in the implementations, but don't call |
191 ### externally. | 186 ### externally. |
192 | 187 |
193 def _updates_impl(self, revision, other_revision): | 188 def _updates_impl(self, revision, other_revision, paths): |
194 """Returns a list of revisions |revision| through |other_revision|. This | 189 """Returns a list of revisions |revision| through |other_revision|. This |
195 includes |revision| and |other_revision|. | 190 includes |revision| and |other_revision|. |
196 | 191 |
| 192 If |paths| is a non-empty list, the history is scoped just to these paths. |
| 193 |
197 Args: | 194 Args: |
198 revision (str) - the first git commit | 195 revision (str) - the first git commit |
199 other_revision (str) - the second git commit | 196 other_revision (str) - the second git commit |
200 | 197 |
201 Returns list(CommitMetadata) - The commit metadata in the range | 198 Returns list(CommitMetadata) - The commit metadata in the range |
202 [revision,other_revision]. | 199 [revision,other_revision]. |
203 """ | 200 """ |
204 raise NotImplementedError() | 201 raise NotImplementedError() |
205 | 202 |
206 def _resolve_refspec_impl(self, refspec): | 203 def _resolve_refspec_impl(self, refspec): |
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
323 self.fetch(refspec) | 320 self.fetch(refspec) |
324 | 321 |
325 # reset touches index.lock which is problematic when multiple processes are | 322 # reset touches index.lock which is problematic when multiple processes are |
326 # accessing the recipes at the same time. To alleviate this, we do a quick | 323 # accessing the recipes at the same time. To alleviate this, we do a quick |
327 # diff, which will exit if `revision` is not already checked out. | 324 # diff, which will exit if `revision` is not already checked out. |
328 try: | 325 try: |
329 self._git('diff', '--quiet', revision) | 326 self._git('diff', '--quiet', revision) |
330 except GitError: | 327 except GitError: |
331 self._git('reset', '-q', '--hard', revision) | 328 self._git('reset', '-q', '--hard', revision) |
332 | 329 |
333 def _updates_impl(self, revision, other_revision): | 330 def _updates_impl(self, revision, other_revision, paths): |
334 args = [ | 331 args = [ |
335 'rev-list', | 332 'rev-list', |
336 '--reverse', | 333 '--reverse', |
337 '--topo-order', | 334 '--topo-order', |
338 '%s..%s' % (revision, other_revision), | 335 '%s..%s' % (revision, other_revision), |
339 ] | 336 ] |
| 337 if paths: |
| 338 args.extend(['--'] + paths) |
340 return [ | 339 return [ |
341 self.commit_metadata(rev) | 340 self.commit_metadata(rev) |
342 for rev in self._git(*args).strip().split('\n') | 341 for rev in self._git(*args).strip().split('\n') |
343 if bool(rev) | 342 if bool(rev) |
344 ] | 343 ] |
345 | 344 |
346 def _resolve_refspec_impl(self, revision): | 345 def _resolve_refspec_impl(self, revision): |
347 self._ensure_local_repo_exists() | 346 self._ensure_local_repo_exists() |
348 self.assert_remote('resolve refspec %r' % revision) | 347 self.assert_remote('resolve refspec %r' % revision) |
349 rslt = self._git('ls-remote', self.repo_url, revision).split()[0] | 348 rslt = self._git('ls-remote', self.repo_url, revision).split()[0] |
(...skipping 11 matching lines...) Expand all Loading... |
361 # %`Body` | 360 # %`Body` |
362 meta = self._git( | 361 meta = self._git( |
363 'show', '-s', '--format=%aE%n%ct%n%B', revision).rstrip('\n').splitlines() | 362 'show', '-s', '--format=%aE%n%ct%n%B', revision).rstrip('\n').splitlines() |
364 | 363 |
365 try: | 364 try: |
366 spec = package_io.parse(self._git( | 365 spec = package_io.parse(self._git( |
367 'cat-file', 'blob', '%s:infra/config/recipes.cfg' % revision)) | 366 'cat-file', 'blob', '%s:infra/config/recipes.cfg' % revision)) |
368 except GitError: | 367 except GitError: |
369 spec = None | 368 spec = None |
370 | 369 |
371 # check diff to see if it touches anything interesting. | |
372 changed_files = set(self._git( | |
373 'diff-tree', '-r', '--no-commit-id', '--name-only', '%s^!' % revision) | |
374 .splitlines()) | |
375 | |
376 return CommitMetadata(revision, meta[0], | 370 return CommitMetadata(revision, meta[0], |
377 int(meta[1]), tuple(meta[2:]), | 371 int(meta[1]), tuple(meta[2:]), |
378 spec, has_interesting_changes(spec, changed_files)) | 372 spec) |
379 | 373 |
380 class GitilesFetchError(FetchError): | 374 class GitilesFetchError(FetchError): |
381 """An HTTP error that occurred during Gitiles fetching.""" | 375 """An HTTP error that occurred during Gitiles fetching.""" |
382 | 376 |
383 def __init__(self, status, message): | 377 def __init__(self, status, message): |
384 super(GitilesFetchError, self).__init__( | 378 super(GitilesFetchError, self).__init__( |
385 'Gitiles error code (%d): %s' % (status, message)) | 379 'Gitiles error code (%d): %s' % (status, message)) |
386 self.status = status | 380 self.status = status |
387 self.message = message | 381 self.message = message |
388 | 382 |
389 @staticmethod | 383 @staticmethod |
390 def transient(e): | 384 def transient(e): |
391 """ | 385 """ |
392 Returns (bool): True if "e" is a GitilesFetchError with transient HTTP code. | 386 Returns (bool): True if "e" is a GitilesFetchError with transient HTTP code. |
393 """ | 387 """ |
394 return (isinstance(e, GitilesFetchError) and | 388 return (isinstance(e, GitilesFetchError) and |
395 e.status >= httplib.INTERNAL_SERVER_ERROR) | 389 e.status >= httplib.INTERNAL_SERVER_ERROR) |
396 | 390 |
397 | 391 |
398 # Internal cache object for GitilesBackend. | 392 # Internal cache object for GitilesBackend. |
399 # commit (str) - the git commit hash | 393 # commit (str) - the git commit hash |
400 # author_email (str) - the author email for this commit | 394 # author_email (str) - the author email for this commit |
401 # message_lines (tuple) - the lines of the commit message | 395 # message_lines (tuple) - the lines of the commit message |
402 # changed_files (frozenset) - all paths touched by this commit | |
403 class _GitilesCommitJson(namedtuple( | 396 class _GitilesCommitJson(namedtuple( |
404 '_GitilesCommitJson', | 397 '_GitilesCommitJson', |
405 'commit author_email commit_timestamp message_lines changed_files')): | 398 'commit author_email commit_timestamp message_lines')): |
406 @classmethod | 399 @classmethod |
407 def from_raw_json(cls, raw): | 400 def from_raw_json(cls, raw): |
408 mod_files = set() | |
409 for entry in raw['tree_diff']: | |
410 mod_files.add(entry['old_path']) | |
411 mod_files.add(entry['new_path']) | |
412 return cls( | 401 return cls( |
413 raw['commit'], | 402 raw['commit'], |
414 raw['author']['email'], | 403 raw['author']['email'], |
415 calendar.timegm(time.strptime(raw['committer']['time'])), | 404 calendar.timegm(time.strptime(raw['committer']['time'])), |
416 tuple(raw['message'].splitlines()), | 405 tuple(raw['message'].splitlines()), |
417 frozenset(mod_files), | |
418 ) | 406 ) |
419 | 407 |
420 | 408 |
421 class GitilesBackend(Backend): | 409 class GitilesBackend(Backend): |
422 """GitilesBackend uses a repo served by Gitiles.""" | 410 """GitilesBackend uses a repo served by Gitiles.""" |
423 | 411 |
424 # Prefix at the beginning of Gerrit/Gitiles JSON API responses. | 412 # Prefix at the beginning of Gerrit/Gitiles JSON API responses. |
425 _GERRIT_XSRF_HEADER = ')]}\'\n' | 413 _GERRIT_XSRF_HEADER = ')]}\'\n' |
426 | 414 |
427 @util.exponential_retry(condition=GitilesFetchError.transient) | 415 @util.exponential_retry(condition=GitilesFetchError.transient) |
428 def _fetch_gitiles(self, url_fmt, *args): | 416 def _fetch_gitiles(self, url_fmt, *args): |
429 """Fetches a remote URL path and returns the response object on success. | 417 """Fetches a remote URL path and returns the response object on success. |
430 | 418 |
431 Args: | 419 Args: |
432 url_fmt (str) - the url path fragment as a python %format string, like | 420 url_fmt (str) - the url path fragment as a python %format string, like |
433 '%s/foo/bar?something=value' | 421 '%s/foo/bar?something=value' |
434 *args (str) - the arguments to format url_fmt with. They will be URL | 422 *args (str) - the arguments to format url_fmt with. They will be URL |
435 escaped. | 423 escaped. |
436 | 424 |
437 Returns requests.Response. | 425 Returns requests.Response. |
438 """ | 426 """ |
439 url = '%s/%s' % (self.repo_url, | 427 url = '%s/%s' % (self.repo_url, |
440 url_fmt % tuple(map(requests.utils.quote, args))) | 428 url_fmt % tuple(map(requests.utils.quote, args))) |
441 LOGGER.info('fetching %s' % url) | 429 LOGGER.info('fetching %s' % url) |
442 resp = requests.get(url) | 430 resp = requests.get(url) |
443 if resp.status_code != httplib.OK: | 431 if resp.status_code != httplib.OK: |
444 raise GitilesFetchError(resp.status_code, resp.text) | 432 raise GitilesFetchError(resp.status_code, resp.text) |
445 return resp | 433 return resp |
446 | 434 |
447 def _fetch_gitiles_committish_json(self, url_fmt, *args): | 435 def _fetch_gitiles_json(self, url_fmt, *args): |
448 """Fetches a remote URL path and expects a JSON object on success. | 436 """Fetches a remote URL path and expects a JSON object on success. |
449 | 437 |
450 This appends two GET params to url_fmt: | |
451 format=JSON - Does what you expect | |
452 name-status=1 - Ensures that commit objects returned have a 'tree_diff' | |
453 member which shows the diff for that commit. | |
454 | |
455 Args: | 438 Args: |
456 url_fmt (str) - the url path fragment as a python %format string, like | 439 url_fmt (str) - the url path fragment as a python %format string, like |
457 '%s/foo/bar?something=value' | 440 '%s/foo/bar?something=value' |
458 *args (str) - the arguments to format url_fmt with. They will be URL | 441 *args (str) - the arguments to format url_fmt with. They will be URL |
459 escaped. | 442 escaped. |
460 | 443 |
461 Returns the decoded JSON object | 444 Returns the decoded JSON object |
462 """ | 445 """ |
463 resp = self._fetch_gitiles(url_fmt+'?name-status=1&format=JSON', *args) | 446 resp = self._fetch_gitiles(url_fmt, *args) |
464 if not resp.text.startswith(self._GERRIT_XSRF_HEADER): | 447 if not resp.text.startswith(self._GERRIT_XSRF_HEADER): |
465 raise GitilesFetchError(resp.status_code, 'Missing XSRF prefix') | 448 raise GitilesFetchError(resp.status_code, 'Missing XSRF prefix') |
466 return json.loads(resp.text[len(self._GERRIT_XSRF_HEADER):]) | 449 return json.loads(resp.text[len(self._GERRIT_XSRF_HEADER):]) |
467 | 450 |
468 # This caches entries from _fetch_commit_json. It's populated by | 451 # This caches entries from _fetch_commit_json. It's populated by |
469 # _fetch_commit_json as well as _updates_impl. | 452 # _fetch_commit_json as well as _updates_impl. |
470 # | 453 # |
471 # Mapping of: | 454 # Mapping of: |
472 # repo_url -> git_revision -> _GitilesCommitJson | 455 # repo_url -> git_revision -> _GitilesCommitJson |
473 # | 456 # |
474 # Only populated if _fetch_commit_json is passed a resolved commit. | 457 # Only populated if _fetch_commit_json is passed a resolved commit. |
475 _COMMIT_JSON_CACHE = {} | 458 _COMMIT_JSON_CACHE = {} |
476 | 459 |
477 def _fetch_commit_json(self, refspec): | 460 def _fetch_commit_json(self, refspec): |
478 """Returns _GitilesCommitJson for the refspec. | 461 """Returns _GitilesCommitJson for the refspec. |
479 | 462 |
480 If refspec is resolved then this value is cached. | 463 If refspec is resolved then this value is cached. |
481 """ | 464 """ |
482 c = self._COMMIT_JSON_CACHE.setdefault(self.repo_url, {}) | 465 c = self._COMMIT_JSON_CACHE.setdefault(self.repo_url, {}) |
483 if refspec in c: | 466 if refspec in c: |
484 return c[refspec] | 467 return c[refspec] |
485 | 468 |
486 raw = self._fetch_gitiles_committish_json('+/%s', refspec) | 469 raw = self._fetch_gitiles_json('+/%s?format=JSON', refspec) |
487 ret = _GitilesCommitJson.from_raw_json(raw) | 470 ret = _GitilesCommitJson.from_raw_json(raw) |
488 if self.is_resolved_revision(refspec): | 471 if self.is_resolved_revision(refspec): |
489 c[refspec] = ret | 472 c[refspec] = ret |
490 | 473 |
491 return ret | 474 return ret |
492 | 475 |
493 | 476 |
494 ### Backend implementations | 477 ### Backend implementations |
495 | 478 |
496 | 479 |
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
535 | 518 |
536 # TODO(iannucci): This implementation may be slow if we need to retrieve | 519 # TODO(iannucci): This implementation may be slow if we need to retrieve |
537 # multiple files/archives from the remote server. Should possibly consider | 520 # multiple files/archives from the remote server. Should possibly consider |
538 # using a thread pool here. | 521 # using a thread pool here. |
539 | 522 |
540 archive_response = self._fetch_gitiles( | 523 archive_response = self._fetch_gitiles( |
541 '+archive/%s/%s.tar.gz', revision, recipes_path_rel) | 524 '+archive/%s/%s.tar.gz', revision, recipes_path_rel) |
542 with tarfile.open(fileobj=StringIO(archive_response.content)) as tf: | 525 with tarfile.open(fileobj=StringIO(archive_response.content)) as tf: |
543 tf.extractall(recipes_path) | 526 tf.extractall(recipes_path) |
544 | 527 |
545 def _updates_impl(self, revision, other_revision): | 528 def _updates_impl(self, revision, other_revision, paths): |
546 self.assert_remote('_updates_impl') | 529 self.assert_remote('_updates_impl') |
547 | 530 |
548 # TODO(iannucci): implement paging | 531 # TODO(iannucci): implement paging |
549 | 532 |
550 log_json = self._fetch_gitiles_committish_json( | 533 # To include info about touched paths (tree_diff), pass name-status=1 below. |
551 '+log/%s..%s', revision, other_revision) | 534 log_json = self._fetch_gitiles_json( |
| 535 '+log/%s..%s?name-status=1&format=JSON', revision, other_revision) |
552 | 536 |
553 c = self._COMMIT_JSON_CACHE.setdefault(self.repo_url, {}) | 537 c = self._COMMIT_JSON_CACHE.setdefault(self.repo_url, {}) |
554 | 538 |
555 results = [] | 539 results = [] |
556 for entry in log_json['log']: | 540 for entry in log_json['log']: |
557 commit = entry['commit'] | 541 commit = entry['commit'] |
558 c[commit] = _GitilesCommitJson.from_raw_json(entry) | 542 c[commit] = _GitilesCommitJson.from_raw_json(entry) |
559 results.append(commit) | 543 |
| 544 matched = False |
| 545 for path in paths: |
| 546 for diff_entry in entry['tree_diff']: |
| 547 if (diff_entry['old_path'].startswith(path) or |
| 548 diff_entry['new_path'].startswith(path)): |
| 549 matched = True |
| 550 break |
| 551 if matched: |
| 552 break |
| 553 if matched or not paths: |
| 554 results.append(commit) |
560 | 555 |
561 results.reverse() | 556 results.reverse() |
562 return map(self.commit_metadata, results) | 557 return map(self.commit_metadata, results) |
563 | 558 |
564 def _resolve_refspec_impl(self, refspec): | 559 def _resolve_refspec_impl(self, refspec): |
565 if self.is_resolved_revision(refspec): | 560 if self.is_resolved_revision(refspec): |
566 return self.commit_metadata(refspec).commit | 561 return self.commit_metadata(refspec).commit |
567 return self._fetch_commit_json(refspec).commit | 562 return self._fetch_commit_json(refspec).commit |
568 | 563 |
569 def _commit_metadata_impl(self, revision): | 564 def _commit_metadata_impl(self, revision): |
570 self.assert_remote('_commit_metadata_impl') | 565 self.assert_remote('_commit_metadata_impl') |
571 rev_json = self._fetch_commit_json(revision) | 566 rev_json = self._fetch_commit_json(revision) |
572 | 567 |
573 recipes_cfg_text = self._fetch_gitiles( | 568 recipes_cfg_text = self._fetch_gitiles( |
574 '+/%s/infra/config/recipes.cfg?format=TEXT', revision | 569 '+/%s/infra/config/recipes.cfg?format=TEXT', revision |
575 ).text.decode('base64') | 570 ).text.decode('base64') |
576 spec = json_format.Parse( | 571 spec = json_format.Parse( |
577 recipes_cfg_text, package_pb2.Package(), ignore_unknown_fields=True) | 572 recipes_cfg_text, package_pb2.Package(), ignore_unknown_fields=True) |
578 | 573 |
579 return CommitMetadata( | 574 return CommitMetadata( |
580 revision, | 575 revision, |
581 rev_json.author_email, | 576 rev_json.author_email, |
582 rev_json.commit_timestamp, | 577 rev_json.commit_timestamp, |
583 rev_json.message_lines, | 578 rev_json.message_lines, |
584 spec, | 579 spec) |
585 has_interesting_changes(spec, rev_json.changed_files)) | |
OLD | NEW |