OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 # TODO(hinoka): Use logging. | 6 # TODO(hinoka): Use logging. |
7 | 7 |
8 import cStringIO | 8 import cStringIO |
9 import codecs | 9 import codecs |
10 import collections | 10 import collections |
(...skipping 68 matching lines...) | |
79 path.join(ROOT_DIR, # .recipe_deps | 79 path.join(ROOT_DIR, # .recipe_deps |
80 path.pardir, # slave | 80 path.pardir, # slave |
81 path.pardir, # scripts | 81 path.pardir, # scripts |
82 path.pardir), # build_internal | 82 path.pardir), # build_internal |
83 ]) | 83 ]) |
84 | 84 |
85 | 85 |
86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' | 86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' |
87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' | 87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' |
88 | 88 |
89 # Official builds use buildspecs, so this is a special case. | |
90 BUILDSPEC_TYPE = collections.namedtuple('buildspec', | |
91 ('container', 'version')) | |
92 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' | |
93 '(build|branches|releases)/(.+)$') | |
94 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' | |
95 'buildspec') | |
96 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' | 89 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' |
97 | 90 |
98 BUILDSPEC_COMMIT_RE = ( | 91 BUILDSPEC_COMMIT_RE = ( |
99 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), | 92 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), |
100 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), | 93 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), |
101 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), | 94 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), |
102 ) | 95 ) |
103 | 96 |
104 # Regular expression that matches a single commit footer line. | 97 # Regular expression that matches a single commit footer line. |
105 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') | 98 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') |
106 | 99 |
107 # Footer metadata keys for regular and gsubtreed mirrored commit positions. | 100 # Footer metadata keys for regular and gsubtreed mirrored commit positions. |
108 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' | 101 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' |
109 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' | 102 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' |
110 # Regular expression to parse a commit position | 103 # Regular expression to parse a commit position |
111 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') | 104 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') |
112 | 105 |
113 # Regular expression to parse gclient's revinfo entries. | 106 # Regular expression to parse gclient's revinfo entries. |
114 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') | 107 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') |
115 | 108 |
116 # Used by 'ResolveSvnRevisionFromGitiles' | |
117 GIT_SVN_PROJECT_MAP = { | |
118 'webkit': { | |
119 'svn_url': 'svn://svn.chromium.org/blink', | |
120 'branch_map': [ | |
121 (r'trunk', r'refs/heads/master'), | |
122 (r'branches/([^/]+)', r'refs/branch-heads/\1'), | |
123 ], | |
124 }, | |
125 'v8': { | |
126 'svn_url': 'https://v8.googlecode.com/svn', | |
127 'branch_map': [ | |
128 (r'trunk', r'refs/heads/candidates'), | |
129 (r'branches/bleeding_edge', r'refs/heads/master'), | |
130 (r'branches/([^/]+)', r'refs/branch-heads/\1'), | |
131 ], | |
132 }, | |
133 'nacl': { | |
134 'svn_url': 'svn://svn.chromium.org/native_client', | |
135 'branch_map': [ | |
136 (r'trunk/src/native_client', r'refs/heads/master'), | |
137 ], | |
138 }, | |
139 } | |
140 | |
141 # Key for the 'git-svn' ID metadata commit footer entry. | |
142 GIT_SVN_ID_FOOTER_KEY = 'git-svn-id' | |
143 # e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117 | |
144 # ce2b1a6d-e550-0410-aec6-3dcde31c8c00 | |
145 GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)') | |
146 | |
147 | |
148 # This is the git mirror of the buildspecs repository. We could rely on the svn | |
149 # checkout, now that the git buildspecs are checked in alongside the svn | |
150 # buildspecs, but we're going to want to pull all the buildspecs from here | |
151 # eventually anyhow, and there's already some logic to pull from git (for the | |
152 # old git_buildspecs.git repo), so just stick with that. | |
153 GIT_BUILDSPEC_REPO = ( | |
154 'https://chrome-internal.googlesource.com/chrome/tools/buildspec') | |
155 | 109 |
156 # Copied from scripts/recipes/chromium.py. | 110 # Copied from scripts/recipes/chromium.py. |
157 GOT_REVISION_MAPPINGS = { | 111 GOT_REVISION_MAPPINGS = { |
158 '/chrome/trunk/src': { | 112 CHROMIUM_SRC_URL: { |
159 'src/': 'got_revision', | 113 'src/': 'got_revision', |
160 'src/native_client/': 'got_nacl_revision', | 114 'src/native_client/': 'got_nacl_revision', |
161 'src/tools/swarm_client/': 'got_swarm_client_revision', | 115 'src/tools/swarm_client/': 'got_swarm_client_revision', |
162 'src/tools/swarming_client/': 'got_swarming_client_revision', | 116 'src/tools/swarming_client/': 'got_swarming_client_revision', |
163 'src/third_party/WebKit/': 'got_webkit_revision', | 117 'src/third_party/WebKit/': 'got_webkit_revision', |
164 'src/third_party/webrtc/': 'got_webrtc_revision', | 118 'src/third_party/webrtc/': 'got_webrtc_revision', |
165 'src/v8/': 'got_v8_revision', | 119 'src/v8/': 'got_v8_revision', |
166 } | 120 } |
167 } | 121 } |
168 | 122 |
169 | 123 |
170 BOT_UPDATE_MESSAGE = """ | 124 BOT_UPDATE_MESSAGE = """ |
171 What is the "Bot Update" step? | 125 What is the "Bot Update" step? |
172 ============================== | 126 ============================== |
173 | 127 |
174 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and | 128 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and |
175 its dependencies) is checked out in a consistent state. This means that all of | 129 its dependencies) is checked out in a consistent state. This means that all of |
176 the necessary repositories are checked out, no extra repositories are checked | 130 the necessary repositories are checked out, no extra repositories are checked |
177 out, and no locally modified files are present. | 131 out, and no locally modified files are present. |
178 | 132 |
179 These actions used to be taken care of by the "gclient revert" and "update" | 133 These actions used to be taken care of by the "gclient revert" and "update" |
180 steps. However, those steps are known to be buggy and occasionally flaky. This | 134 steps. However, those steps are known to be buggy and occasionally flaky. This |
181 step has two main advantages over them: | 135 step has two main advantages over them: |
182 * it only operates in Git, so the logic can be clearer and cleaner; and | 136 * it only operates in Git, so the logic can be clearer and cleaner; and |
183 * it is a slave-side script, so its behavior can be modified without | 137 * it is a slave-side script, so its behavior can be modified without |
184 restarting the master. | 138 restarting the master. |
185 | 139 |
186 Why Git, you ask? Because that is the direction that the Chromium project is | |
187 heading. This step is an integral part of the transition from using the SVN repo | |
188 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while | |
189 we fully convert everything to Git. This message will get out of your way | |
190 eventually, and the waterfall will be a happier place because of it. | |
191 | |
192 This step can be activated or deactivated independently on every builder on | |
193 every master. When it is active, the "gclient revert" and "update" steps become | |
194 no-ops. When it is inactive, it prints this message, cleans up after itself, and | |
195 lets everything else continue as though nothing has changed. Eventually, when | |
196 everything is stable enough, this step will replace them entirely. | |
197 | |
198 Debugging information: | 140 Debugging information: |
199 (master/builder/slave may be unspecified on recipes) | 141 (master/builder/slave may be unspecified on recipes) |
200 master: %(master)s | 142 master: %(master)s |
201 builder: %(builder)s | 143 builder: %(builder)s |
202 slave: %(slave)s | 144 slave: %(slave)s |
203 forced by recipes: %(recipe)s | 145 forced by recipes: %(recipe)s |
204 CURRENT_DIR: %(CURRENT_DIR)s | 146 CURRENT_DIR: %(CURRENT_DIR)s |
205 BUILDER_DIR: %(BUILDER_DIR)s | 147 BUILDER_DIR: %(BUILDER_DIR)s |
206 SLAVE_DIR: %(SLAVE_DIR)s | 148 SLAVE_DIR: %(SLAVE_DIR)s |
207 THIS_DIR: %(THIS_DIR)s | 149 THIS_DIR: %(THIS_DIR)s |
(...skipping 29 matching lines...) |
237 try: | 179 try: |
238 execfile(os.path.join( | 180 execfile(os.path.join( |
239 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), | 181 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), |
240 local_vars) | 182 local_vars) |
241 except Exception: | 183 except Exception: |
242 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. | 184 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. |
243 print 'Warning: unable to read internal configuration file.' | 185 print 'Warning: unable to read internal configuration file.' |
244 print 'If this is an internal bot, this step may be erroneously inactive.' | 186 print 'If this is an internal bot, this step may be erroneously inactive.' |
245 internal_data = local_vars | 187 internal_data = local_vars |
246 | 188 |
247 RECOGNIZED_PATHS = { | |
248 # If SVN path matches key, the entire URL is rewritten to the Git url. | |
249 '/chrome/trunk/src': | |
250 CHROMIUM_SRC_URL, | |
251 '/chrome/trunk/src/tools/cros.DEPS': | |
252 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git', | |
253 '/chrome-internal/trunk/src-internal': | |
254 'https://chrome-internal.googlesource.com/chrome/src-internal.git', | |
255 } | |
256 RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {})) | |
257 | 189 |
258 ENABLED_MASTERS = [ | 190 ENABLED_MASTERS = [ |
259 'bot_update.always_on', | 191 'bot_update.always_on', |
260 'chromium.android', | 192 'chromium.android', |
261 'chromium.angle', | 193 'chromium.angle', |
262 'chromium.chrome', | 194 'chromium.chrome', |
263 'chromium.chromedriver', | 195 'chromium.chromedriver', |
264 'chromium.chromiumos', | 196 'chromium.chromiumos', |
265 'chromium', | 197 'chromium', |
266 'chromium.fyi', | 198 'chromium.fyi', |
(...skipping 63 matching lines...) |
330 | 262 |
331 # Disabled filters get run AFTER enabled filters, so for example if a builder | 263 # Disabled filters get run AFTER enabled filters, so for example if a builder |
332 # config is enabled, but a bot on that builder is disabled, that bot will | 264 # config is enabled, but a bot on that builder is disabled, that bot will |
333 # be disabled. | 265 # be disabled. |
334 DISABLED_BUILDERS = {} | 266 DISABLED_BUILDERS = {} |
335 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) | 267 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) |
336 | 268 |
337 DISABLED_SLAVES = {} | 269 DISABLED_SLAVES = {} |
338 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) | 270 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) |
339 | 271 |
340 # These masters work only in Git, meaning for got_revision, always output | |
341 # a git hash rather than a SVN rev. | |
342 GIT_MASTERS = [ | |
343 'client.v8', | |
344 'client.v8.branches', | |
345 'client.v8.ports', | |
346 'tryserver.v8', | |
347 ] | |
348 GIT_MASTERS += internal_data.get('GIT_MASTERS', []) | |
349 | |
350 | |
351 # How many times to try before giving up. | 272 # How many times to try before giving up. |
352 ATTEMPTS = 5 | 273 ATTEMPTS = 5 |
353 | 274 |
354 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') | 275 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') |
355 | 276 |
356 # Find the patch tool. | 277 # Find the patch tool. |
357 if sys.platform.startswith('win'): | 278 if sys.platform.startswith('win'): |
358 if not BUILD_INTERNAL_DIR: | 279 if not BUILD_INTERNAL_DIR: |
359 print 'Warning: could not find patch tool because there is no ' | 280 print 'Warning: could not find patch tool because there is no ' |
360 print 'build_internal present.' | 281 print 'build_internal present.' |
(...skipping 16 matching lines...) |
377 | 298 |
378 | 299 |
379 class PatchFailed(SubprocessFailed): | 300 class PatchFailed(SubprocessFailed): |
380 pass | 301 pass |
381 | 302 |
382 | 303 |
383 class GclientSyncFailed(SubprocessFailed): | 304 class GclientSyncFailed(SubprocessFailed): |
384 pass | 305 pass |
385 | 306 |
386 | 307 |
387 class SVNRevisionNotFound(Exception): | |
388 pass | |
389 | |
390 | |
391 class InvalidDiff(Exception): | 308 class InvalidDiff(Exception): |
392 pass | 309 pass |
393 | 310 |
394 | 311 |
395 class Inactive(Exception): | |
396 """Not really an exception, just used to exit early cleanly.""" | |
397 pass | |
398 | |
399 | |
400 RETRY = object() | 312 RETRY = object() |
401 OK = object() | 313 OK = object() |
402 FAIL = object() | 314 FAIL = object() |
403 | 315 |
404 | 316 |
405 class PsPrinter(object): | 317 class PsPrinter(object): |
406 def __init__(self, interval=300): | 318 def __init__(self, interval=300): |
407 self.interval = interval | 319 self.interval = interval |
408 self.active = sys.platform.startswith('linux2') | 320 self.active = sys.platform.startswith('linux2') |
409 self.thread = None | 321 self.thread = None |
(...skipping 136 matching lines...) |
546 if slave_list and slave in slave_list: | 458 if slave_list and slave in slave_list: |
547 return True | 459 return True |
548 return False | 460 return False |
549 | 461 |
550 | 462 |
551 def check_valid_host(master, builder, slave): | 463 def check_valid_host(master, builder, slave): |
552 return (check_enabled(master, builder, slave) | 464 return (check_enabled(master, builder, slave) |
553 and not check_disabled(master, builder, slave)) | 465 and not check_disabled(master, builder, slave)) |
554 | 466 |
555 | 467 |
556 def maybe_ignore_revision(revision, buildspec): | |
557 """Handle builders that don't care what buildbot tells them to build. | |
558 | |
559 This is especially the case with branch builders that build from buildspecs | |
560 and/or trigger off multiple repositories, where the --revision passed in has | |
561 nothing to do with the solution being built. Clearing the revision in this | |
562 case causes bot_update to use HEAD rather that trying to checkout an | |
563 inappropriate version of the solution. | |
564 """ | |
565 if buildspec and buildspec.container == 'branches': | |
566 return [] | |
567 return revision | |
568 | |
569 | |
570 def solutions_printer(solutions): | 468 def solutions_printer(solutions): |
571 """Prints gclient solution to stdout.""" | 469 """Prints gclient solution to stdout.""" |
572 print 'Gclient Solutions' | 470 print 'Gclient Solutions' |
573 print '=================' | 471 print '=================' |
574 for solution in solutions: | 472 for solution in solutions: |
575 name = solution.get('name') | 473 name = solution.get('name') |
576 url = solution.get('url') | 474 url = solution.get('url') |
577 print '%s (%s)' % (name, url) | 475 print '%s (%s)' % (name, url) |
578 if solution.get('deps_file'): | 476 if solution.get('deps_file'): |
579 print ' Dependencies file is %s' % solution['deps_file'] | 477 print ' Dependencies file is %s' % solution['deps_file'] |
(...skipping 14 matching lines...) |
594 print ' %s: Ignore' % deps_name | 492 print ' %s: Ignore' % deps_name |
595 for k, v in solution.iteritems(): | 493 for k, v in solution.iteritems(): |
596 # Print out all the keys we don't know about. | 494 # Print out all the keys we don't know about. |
597 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', | 495 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', |
598 'managed']: | 496 'managed']: |
599 continue | 497 continue |
600 print ' %s is %s' % (k, v) | 498 print ' %s is %s' % (k, v) |
601 print | 499 print |
602 | 500 |
603 | 501 |
604 def solutions_to_git(input_solutions): | 502 def modify_solutions(input_solutions): |
605 """Modifies urls in solutions to point at Git repos. | 503 """Modifies urls in solutions to point at Git repos. |
606 | 504 |
607 returns: (git solution, svn root of first solution) tuple. | 505 returns: new solution dictionary |
608 """ | 506 """ |
609 assert input_solutions | 507 assert input_solutions |
610 solutions = copy.deepcopy(input_solutions) | 508 solutions = copy.deepcopy(input_solutions) |
611 first_solution = True | 509 first_solution = True |
iannucci 2016/09/07 19:45:55:
this variable is no longer used
agable 2016/09/08 00:29:17:
Removed.
612 buildspec = None | |
613 for solution in solutions: | 510 for solution in solutions: |
614 original_url = solution['url'] | 511 original_url = solution['url'] |
615 parsed_url = urlparse.urlparse(original_url) | 512 parsed_url = urlparse.urlparse(original_url) |
616 parsed_path = parsed_url.path | 513 parsed_path = parsed_url.path |
617 | 514 |
618 # Rewrite SVN urls into Git urls. | 515 if parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc: |
619 buildspec_m = re.match(BUILDSPEC_RE, parsed_path) | |
620 if first_solution and buildspec_m: | |
621 solution['url'] = GIT_BUILDSPEC_PATH | |
622 buildspec = BUILDSPEC_TYPE( | |
623 container=buildspec_m.group(1), | |
624 version=buildspec_m.group(2), | |
625 ) | |
626 solution['deps_file'] = path.join(buildspec.container, buildspec.version, | |
627 'DEPS') | |
628 elif parsed_path in RECOGNIZED_PATHS: | |
629 solution['url'] = RECOGNIZED_PATHS[parsed_path] | |
630 solution['deps_file'] = '.DEPS.git' | |
631 elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc: | |
632 pass | 516 pass |
633 else: | 517 else: |
634 print 'Warning: %s' % ('path %r not recognized' % parsed_path,) | 518 print 'Warning: %s' % ('path %r not recognized' % parsed_path,) |
iannucci 2016/09/07 19:45:55:
remove this entire condition... if we need it at a
agable 2016/09/08 00:29:17:
Removed.
635 | 519 |
636 # Strip out deps containing $$V8_REV$$, etc. | 520 # Strip out deps containing $$V8_REV$$, etc. |
637 if 'custom_deps' in solution: | 521 if 'custom_deps' in solution: |
638 new_custom_deps = {} | 522 new_custom_deps = {} |
639 for deps_name, deps_value in solution['custom_deps'].iteritems(): | 523 for deps_name, deps_value in solution['custom_deps'].iteritems(): |
640 if deps_value and '$$' in deps_value: | 524 if deps_value and '$$' in deps_value: |
iannucci 2016/09/07 19:45:55:
this is never used in recipes: https://cs.chromium
agable 2016/09/08 00:29:17:
Removed whole conditional stanza.
641 print 'Dropping %s:%s from custom deps' % (deps_name, deps_value) | 525 print 'Dropping %s:%s from custom deps' % (deps_name, deps_value) |
642 else: | 526 else: |
643 new_custom_deps[deps_name] = deps_value | 527 new_custom_deps[deps_name] = deps_value |
644 solution['custom_deps'] = new_custom_deps | 528 solution['custom_deps'] = new_custom_deps |
645 | 529 |
646 if first_solution: | |
647 root = parsed_path | |
648 first_solution = False | |
649 | |
650 solution['managed'] = False | 530 solution['managed'] = False |
651 # We don't want gclient to be using a safesync URL. Instead it should | 531 # We don't want gclient to be using a safesync URL. Instead it should |
652 # using the lkgr/lkcr branch/tags. | 532 # using the lkgr/lkcr branch/tags. |
653 if 'safesync_url' in solution: | 533 if 'safesync_url' in solution: |
654 print 'Removing safesync url %s from %s' % (solution['safesync_url'], | 534 print 'Removing safesync url %s from %s' % (solution['safesync_url'], |
655 parsed_path) | 535 parsed_path) |
656 del solution['safesync_url'] | 536 del solution['safesync_url'] |
657 return solutions, root, buildspec | 537 first_solution = False |
538 | |
539 return solutions | |
658 | 540 |
659 | 541 |
660 def remove(target): | 542 def remove(target): |
661 """Remove a target by moving it into build.dead.""" | 543 """Remove a target by moving it into build.dead.""" |
662 dead_folder = path.join(BUILDER_DIR, 'build.dead') | 544 dead_folder = path.join(BUILDER_DIR, 'build.dead') |
663 if not path.exists(dead_folder): | 545 if not path.exists(dead_folder): |
664 os.makedirs(dead_folder) | 546 os.makedirs(dead_folder) |
665 os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) | 547 os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) |
666 | 548 |
667 | 549 |
668 def ensure_no_checkout(dir_names, scm_dirname): | 550 def ensure_no_checkout(dir_names): |
669 """Ensure that there is no undesired checkout under build/. | 551 """Ensure that there is no undesired checkout under build/.""" |
670 | 552 has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, '.git')) |
671 If there is an incorrect checkout under build/, then | |
672 move build/ to build.dead/ | |
673 This function will check each directory in dir_names. | |
674 | |
675 scm_dirname is expected to be either ['.svn', '.git'] | |
676 """ | |
677 assert scm_dirname in ['.svn', '.git', '*'] | |
678 has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname)) | |
679 for dir_name in dir_names) | 553 for dir_name in dir_names) |
680 | 554 |
681 if has_checkout or scm_dirname == '*': | 555 if has_checkout: |
682 build_dir = os.getcwd() | 556 build_dir = os.getcwd() |
iannucci 2016/09/07 19:45:55:
can we move this os.getcwd() up and re-use it in t
agable 2016/09/08 00:29:17:
Done.
683 prefix = '' | |
684 if scm_dirname != '*': | |
685 prefix = '%s detected in checkout, ' % scm_dirname | |
686 | |
687 for filename in os.listdir(build_dir): | 557 for filename in os.listdir(build_dir): |
688 deletion_target = path.join(build_dir, filename) | 558 deletion_target = path.join(build_dir, filename) |
689 print '%sdeleting %s...' % (prefix, deletion_target), | 559 print '.git detected in checkout, deleting %s...' % deletion_target, |
690 remove(deletion_target) | 560 remove(deletion_target) |
691 print 'done' | 561 print 'done' |
692 | 562 |
693 | 563 |
694 def gclient_configure(solutions, target_os, target_os_only, git_cache_dir): | 564 def gclient_configure(solutions, target_os, target_os_only, git_cache_dir): |
695 """Should do the same thing as gclient --spec='...'.""" | 565 """Should do the same thing as gclient --spec='...'.""" |
696 with codecs.open('.gclient', mode='w', encoding='utf-8') as f: | 566 with codecs.open('.gclient', mode='w', encoding='utf-8') as f: |
697 f.write(get_gclient_spec( | 567 f.write(get_gclient_spec( |
698 solutions, target_os, target_os_only, git_cache_dir)) | 568 solutions, target_os, target_os_only, git_cache_dir)) |
699 | 569 |
(...skipping 73 matching lines...) |
773 footers[m.group(1)] = m.group(2).strip() | 643 footers[m.group(1)] = m.group(2).strip() |
774 return footers | 644 return footers |
775 | 645 |
776 | 646 |
777 def get_commit_message_footer(message, key): | 647 def get_commit_message_footer(message, key): |
778 """Returns: (str/None) The footer value for 'key', or None if none was found. | 648 """Returns: (str/None) The footer value for 'key', or None if none was found. |
779 """ | 649 """ |
780 return get_commit_message_footer_map(message).get(key) | 650 return get_commit_message_footer_map(message).get(key) |
781 | 651 |
782 | 652 |
783 def get_svn_rev(git_hash, dir_name): | |
784 log = git('log', '-1', git_hash, cwd=dir_name) | |
785 git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY) | |
786 if not git_svn_id: | |
787 return None | |
788 m = GIT_SVN_ID_RE.match(git_svn_id) | |
789 if not m: | |
790 return None | |
791 return int(m.group(2)) | |
792 | |
793 | |
794 def get_git_hash(revision, branch, sln_dir): | |
795 """We want to search for the SVN revision on the git-svn branch. | |
796 | |
797 Note that git will search backwards from origin/master. | |
798 """ | |
799 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision) | |
800 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch | |
801 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref] | |
802 result = git(*cmd, cwd=sln_dir).strip() | |
803 if result: | |
804 return result | |
805 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' % | |
806 (revision, sln_dir)) | |
807 | |
808 | |
809 def emit_log_lines(name, lines): | 653 def emit_log_lines(name, lines): |
810 for line in lines.splitlines(): | 654 for line in lines.splitlines(): |
811 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) | 655 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) |
812 print '@@@STEP_LOG_END@%s@@@' % name | 656 print '@@@STEP_LOG_END@%s@@@' % name |
813 | 657 |
814 | 658 |
815 def emit_properties(properties): | 659 def emit_properties(properties): |
816 for property_name, property_value in sorted(properties.items()): | 660 for property_name, property_value in sorted(properties.items()): |
817 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) | 661 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) |
818 | 662 |
(...skipping 34 matching lines...) |
853 | 697 |
854 | 698 |
855 def force_revision(folder_name, revision): | 699 def force_revision(folder_name, revision): |
856 split_revision = revision.split(':', 1) | 700 split_revision = revision.split(':', 1) |
857 branch = 'master' | 701 branch = 'master' |
858 if len(split_revision) == 2: | 702 if len(split_revision) == 2: |
859 # Support for "branch:revision" syntax. | 703 # Support for "branch:revision" syntax. |
860 branch, revision = split_revision | 704 branch, revision = split_revision |
861 | 705 |
862 if revision and revision.upper() != 'HEAD': | 706 if revision and revision.upper() != 'HEAD': |
863 if revision and revision.isdigit() and len(revision) < 40: | 707 git('checkout', '--force', revision, cwd=folder_name) |
864 # rev_num is really a svn revision number, convert it into a git hash. | |
865 git_ref = get_git_hash(int(revision), branch, folder_name) | |
866 else: | |
867 # rev_num is actually a git hash or ref, we can just use it. | |
868 git_ref = revision | |
869 git('checkout', '--force', git_ref, cwd=folder_name) | |
870 else: | 708 else: |
871 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch | 709 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
872 git('checkout', '--force', ref, cwd=folder_name) | 710 git('checkout', '--force', ref, cwd=folder_name) |
873 | 711 |
712 | |
874 def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): | 713 def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): |
875 build_dir = os.getcwd() | 714 build_dir = os.getcwd() |
876 # Before we do anything, break all git_cache locks. | 715 # Before we do anything, break all git_cache locks. |
877 if path.isdir(git_cache_dir): | 716 if path.isdir(git_cache_dir): |
878 git('cache', 'unlock', '-vv', '--force', '--all', | 717 git('cache', 'unlock', '-vv', '--force', '--all', |
879 '--cache-dir', git_cache_dir) | 718 '--cache-dir', git_cache_dir) |
880 for item in os.listdir(git_cache_dir): | 719 for item in os.listdir(git_cache_dir): |
881 filename = os.path.join(git_cache_dir, item) | 720 filename = os.path.join(git_cache_dir, item) |
882 if item.endswith('.lock'): | 721 if item.endswith('.lock'): |
883 raise Exception('%s exists after cache unlock' % filename) | 722 raise Exception('%s exists after cache unlock' % filename) |
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
924 # Exited abnormally, theres probably something wrong. | 763 # Exited abnormally, theres probably something wrong. |
925 # Lets wipe the checkout and try again. | 764 # Lets wipe the checkout and try again. |
926 tries_left -= 1 | 765 tries_left -= 1 |
927 if tries_left > 0: | 766 if tries_left > 0: |
928 print 'Something failed: %s.' % str(e) | 767 print 'Something failed: %s.' % str(e) |
929 print 'waiting 5 seconds and trying again...' | 768 print 'waiting 5 seconds and trying again...' |
930 time.sleep(5) | 769 time.sleep(5) |
931 else: | 770 else: |
932 raise | 771 raise |
933 remove(sln_dir) | 772 remove(sln_dir) |
934 except SVNRevisionNotFound: | |
935 tries_left -= 1 | |
936 if tries_left > 0: | |
937 # If we don't have the correct revision, wait and try again. | |
938 print 'We can\'t find revision %s.' % revision | |
939 print 'The svn to git replicator is probably falling behind.' | |
940 print 'waiting 5 seconds and trying again...' | |
941 time.sleep(5) | |
942 else: | |
943 raise | |
944 | 773 |
945 git('clean', '-dff', cwd=sln_dir) | 774 git('clean', '-dff', cwd=sln_dir) |
946 | 775 |
947 if first_solution: | 776 if first_solution: |
948 git_ref = git('log', '--format=%H', '--max-count=1', | 777 git_ref = git('log', '--format=%H', '--max-count=1', |
949 cwd=sln_dir).strip() | 778 cwd=sln_dir).strip() |
950 first_solution = False | 779 first_solution = False |
951 return git_ref | 780 return git_ref |
952 | 781 |
953 | 782 |
954 def _download(url): | 783 def _download(url): |
955 """Fetch url and return content, with retries for flake.""" | 784 """Fetch url and return content, with retries for flake.""" |
956 for attempt in xrange(ATTEMPTS): | 785 for attempt in xrange(ATTEMPTS): |
957 try: | 786 try: |
958 return urllib2.urlopen(url).read() | 787 return urllib2.urlopen(url).read() |
959 except Exception: | 788 except Exception: |
960 if attempt == ATTEMPTS - 1: | 789 if attempt == ATTEMPTS - 1: |
961 raise | 790 raise |
962 | 791 |
963 | 792 |
964 def parse_diff(diff): | |
965 """Takes a unified diff and returns a list of diffed files and their diffs. | |
966 | |
967 The return format is a list of pairs of: | |
968 (<filename>, <diff contents>) | |
969 <diff contents> is inclusive of the diff line. | |
970 """ | |
971 result = [] | |
972 current_diff = '' | |
973 current_header = None | |
974 for line in diff.splitlines(): | |
975 # "diff" is for git style patches, and "Index: " is for SVN style patches. | |
976 if line.startswith('diff') or line.startswith('Index: '): | |
977 if current_header: | |
978 # If we are in a diff portion, then save the diff. | |
979 result.append((current_header, '%s\n' % current_diff)) | |
980 git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line) | |
981 svn_header_match = re.match(r'Index: (.*)', line) | |
982 | |
983 if git_header_match: | |
984 # First, see if its a git style header. | |
985 from_file = git_header_match.group(1) | |
986 to_file = git_header_match.group(2) | |
987 if from_file != to_file and from_file.startswith('a/'): | |
988 # Sometimes git prepends 'a/' and 'b/' in front of file paths. | |
989 from_file = from_file[2:] | |
990 current_header = from_file | |
991 | |
992 elif svn_header_match: | |
993 # Otherwise, check if its an SVN style header. | |
994 current_header = svn_header_match.group(1) | |
995 | |
996 else: | |
997 # Otherwise... I'm not really sure what to do with this. | |
998 raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' % | |
999 (line, diff)) | |
1000 | |
1001 current_diff = '' | |
1002 current_diff += '%s\n' % line | |
1003 if current_header: | |
1004 # We hit EOF, gotta save the last diff. | |
1005 result.append((current_header, current_diff)) | |
1006 return result | |
1007 | |
1008 | |
1009 def get_svn_patch(patch_url): | |
1010 """Fetch patch from patch_url, return list of (filename, diff)""" | |
1011 svn_exe = 'svn.bat' if sys.platform.startswith('win') else 'svn' | |
1012 patch_data = call(svn_exe, 'cat', patch_url) | |
1013 return parse_diff(patch_data) | |
1014 | |
1015 | |
1016 def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None): | |
1017 """Expects a list of (filename, diff), applies it on top of patch_root.""" | |
1018 if whitelist: | |
1019 patches = [(name, diff) for name, diff in patches if name in whitelist] | |
1020 elif blacklist: | |
1021 patches = [(name, diff) for name, diff in patches if name not in blacklist] | |
1022 diffs = [diff for _, diff in patches] | |
1023 patch = ''.join(diffs) | |
1024 | |
1025 if patch: | |
1026 print '===Patching files===' | |
1027 for filename, _ in patches: | |
1028 print 'Patching %s' % filename | |
1029 try: | |
1030 call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward', | |
1031 stdin_data=patch, cwd=patch_root, tries=1) | |
1032 for filename, _ in patches: | |
1033 full_filename = path.abspath(path.join(patch_root, filename)) | |
1034 git('add', full_filename, cwd=path.dirname(full_filename)) | |
1035 except SubprocessFailed as e: | |
1036 raise PatchFailed(e.message, e.code, e.output) | |
1037 | |
1038 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, | 793 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, |
1039 email_file, key_file, whitelist=None, blacklist=None): | 794 email_file, key_file, whitelist=None, blacklist=None): |
1040 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') | 795 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') |
1041 else 'apply_issue') | 796 else 'apply_issue') |
1042 cmd = [apply_issue_bin, | 797 cmd = [apply_issue_bin, |
1043 # The patch will be applied on top of this directory. | 798 # The patch will be applied on top of this directory. |
1044 '--root_dir', root, | 799 '--root_dir', root, |
1045 # Tell apply_issue how to fetch the patch. | 800 # Tell apply_issue how to fetch the patch. |
1046 '--issue', issue, | 801 '--issue', issue, |
1047 '--server', server, | 802 '--server', server, |
(...skipping 77 matching lines...) |
1125 os.remove(flag_file) | 880 os.remove(flag_file) |
1126 | 881 |
1127 | 882 |
1128 def emit_flag(flag_file): | 883 def emit_flag(flag_file): |
1129 """Deposit a bot update flag on the system to tell gclient not to run.""" | 884 """Deposit a bot update flag on the system to tell gclient not to run.""" |
1130 print 'Emitting flag file at %s' % flag_file | 885 print 'Emitting flag file at %s' % flag_file |
1131 with open(flag_file, 'wb') as f: | 886 with open(flag_file, 'wb') as f: |
1132 f.write('Success!') | 887 f.write('Success!') |
1133 | 888 |
1134 | 889 |
1135 def get_commit_position_for_git_svn(url, revision): | |
1136 """Generates a commit position string for a 'git-svn' URL/revision. | |
1137 | |
1138 If the 'git-svn' URL maps to a known project, we will construct a commit | |
1139 position branch value by applying substitution on the SVN URL. | |
1140 """ | |
1141 # Identify the base URL so we can strip off trunk/branch name | |
1142 project_config = branch = None | |
1143 for _, project_config in GIT_SVN_PROJECT_MAP.iteritems(): | |
1144 if url.startswith(project_config['svn_url']): | |
1145 branch = url[len(project_config['svn_url']):] | |
1146 break | |
1147 | |
1148 if branch: | |
1149 # Strip any leading slashes | |
1150 branch = branch.lstrip('/') | |
1151 | |
1152 # Try and map the branch | |
1153 for pattern, repl in project_config.get('branch_map', ()): | |
1154 nbranch, subn = re.subn(pattern, repl, branch, count=1) | |
1155 if subn: | |
1156 print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % ( | |
1157 branch, nbranch) | |
1158 branch = nbranch | |
1159 break | |
1160 else: | |
1161 # Use generic 'svn' branch | |
1162 print 'INFO: Could not resolve project for SVN URL %r' % (url,) | |
1163 branch = 'svn' | |
1164 return '%s@{#%s}' % (branch, revision) | |
1165 | |
1166 | |
1167 def get_commit_position(git_path, revision='HEAD'): | 890 def get_commit_position(git_path, revision='HEAD'): |
iannucci 2016/09/07 19:45:55:
TODO; use git-footers tool for this
agable 2016/09/08 00:29:17:
Added
1168 """Dumps the 'git' log for a specific revision and parses out the commit | 891 """Dumps the 'git' log for a specific revision and parses out the commit |
1169 position. | 892 position. |
1170 | 893 |
1171 If a commit position metadata key is found, its value will be returned. | 894 If a commit position metadata key is found, its value will be returned. |
1172 | |
1173 Otherwise, we will search for a 'git-svn' metadata entry. If one is found, | |
1174 we will compose a commit position from it, using its SVN revision value as | |
1175 the revision. | |
1176 | |
1177 If the 'git-svn' URL maps to a known project, we will construct a commit | |
1178 position branch value by truncating the URL, mapping 'trunk' to | |
1179 "refs/heads/master". Otherwise, we will return the generic branch, 'svn'. | |
1180 """ | 895 """ |
1181 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) | 896 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) |
1182 footer_map = get_commit_message_footer_map(git_log) | 897 footer_map = get_commit_message_footer_map(git_log) |
1183 | 898 |
1184 # Search for commit position metadata | 899 # Search for commit position metadata |
1185 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or | 900 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or |
1186 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) | 901 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) |
1187 if value: | 902 if value: |
1188 return value | 903 return value |
1189 | |
1190 # Compose a commit position from 'git-svn' metadata | |
1191 value = footer_map.get(GIT_SVN_ID_FOOTER_KEY) | |
1192 if value: | |
1193 m = GIT_SVN_ID_RE.match(value) | |
1194 if not m: | |
1195 raise ValueError("Invalid 'git-svn' value: [%s]" % (value,)) | |
1196 return get_commit_position_for_git_svn(m.group(1), m.group(2)) | |
1197 return None | 904 return None |
1198 | 905 |
1199 | 906 |
1200 def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs): | 907 def parse_got_revision(gclient_output, got_revision_mapping): |
1201 """Translate git gclient revision mapping to build properties. | 908 """Translate git gclient revision mapping to build properties.""" |
1202 | |
1203 If use_svn_revs is True, then translate git hashes in the revision mapping | |
1204 to svn revision numbers. | |
1205 """ | |
1206 properties = {} | 909 properties = {} |
1207 solutions_output = { | 910 solutions_output = { |
1208 # Make sure path always ends with a single slash. | 911 # Make sure path always ends with a single slash. |
1209 '%s/' % path.rstrip('/') : solution_output for path, solution_output | 912 '%s/' % path.rstrip('/') : solution_output for path, solution_output |
1210 in gclient_output['solutions'].iteritems() | 913 in gclient_output['solutions'].iteritems() |
1211 } | 914 } |
1212 for dir_name, property_name in got_revision_mapping.iteritems(): | 915 for dir_name, property_name in got_revision_mapping.iteritems(): |
1213 # Make sure dir_name always ends with a single slash. | 916 # Make sure dir_name always ends with a single slash. |
1214 dir_name = '%s/' % dir_name.rstrip('/') | 917 dir_name = '%s/' % dir_name.rstrip('/') |
1215 if dir_name not in solutions_output: | 918 if dir_name not in solutions_output: |
1216 continue | 919 continue |
1217 solution_output = solutions_output[dir_name] | 920 solution_output = solutions_output[dir_name] |
1218 if solution_output.get('scm') is None: | 921 if solution_output.get('scm') is None: |
1219 # This is an ignored DEPS, so the output got_revision should be 'None'. | 922 # This is an ignored DEPS, so the output got_revision should be 'None'. |
1220 git_revision = revision = commit_position = None | 923 git_revision = revision = commit_position = None |
1221 else: | 924 else: |
1222 # Since we are using .DEPS.git, everything had better be git. | 925 # Since we are using .DEPS.git, everything had better be git. |
1223 assert solution_output.get('scm') == 'git' | 926 assert solution_output.get('scm') == 'git' |
1224 git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() | 927 revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() |
1225 if use_svn_revs: | |
1226 revision = get_svn_rev(git_revision, dir_name) | |
1227 if not revision: | |
1228 revision = git_revision | |
1229 else: | |
1230 revision = git_revision | |
1231 commit_position = get_commit_position(dir_name) | 928 commit_position = get_commit_position(dir_name) |
1232 | 929 |
1233 properties[property_name] = revision | 930 properties[property_name] = revision |
1234 if revision != git_revision: | 931 if revision != git_revision: |
1235 properties['%s_git' % property_name] = git_revision | 932 properties['%s_git' % property_name] = git_revision |
1236 if commit_position: | 933 if commit_position: |
1237 properties['%s_cp' % property_name] = commit_position | 934 properties['%s_cp' % property_name] = commit_position |
1238 | 935 |
1239 return properties | 936 return properties |
1240 | 937 |
(...skipping 11 matching lines...) |
1252 def ensure_deps_revisions(deps_url_mapping, solutions, revisions): | 949 def ensure_deps_revisions(deps_url_mapping, solutions, revisions): |
1253 """Ensure correct DEPS revisions, ignores solutions.""" | 950 """Ensure correct DEPS revisions, ignores solutions.""" |
1254 for deps_name, deps_data in sorted(deps_url_mapping.items()): | 951 for deps_name, deps_data in sorted(deps_url_mapping.items()): |
1255 if deps_name.strip('/') in solutions: | 952 if deps_name.strip('/') in solutions: |
1256 # This has already been forced to the correct solution by git_checkout(). | 953 # This has already been forced to the correct solution by git_checkout(). |
1257 continue | 954 continue |
1258 revision = get_target_revision(deps_name, deps_data.get('url', None), | 955 revision = get_target_revision(deps_name, deps_data.get('url', None), |
1259 revisions) | 956 revisions) |
1260 if not revision: | 957 if not revision: |
1261 continue | 958 continue |
1262 # TODO(hinoka): Catch SVNRevisionNotFound error maybe? | |
1263 git('fetch', 'origin', cwd=deps_name) | 959 git('fetch', 'origin', cwd=deps_name) |
1264 force_revision(deps_name, revision) | 960 force_revision(deps_name, revision) |
1265 | 961 |
1266 | 962 |
1267 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, | 963 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
1268 patch_root, issue, patchset, patch_url, rietveld_server, | 964 patch_root, issue, patchset, rietveld_server, |
1269 gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, | 965 gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, |
1270 revision_mapping, apply_issue_email_file, | 966 revision_mapping, apply_issue_email_file, |
1271 apply_issue_key_file, buildspec, gyp_env, shallow, runhooks, | 967 apply_issue_key_file, gyp_env, shallow, runhooks, |
1272 refs, git_cache_dir, gerrit_reset): | 968 refs, git_cache_dir, gerrit_reset): |
1273 # Get a checkout of each solution, without DEPS or hooks. | 969 # Get a checkout of each solution, without DEPS or hooks. |
1274 # Calling git directly because there is no way to run Gclient without | 970 # Calling git directly because there is no way to run Gclient without |
1275 # invoking DEPS. | 971 # invoking DEPS. |
1276 print 'Fetching Git checkout' | 972 print 'Fetching Git checkout' |
1277 | 973 |
1278 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) | 974 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) |
1279 | 975 |
1280 patches = None | 976 patches = None |
1281 if patch_url: | |
1282 patches = get_svn_patch(patch_url) | |
1283 | 977 |
1284 print '===Processing patch solutions===' | 978 print '===Processing patch solutions===' |
1285 already_patched = [] | 979 already_patched = [] |
1286 patch_root = patch_root or '' | 980 patch_root = patch_root or '' |
1287 applied_gerrit_patch = False | 981 applied_gerrit_patch = False |
1288 print 'Patch root is %r' % patch_root | 982 print 'Patch root is %r' % patch_root |
1289 for solution in solutions: | 983 for solution in solutions: |
1290 print 'Processing solution %r' % solution['name'] | 984 print 'Processing solution %r' % solution['name'] |
1291 if (patch_root == solution['name'] or | 985 if (patch_root == solution['name'] or |
1292 solution['name'].startswith(patch_root + '/')): | 986 solution['name'].startswith(patch_root + '/')): |
1293 relative_root = solution['name'][len(patch_root) + 1:] | 987 relative_root = solution['name'][len(patch_root) + 1:] |
1294 target = '/'.join([relative_root, 'DEPS']).lstrip('/') | 988 target = '/'.join([relative_root, 'DEPS']).lstrip('/') |
1295 print ' relative root is %r, target is %r' % (relative_root, target) | 989 print ' relative root is %r, target is %r' % (relative_root, target) |
1296 if patches: | 990 if issue: |
1297 apply_svn_patch(patch_root, patches, whitelist=[target]) | |
1298 already_patched.append(target) | |
1299 elif issue: | |
1300 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, | 991 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
1301 revision_mapping, git_ref, apply_issue_email_file, | 992 revision_mapping, git_ref, apply_issue_email_file, |
1302 apply_issue_key_file, whitelist=[target]) | 993 apply_issue_key_file, whitelist=[target]) |
1303 already_patched.append(target) | 994 already_patched.append(target) |
1304 elif gerrit_ref: | 995 elif gerrit_ref: |
1305 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, | 996 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, |
1306 gerrit_rebase_patch_ref) | 997 gerrit_rebase_patch_ref) |
1307 applied_gerrit_patch = True | 998 applied_gerrit_patch = True |
1308 | 999 |
1309 # Ensure our build/ directory is set up with the correct .gclient file. | 1000 # Ensure our build/ directory is set up with the correct .gclient file. |
1310 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) | 1001 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) |
1311 | 1002 |
1312 # Let gclient do the DEPS syncing. | 1003 # Let gclient do the DEPS syncing. |
1313 # The branch-head refspec is a special case because its possible Chrome | 1004 # The branch-head refspec is a special case because its possible Chrome |
1314 # src, which contains the branch-head refspecs, is DEPSed in. | 1005 # src, which contains the branch-head refspecs, is DEPSed in. |
1315 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs, | 1006 gclient_output = gclient_sync(BRANCH_HEADS_REFSPEC in refs, shallow) |
1316 shallow) | |
1317 | 1007 |
1318 # Now that gclient_sync has finished, we should revert any .DEPS.git so that | 1008 # Now that gclient_sync has finished, we should revert any .DEPS.git so that |
1319 # presubmit doesn't complain about it being modified. | 1009 # presubmit doesn't complain about it being modified. |
1320 if (not buildspec and | 1010 if git('ls-files', '.DEPS.git', cwd=first_sln).strip(): |
1321 git('ls-files', '.DEPS.git', cwd=first_sln).strip()): | |
1322 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln) | 1011 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln) |
1323 | 1012 |
1324 if buildspec and runhooks: | |
1325 # Run gclient runhooks if we're on an official builder. | |
1326 # TODO(hinoka): Remove this when the official builders run their own | |
1327 # runhooks step. | |
1328 gclient_runhooks(gyp_env) | |
1329 | |
1330 # Finally, ensure that all DEPS are pinned to the correct revision. | 1013 # Finally, ensure that all DEPS are pinned to the correct revision. |
1331 dir_names = [sln['name'] for sln in solutions] | 1014 dir_names = [sln['name'] for sln in solutions] |
1332 ensure_deps_revisions(gclient_output.get('solutions', {}), | 1015 ensure_deps_revisions(gclient_output.get('solutions', {}), |
1333 dir_names, revisions) | 1016 dir_names, revisions) |
1334 # Apply the rest of the patch here (sans DEPS) | 1017 # Apply the rest of the patch here (sans DEPS) |
1335 if patches: | 1018 if issue: |
1336 apply_svn_patch(patch_root, patches, blacklist=already_patched) | |
1337 elif issue: | |
1338 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, | 1019 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
1339 revision_mapping, git_ref, apply_issue_email_file, | 1020 revision_mapping, git_ref, apply_issue_email_file, |
1340 apply_issue_key_file, blacklist=already_patched) | 1021 apply_issue_key_file, blacklist=already_patched) |
1341 elif gerrit_ref and not applied_gerrit_patch: | 1022 elif gerrit_ref and not applied_gerrit_patch: |
1342 # If gerrit_ref was for solution's main repository, it has already been | 1023 # If gerrit_ref was for solution's main repository, it has already been |
1343 # applied above. This chunk is executed only for patches to DEPS-ed in | 1024 # applied above. This chunk is executed only for patches to DEPS-ed in |
1344 # git repositories. | 1025 # git repositories. |
1345 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, | 1026 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, |
1346 gerrit_rebase_patch_ref) | 1027 gerrit_rebase_patch_ref) |
1347 | 1028 |
(...skipping 19 matching lines...) Expand all Loading... | |
1367 expanded_revisions.extend(revision.split(',')) | 1048 expanded_revisions.extend(revision.split(',')) |
1368 for revision in expanded_revisions: | 1049 for revision in expanded_revisions: |
1369 split_revision = revision.split('@') | 1050 split_revision = revision.split('@') |
1370 if len(split_revision) == 1: | 1051 if len(split_revision) == 1: |
1371 # This is just a plain revision, set it as the revision for root. | 1052 # This is just a plain revision, set it as the revision for root. |
1372 results[root] = split_revision[0] | 1053 results[root] = split_revision[0] |
1373 elif len(split_revision) == 2: | 1054 elif len(split_revision) == 2: |
1374 # This is an alt_root@revision argument. | 1055 # This is an alt_root@revision argument. |
1375 current_root, current_rev = split_revision | 1056 current_root, current_rev = split_revision |
1376 | 1057 |
1377 # We want to normalize svn/git urls into .git urls. | |
1378 parsed_root = urlparse.urlparse(current_root) | 1058 parsed_root = urlparse.urlparse(current_root) |
1379 if parsed_root.scheme == 'svn': | 1059 if parsed_root.scheme in ['http', 'https']: |
1380 if parsed_root.path in RECOGNIZED_PATHS: | 1060 # We want to normalize git urls into .git urls. |
1381 normalized_root = RECOGNIZED_PATHS[parsed_root.path] | |
1382 else: | |
1383 print 'WARNING: SVN path %s not recognized, ignoring' % current_root | |
1384 continue | |
1385 elif parsed_root.scheme in ['http', 'https']: | |
1386 normalized_root = 'https://%s/%s' % (parsed_root.netloc, | 1061 normalized_root = 'https://%s/%s' % (parsed_root.netloc, |
1387 parsed_root.path) | 1062 parsed_root.path) |
1388 if not normalized_root.endswith('.git'): | 1063 if not normalized_root.endswith('.git'): |
1389 normalized_root = '%s.git' % normalized_root | 1064 normalized_root = '%s.git' % normalized_root |
1390 elif parsed_root.scheme: | 1065 elif parsed_root.scheme: |
1391 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme | 1066 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme |
1392 continue | 1067 continue |
1393 else: | 1068 else: |
1394 # This is probably a local path. | 1069 # This is probably a local path. |
1395 normalized_root = current_root.strip('/') | 1070 normalized_root = current_root.strip('/') |
1396 | 1071 |
1397 results[normalized_root] = current_rev | 1072 results[normalized_root] = current_rev |
1398 else: | 1073 else: |
1399 print ('WARNING: %r is not recognized as a valid revision specification,' | 1074 print ('WARNING: %r is not recognized as a valid revision specification,' |
1400 'skipping' % revision) | 1075 'skipping' % revision) |
1401 return results | 1076 return results |
1402 | 1077 |
1403 | 1078 |
1404 def parse_args(): | 1079 def parse_args(): |
1405 parse = optparse.OptionParser() | 1080 parse = optparse.OptionParser() |
1406 | 1081 |
1407 parse.add_option('--issue', help='Issue number to patch from.') | 1082 parse.add_option('--issue', help='Issue number to patch from.') |
1408 parse.add_option('--patchset', | 1083 parse.add_option('--patchset', |
1409 help='Patchset from issue to patch from, if applicable.') | 1084 help='Patchset from issue to patch from, if applicable.') |
1410 parse.add_option('--apply_issue_email_file', | 1085 parse.add_option('--apply_issue_email_file', |
1411 help='--email-file option passthrough for apply_patch.py.') | 1086 help='--email-file option passthrough for apply_patch.py.') |
1412 parse.add_option('--apply_issue_key_file', | 1087 parse.add_option('--apply_issue_key_file', |
1413 help='--private-key-file option passthrough for ' | 1088 help='--private-key-file option passthrough for ' |
1414 'apply_patch.py.') | 1089 'apply_patch.py.') |
1415 parse.add_option('--patch_url', help='Optional URL to SVN patch.') | 1090 parse.add_option('--patch_url', help='DEPRECATED') |
iannucci 2016/09/07 19:45:55:
I already removed this, but note that this is a re
agable 2016/09/08 00:29:17:
Removed.
1416 parse.add_option('--root', dest='patch_root', | 1091 parse.add_option('--root', dest='patch_root', |
1417 help='DEPRECATED: Use --patch_root.') | 1092 help='DEPRECATED: Use --patch_root.') |
1418 parse.add_option('--patch_root', help='Directory to patch on top of.') | 1093 parse.add_option('--patch_root', help='Directory to patch on top of.') |
1419 parse.add_option('--rietveld_server', | 1094 parse.add_option('--rietveld_server', |
1420 default='codereview.chromium.org', | 1095 default='codereview.chromium.org', |
1421 help='Rietveld server.') | 1096 help='Rietveld server.') |
1422 parse.add_option('--gerrit_repo', | 1097 parse.add_option('--gerrit_repo', |
1423 help='Gerrit repository to pull the ref from.') | 1098 help='Gerrit repository to pull the ref from.') |
1424 parse.add_option('--gerrit_ref', help='Gerrit ref to apply.') | 1099 parse.add_option('--gerrit_ref', help='Gerrit ref to apply.') |
1425 parse.add_option('--gerrit_no_rebase_patch_ref', action='store_true', | 1100 parse.add_option('--gerrit_no_rebase_patch_ref', action='store_true', |
1426 help='Bypass rebase of Gerrit patch ref after checkout.') | 1101 help='Bypass rebase of Gerrit patch ref after checkout.') |
1427 parse.add_option('--gerrit_no_reset', action='store_true', | 1102 parse.add_option('--gerrit_no_reset', action='store_true', |
1428 help='Bypass calling reset after applying a gerrit ref.') | 1103 help='Bypass calling reset after applying a gerrit ref.') |
1429 parse.add_option('--specs', help='Gcilent spec.') | 1104 parse.add_option('--specs', help='Gcilent spec.') |
1430 parse.add_option('--master', | 1105 parse.add_option('--master', |
1431 help='Master name. If specified and it is not in ' | 1106 help='Master name. If specified and it is not in ' |
1432 'bot_update\'s whitelist, bot_update will be noop.') | 1107 'bot_update\'s whitelist, bot_update will be noop.') |
1433 parse.add_option('-f', '--force', action='store_true', | 1108 parse.add_option('-f', '--force', action='store_true', |
1434 help='Bypass check to see if we want to be run. ' | 1109 help='Bypass check to see if we want to be run. ' |
1435 'Should ONLY be used locally or by smart recipes.') | 1110 'Should ONLY be used locally or by smart recipes.') |
1436 parse.add_option('--revision_mapping', | 1111 parse.add_option('--revision_mapping', |
1437 help='{"path/to/repo/": "property_name"}') | 1112 help='{"path/to/repo/": "property_name"}') |
1438 parse.add_option('--revision_mapping_file', | 1113 parse.add_option('--revision_mapping_file', |
1439 help=('Same as revision_mapping, except its a path to a json' | 1114 help=('Same as revision_mapping, except its a path to a json' |
1440 ' file containing that format.')) | 1115 ' file containing that format.')) |
1441 parse.add_option('--revision', action='append', default=[], | 1116 parse.add_option('--revision', action='append', default=[], |
1442 help='Revision to check out. Can be an SVN revision number, ' | 1117 help='Revision to check out. Can be any form of git ref. ' |
1443 'git hash, or any form of git ref. Can prepend ' | 1118 'Can prepend root@<rev> to specify which repository, ' |
1444 'root@<rev> to specify which repository, where root ' | 1119 'where root is either a filesystem path or git https ' |
1445 'is either a filesystem path, git https url, or ' | 1120 'url. To specify Tip of Tree, set rev to HEAD. ') |
1446 'svn url. To specify Tip of Tree, set rev to HEAD.' | |
1447 'To specify a git branch and an SVN rev, <rev> can be ' | |
1448 'set to <branch>:<revision>.') | |
1449 parse.add_option('--output_manifest', action='store_true', | 1121 parse.add_option('--output_manifest', action='store_true', |
1450 help=('Add manifest json to the json output.')) | 1122 help=('Add manifest json to the json output.')) |
1451 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], | 1123 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], |
1452 help='Hostname of the current machine, ' | 1124 help='Hostname of the current machine, ' |
1453 'used for determining whether or not to activate.') | 1125 'used for determining whether or not to activate.') |
1454 parse.add_option('--builder_name', help='Name of the builder, ' | 1126 parse.add_option('--builder_name', help='Name of the builder, ' |
1455 'used for determining whether or not to activate.') | 1127 'used for determining whether or not to activate.') |
1456 parse.add_option('--build_dir', default=os.getcwd()) | 1128 parse.add_option('--build_dir', default=os.getcwd()) |
1457 parse.add_option('--flag_file', default=path.join(os.getcwd(), | 1129 parse.add_option('--flag_file', default=path.join(os.getcwd(), |
1458 'update.flag')) | 1130 'update.flag')) |
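Two of the flags above take structured values that are easy to get wrong: --revision accepts either a bare ref or a root@<rev> pair, and --revision_mapping (or --revision_mapping_file) takes a JSON object mapping checkout paths to output property names. The sketch below only illustrates those shapes; split_revision_spec is a hypothetical stand-in, not bot_update's parse_revisions, and the mapping values are made-up examples.

import json

def split_revision_spec(spec, default_root):
    # Hypothetical illustration only; splits on the last '@' so URLs such as
    # 'https://host/repo.git@HEAD' keep their scheme.
    if '@' in spec:
        root, _, rev = spec.rpartition('@')
        return root or default_root, rev
    return default_root, spec

assert split_revision_spec('HEAD', 'src') == ('src', 'HEAD')
assert split_revision_spec('src@deadbeef', 'src') == ('src', 'deadbeef')

# Assumed example of a --revision_mapping value (or the contents of the file
# passed to --revision_mapping_file): checkout path -> output property name.
mapping = json.loads(
    '{"src": "got_revision", "src/third_party/WebKit": "got_webkit_revision"}')
assert mapping['src'] == 'got_revision'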
(...skipping 53 matching lines...) | |
1512 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') | 1184 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') |
1513 | 1185 |
1514 return options, args | 1186 return options, args |
1515 | 1187 |
1516 | 1188 |
1517 def prepare(options, git_slns, active): | 1189 def prepare(options, git_slns, active): |
1518 """Prepares the target folder before we checkout.""" | 1190 """Prepares the target folder before we checkout.""" |
1519 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1191 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
1520 # If we're active now, but the flag file doesn't exist (we weren't active | 1192 # If we're active now, but the flag file doesn't exist (we weren't active |
1521 # last run) or vice versa, blow away all checkouts. | 1193 # last run) or vice versa, blow away all checkouts. |
1522 if bool(active) != bool(check_flag(options.flag_file)): | 1194 if options.clobber or (bool(active) != bool(check_flag(options.flag_file))): |
1523 ensure_no_checkout(dir_names, '*') | 1195 ensure_no_checkout(dir_names) |
1524 if options.output_json: | 1196 if options.output_json: |
1525 # Make sure we tell recipes that we didn't run if the script exits here. | 1197 # Make sure we tell recipes that we didn't run if the script exits here. |
1526 emit_json(options.output_json, did_run=active) | 1198 emit_json(options.output_json, did_run=active) |
1527 if active: | 1199 emit_flag(options.flag_file) |
1528 if options.clobber: | |
1529 ensure_no_checkout(dir_names, '*') | |
1530 else: | |
1531 ensure_no_checkout(dir_names, '.svn') | |
1532 emit_flag(options.flag_file) | |
1533 else: | |
1534 delete_flag(options.flag_file) | |
1535 raise Inactive # This is caught in main() and we exit cleanly. | |
1536 | 1200 |
1537 # Do a shallow checkout if the disk is less than 100GB. | 1201 # Do a shallow checkout if the disk is less than 100GB. |
1538 total_disk_space, free_disk_space = get_total_disk_space() | 1202 total_disk_space, free_disk_space = get_total_disk_space() |
1539 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) | 1203 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) |
1540 used_disk_space_gb = int((total_disk_space - free_disk_space) | 1204 used_disk_space_gb = int((total_disk_space - free_disk_space) |
1541 / (1024 * 1024 * 1024)) | 1205 / (1024 * 1024 * 1024)) |
1542 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) | 1206 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) |
1543 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, | 1207 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, |
1544 total_disk_space_gb, | 1208 total_disk_space_gb, |
1545 percent_used) | 1209 percent_used) |
1546 if not options.output_json: | 1210 if not options.output_json: |
1547 print '@@@STEP_TEXT@%s@@@' % step_text | 1211 print '@@@STEP_TEXT@%s@@@' % step_text |
1548 if not options.shallow: | 1212 if not options.shallow: |
1549 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD | 1213 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD |
1550 and not options.no_shallow) | 1214 and not options.no_shallow) |
1551 | 1215 |
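get_total_disk_space is defined earlier in the file and is not shown in this hunk; purely to make the arithmetic above concrete, a POSIX-only approximation built on os.statvfs could look like the sketch below (the real helper may differ, e.g. on Windows).

import os

def get_total_disk_space_posix(path='.'):
    # POSIX-only stand-in returning (total_bytes, free_bytes).
    st = os.statvfs(path)
    return st.f_blocks * st.f_frsize, st.f_bavail * st.f_frsize

gb = 1024 * 1024 * 1024
total_bytes, free_bytes = get_total_disk_space_posix()
total_gb = int(total_bytes / gb)
used_gb = int((total_bytes - free_bytes) / gb)
step_text = '[%dGB/%dGB used (%d%%)]' % (used_gb, total_gb, used_gb * 100 / total_gb)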
1552 # The first solution is where the primary DEPS file resides. | 1216 # The first solution is where the primary DEPS file resides. |
1553 first_sln = dir_names[0] | 1217 first_sln = dir_names[0] |
1554 | 1218 |
1555 # Split all the revision specifications into a nice dict. | 1219 # Split all the revision specifications into a nice dict. |
1556 print 'Revisions: %s' % options.revision | 1220 print 'Revisions: %s' % options.revision |
1557 revisions = parse_revisions(options.revision, first_sln) | 1221 revisions = parse_revisions(options.revision, first_sln) |
1558 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) | 1222 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) |
1559 return revisions, step_text | 1223 return revisions, step_text |
1560 | 1224 |
1561 | 1225 |
1562 def checkout(options, git_slns, specs, buildspec, master, | 1226 def checkout(options, git_slns, specs, master, revisions, step_text): |
1563 svn_root, revisions, step_text): | |
1564 first_sln = git_slns[0]['name'] | 1227 first_sln = git_slns[0]['name'] |
1565 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1228 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
1566 try: | 1229 try: |
1567 # Outer try is for catching patch failures and exiting gracefully. | 1230 # Outer try is for catching patch failures and exiting gracefully. |
1568 # Inner try is for catching gclient failures and retrying gracefully. | 1231 # Inner try is for catching gclient failures and retrying gracefully. |
1569 try: | 1232 try: |
1570 checkout_parameters = dict( | 1233 checkout_parameters = dict( |
1571 # First, pass in the base of what we want to check out. | 1234 # First, pass in the base of what we want to check out. |
1572 solutions=git_slns, | 1235 solutions=git_slns, |
1573 revisions=revisions, | 1236 revisions=revisions, |
1574 first_sln=first_sln, | 1237 first_sln=first_sln, |
1575 | 1238 |
1576 # Also, target os variables for gclient. | 1239 # Also, target os variables for gclient. |
1577 target_os=specs.get('target_os', []), | 1240 target_os=specs.get('target_os', []), |
1578 target_os_only=specs.get('target_os_only', False), | 1241 target_os_only=specs.get('target_os_only', False), |
1579 | 1242 |
1580 # Then, pass in information about how to patch. | 1243 # Then, pass in information about how to patch. |
1581 patch_root=options.patch_root, | 1244 patch_root=options.patch_root, |
1582 issue=options.issue, | 1245 issue=options.issue, |
1583 patchset=options.patchset, | 1246 patchset=options.patchset, |
1584 patch_url=options.patch_url, | |
1585 rietveld_server=options.rietveld_server, | 1247 rietveld_server=options.rietveld_server, |
1586 gerrit_repo=options.gerrit_repo, | 1248 gerrit_repo=options.gerrit_repo, |
1587 gerrit_ref=options.gerrit_ref, | 1249 gerrit_ref=options.gerrit_ref, |
1588 gerrit_rebase_patch_ref=not options.gerrit_no_rebase_patch_ref, | 1250 gerrit_rebase_patch_ref=not options.gerrit_no_rebase_patch_ref, |
1589 revision_mapping=options.revision_mapping, | 1251 revision_mapping=options.revision_mapping, |
1590 apply_issue_email_file=options.apply_issue_email_file, | 1252 apply_issue_email_file=options.apply_issue_email_file, |
1591 apply_issue_key_file=options.apply_issue_key_file, | 1253 apply_issue_key_file=options.apply_issue_key_file, |
1592 | 1254 |
1593 # For official builders. | 1255 # For official builders. |
1594 buildspec=buildspec, | |
1595 gyp_env=options.gyp_env, | 1256 gyp_env=options.gyp_env, |
1596 runhooks=not options.no_runhooks, | 1257 runhooks=not options.no_runhooks, |
1597 | 1258 |
1598 # Finally, extra configurations such as shallowness of the clone. | 1259 # Finally, extra configurations such as shallowness of the clone. |
1599 shallow=options.shallow, | 1260 shallow=options.shallow, |
1600 refs=options.refs, | 1261 refs=options.refs, |
1601 git_cache_dir=options.git_cache_dir, | 1262 git_cache_dir=options.git_cache_dir, |
1602 gerrit_reset=not options.gerrit_no_reset) | 1263 gerrit_reset=not options.gerrit_no_reset) |
1603 gclient_output = ensure_checkout(**checkout_parameters) | 1264 gclient_output = ensure_checkout(**checkout_parameters) |
1604 except GclientSyncFailed: | 1265 except GclientSyncFailed: |
1605 print 'We failed gclient sync, let\'s delete the checkout and retry.' | 1266 print 'We failed gclient sync, let\'s delete the checkout and retry.' |
1606 ensure_no_checkout(dir_names, '*') | 1267 ensure_no_checkout(dir_names) |
1607 gclient_output = ensure_checkout(**checkout_parameters) | 1268 gclient_output = ensure_checkout(**checkout_parameters) |
1608 except PatchFailed as e: | 1269 except PatchFailed as e: |
1609 if options.output_json: | 1270 if options.output_json: |
1610 # Tell recipes information such as root, got_revision, etc. | 1271 # Tell recipes information such as root, got_revision, etc. |
1611 emit_json(options.output_json, | 1272 emit_json(options.output_json, |
1612 did_run=True, | 1273 did_run=True, |
1613 root=first_sln, | 1274 root=first_sln, |
1614 log_lines=[('patch error', e.output),], | 1275 log_lines=[('patch error', e.output),], |
1615 patch_apply_return_code=e.code, | 1276 patch_apply_return_code=e.code, |
1616 patch_root=options.patch_root, | 1277 patch_root=options.patch_root, |
1617 patch_failure=True, | 1278 patch_failure=True, |
1618 step_text='%s PATCH FAILED' % step_text, | 1279 step_text='%s PATCH FAILED' % step_text, |
1619 fixed_revisions=revisions) | 1280 fixed_revisions=revisions) |
1620 else: | 1281 else: |
1621 # If we're not on recipes, tell annotator about our got_revisions. | 1282 # If we're not on recipes, tell annotator about our got_revisions. |
1622 emit_log_lines('patch error', e.output) | 1283 emit_log_lines('patch error', e.output) |
1623 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text | 1284 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text |
1624 raise | 1285 raise |
1625 | 1286 |
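The nested try blocks above encode a wipe-and-retry-once policy for gclient failures, while patch failures fall through to the outer handler for reporting. Abstracted away from bot_update's helpers (all names below are placeholders), the shape is:

def sync_with_one_retry(do_sync, wipe_checkouts, retryable_error):
    # Placeholder names; mirrors the inner try above: on a sync failure,
    # delete the checkouts and try exactly once more.
    try:
        return do_sync()
    except retryable_error:
        wipe_checkouts()
        return do_sync()  # A second failure propagates to the caller.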
1626 # Revision is an svn revision, unless it's a git master. | |
1627 use_svn_rev = master not in GIT_MASTERS | |
1628 | |
1629 # Take care of got_revisions outputs. | 1287 # Take care of got_revisions outputs. |
1630 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {})) | 1288 revision_mapping = GOT_REVISION_MAPPINGS.get(git_slns[0]['url'], {}) |
1631 if options.revision_mapping: | 1289 if options.revision_mapping: |
1632 revision_mapping.update(options.revision_mapping) | 1290 revision_mapping.update(options.revision_mapping) |
1633 | 1291 |
1634 # If the repo is not in the default GOT_REVISION_MAPPINGS and no | 1292 # If the repo is not in the default GOT_REVISION_MAPPINGS and no |
1635 # revision_mapping was specified on the command line, then | 1293 # revision_mapping was specified on the command line, then |
1636 # default to setting 'got_revision' based on the first solution. | 1294 # default to setting 'got_revision' based on the first solution. |
1637 if not revision_mapping: | 1295 if not revision_mapping: |
1638 revision_mapping[first_sln] = 'got_revision' | 1296 revision_mapping[first_sln] = 'got_revision' |
1639 | 1297 |
1640 got_revisions = parse_got_revision(gclient_output, revision_mapping, | 1298 got_revisions = parse_got_revision(gclient_output, revision_mapping) |
1641 use_svn_rev) | |
1642 | 1299 |
1643 if not got_revisions: | 1300 if not got_revisions: |
1644 # TODO(hinoka): We should probably bail out here, but in the interest | 1301 # TODO(hinoka): We should probably bail out here, but in the interest |
1645 # of giving mis-configured bots some time to get fixed use a dummy | 1302 # of giving mis-configured bots some time to get fixed use a dummy |
1646 # revision here. | 1303 # revision here. |
1647 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } | 1304 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } |
1648 #raise Exception('No got_revision(s) found in gclient output') | 1305 #raise Exception('No got_revision(s) found in gclient output') |
1649 | 1306 |
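Illustratively (the values below are placeholders), a healthy run ends with got_revisions mapping each configured property name to a commit, while the fallback above substitutes a sentinel so misconfigured bots keep running:

# Placeholder shapes only.
healthy = {'got_revision': '<40-char commit hash>'}
misconfigured = {'got_revision': 'BOT_UPDATE_NO_REV_FOUND'}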
1650 if options.output_json: | 1307 if options.output_json: |
1651 manifest = create_manifest() if options.output_manifest else None | 1308 manifest = create_manifest() if options.output_manifest else None |
(...skipping 53 matching lines...) | |
1705 # Check if this script should activate or not. | 1362 # Check if this script should activate or not. |
1706 active = options.force or check_valid_host(master, builder, slave) | 1363 active = options.force or check_valid_host(master, builder, slave) |
1707 | 1364 |
1708 # Print a helpful message to tell developers what's going on with this step. | 1365 # Print a helpful message to tell developers what's going on with this step. |
1709 print_help_text( | 1366 print_help_text( |
1710 options.force, options.output_json, active, master, builder, slave) | 1367 options.force, options.output_json, active, master, builder, slave) |
1711 | 1368 |
1712 # Parse, manipulate, and print the gclient solutions. | 1369 # Parse, manipulate, and print the gclient solutions. |
1713 specs = {} | 1370 specs = {} |
1714 exec(options.specs, specs) | 1371 exec(options.specs, specs) |
1715 svn_solutions = specs.get('solutions', []) | 1372 orig_solutions = specs.get('solutions', []) |
1716 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions) | 1373 git_slns = modify_solutions(orig_solutions) |
1717 options.revision = maybe_ignore_revision(options.revision, buildspec) | |
1718 | 1374 |
1719 solutions_printer(git_slns) | 1375 solutions_printer(git_slns) |
1720 | 1376 |
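Because --specs is exec'd as Python, a spec is just an assignment to a solutions list; the value below is a minimal, assumed example (not a recommended configuration) showing how the dict ends up populated:

# Minimal assumed spec; exec'ing it fills specs['solutions'].
example_spec = (
    "solutions = [{'name': 'src', "
    "'url': 'https://chromium.googlesource.com/chromium/src.git', "
    "'deps_file': 'DEPS', 'managed': True}]"
)
specs = {}
exec(example_spec, specs)
assert specs['solutions'][0]['name'] == 'src'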
1721 try: | 1377 try: |
1722 # Dun dun dun, the main part of bot_update. | 1378 # Dun dun dun, the main part of bot_update. |
1723 revisions, step_text = prepare(options, git_slns, active) | 1379 revisions, step_text = prepare(options, git_slns, active) |
1724 checkout(options, git_slns, specs, buildspec, master, svn_root, revisions, | 1380 checkout(options, git_slns, specs, master, revisions, step_text) |
1725 step_text) | |
1726 | 1381 |
1727 except Inactive: | |
1728 # Not active, should count as passing. | |
1729 pass | |
1730 except PatchFailed as e: | 1382 except PatchFailed as e: |
1731 emit_flag(options.flag_file) | 1383 emit_flag(options.flag_file) |
1732 # Return a specific non-zero exit code for patch failure (because it is | 1384 # Return a specific non-zero exit code for patch failure (because it is |
1733 # a failure), but make it different than other failures to distinguish | 1385 # a failure), but make it different than other failures to distinguish |
1734 # between infra failures (independent from patch author), and patch | 1386 # between infra failures (independent from patch author), and patch |
1735 # failures (that the patch author can fix). However, a PatchFailed caused | 1387 # failures (that the patch author can fix). However, a PatchFailed caused |
1736 # by a patch download failure is still an infra problem. | 1388 # by a patch download failure is still an infra problem. |
1737 if e.code == 3: | 1389 if e.code == 3: |
1738 # Patch download problem. | 1390 # Patch download problem. |
1739 return 87 | 1391 return 87 |
1740 # Genuine patch problem. | 1392 # Genuine patch problem. |
1741 return 88 | 1393 return 88 |
1742 except Exception: | 1394 except Exception: |
1743 # Unexpected failure. | 1395 # Unexpected failure. |
1744 emit_flag(options.flag_file) | 1396 emit_flag(options.flag_file) |
1745 raise | 1397 raise |
1746 else: | 1398 else: |
1747 emit_flag(options.flag_file) | 1399 emit_flag(options.flag_file) |
1748 | 1400 |
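From the caller's side the policy above is visible only through the exit status: 87 marks a patch that could not be downloaded (an infra problem), 88 marks a patch that did not apply (something the author can fix), and anything else is an unexpected failure. A sketch of how a wrapper might classify the status (the constant names and helper are invented here; only the numeric values come from the code above):

PATCH_DOWNLOAD_FAILURE = 87  # infra: patch could not be fetched
PATCH_APPLY_FAILURE = 88     # author: patch did not apply

def classify_bot_update_exit(code):
    # Invented helper for illustration.
    if code == 0:
        return 'ok'
    if code == PATCH_DOWNLOAD_FAILURE:
        return 'infra failure (patch download)'
    if code == PATCH_APPLY_FAILURE:
        return 'patch failure (author can fix)'
    return 'infra failure (unexpected)'

assert classify_bot_update_exit(88) == 'patch failure (author can fix)'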
1749 | 1401 |
1750 if __name__ == '__main__': | 1402 if __name__ == '__main__': |
1751 sys.exit(main()) | 1403 sys.exit(main()) |