OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 # TODO(hinoka): Use logging. | 6 # TODO(hinoka): Use logging. |
7 | 7 |
8 import cStringIO | 8 import cStringIO |
9 import codecs | 9 import codecs |
10 import collections | 10 import collections |
(...skipping 68 matching lines...)
79 path.join(ROOT_DIR, # .recipe_deps | 79 path.join(ROOT_DIR, # .recipe_deps |
80 path.pardir, # slave | 80 path.pardir, # slave |
81 path.pardir, # scripts | 81 path.pardir, # scripts |
82 path.pardir), # build_internal | 82 path.pardir), # build_internal |
83 ]) | 83 ]) |
84 | 84 |
85 | 85 |
86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' | 86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' |
87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' | 87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' |
88 | 88 |
89 # Official builds use buildspecs, so this is a special case. | |
90 BUILDSPEC_TYPE = collections.namedtuple('buildspec', | |
91 ('container', 'version')) | |
92 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' | |
93 '(build|branches|releases)/(.+)$') | |
94 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' | |
95 'buildspec') | |
96 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' | 89 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' |
97 | 90 |
98 BUILDSPEC_COMMIT_RE = ( | 91 BUILDSPEC_COMMIT_RE = ( |
99 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), | 92 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), |
100 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), | 93 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), |
101 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), | 94 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), |
102 ) | 95 ) |
103 | 96 |
104 # Regular expression that matches a single commit footer line. | 97 # Regular expression that matches a single commit footer line. |
105 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') | 98 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') |
106 | 99 |
107 # Footer metadata keys for regular and gsubtreed mirrored commit positions. | 100 # Footer metadata keys for regular and gsubtreed mirrored commit positions. |
108 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' | 101 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' |
109 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' | 102 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' |
110 # Regular expression to parse a commit position | 103 # Regular expression to parse a commit position |
111 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') | 104 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') |
112 | 105 |
113 # Regular expression to parse gclient's revinfo entries. | 106 # Regular expression to parse gclient's revinfo entries. |
114 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') | 107 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') |
115 | 108 |
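As a quick illustration of what the footer and revinfo patterns above are meant to match (the commit-position footer and revinfo line below are fabricated examples, not taken from a real commit):

    import re

    COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)')
    COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')
    REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')

    # A footer line splits into a key/value pair, and a commit-position value
    # splits into (ref, sequence number).
    footer = 'Cr-Commit-Position: refs/heads/master@{#123456}'
    key, value = COMMIT_FOOTER_ENTRY_RE.match(footer).groups()
    ref, num = COMMIT_POSITION_RE.match(value).groups()
    # key == 'Cr-Commit-Position', ref == 'refs/heads/master', num == '123456'

    # A gclient revinfo entry splits into (checkout path, url, revision).
    revinfo = 'src/v8: https://chromium.googlesource.com/v8/v8.git@0123456789abcdef'
    checkout_path, url, rev = REVINFO_RE.match(revinfo).groups()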
116 # Used by 'ResolveSvnRevisionFromGitiles' | |
117 GIT_SVN_PROJECT_MAP = { | |
118 'webkit': { | |
119 'svn_url': 'svn://svn.chromium.org/blink', | |
120 'branch_map': [ | |
121 (r'trunk', r'refs/heads/master'), | |
122 (r'branches/([^/]+)', r'refs/branch-heads/\1'), | |
123 ], | |
124 }, | |
125 'v8': { | |
126 'svn_url': 'https://v8.googlecode.com/svn', | |
127 'branch_map': [ | |
128 (r'trunk', r'refs/heads/candidates'), | |
129 (r'branches/bleeding_edge', r'refs/heads/master'), | |
130 (r'branches/([^/]+)', r'refs/branch-heads/\1'), | |
131 ], | |
132 }, | |
133 'nacl': { | |
134 'svn_url': 'svn://svn.chromium.org/native_client', | |
135 'branch_map': [ | |
136 (r'trunk/src/native_client', r'refs/heads/master'), | |
137 ], | |
138 }, | |
139 } | |
140 | |
141 # Key for the 'git-svn' ID metadata commit footer entry. | |
142 GIT_SVN_ID_FOOTER_KEY = 'git-svn-id' | |
143 # e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117 | |
144 # ce2b1a6d-e550-0410-aec6-3dcde31c8c00 | |
145 GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)') | |
146 | |
147 | |
148 # This is the git mirror of the buildspecs repository. We could rely on the svn | |
149 # checkout, now that the git buildspecs are checked in alongside the svn | |
150 # buildspecs, but we're going to want to pull all the buildspecs from here | |
151 # eventually anyhow, and there's already some logic to pull from git (for the | |
152 # old git_buildspecs.git repo), so just stick with that. | |
153 GIT_BUILDSPEC_REPO = ( | |
154 'https://chrome-internal.googlesource.com/chrome/tools/buildspec') | |
155 | 109 |
156 # Copied from scripts/recipes/chromium.py. | 110 # Copied from scripts/recipes/chromium.py. |
157 GOT_REVISION_MAPPINGS = { | 111 GOT_REVISION_MAPPINGS = { |
158 '/chrome/trunk/src': { | 112 CHROMIUM_SRC_URL: { |
159 'src/': 'got_revision', | 113 'src/': 'got_revision', |
160 'src/native_client/': 'got_nacl_revision', | 114 'src/native_client/': 'got_nacl_revision', |
161 'src/tools/swarm_client/': 'got_swarm_client_revision', | 115 'src/tools/swarm_client/': 'got_swarm_client_revision', |
162 'src/tools/swarming_client/': 'got_swarming_client_revision', | 116 'src/tools/swarming_client/': 'got_swarming_client_revision', |
163 'src/third_party/WebKit/': 'got_webkit_revision', | 117 'src/third_party/WebKit/': 'got_webkit_revision', |
164 'src/third_party/webrtc/': 'got_webrtc_revision', | 118 'src/third_party/webrtc/': 'got_webrtc_revision', |
165 'src/v8/': 'got_v8_revision', | 119 'src/v8/': 'got_v8_revision', |
166 } | 120 } |
167 } | 121 } |
168 | 122 |
169 | 123 |
170 BOT_UPDATE_MESSAGE = """ | 124 BOT_UPDATE_MESSAGE = """ |
171 What is the "Bot Update" step? | 125 What is the "Bot Update" step? |
172 ============================== | 126 ============================== |
173 | 127 |
174 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and | 128 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and |
175 its dependencies) is checked out in a consistent state. This means that all of | 129 its dependencies) is checked out in a consistent state. This means that all of |
176 the necessary repositories are checked out, no extra repositories are checked | 130 the necessary repositories are checked out, no extra repositories are checked |
177 out, and no locally modified files are present. | 131 out, and no locally modified files are present. |
178 | 132 |
179 These actions used to be taken care of by the "gclient revert" and "update" | 133 These actions used to be taken care of by the "gclient revert" and "update" |
180 steps. However, those steps are known to be buggy and occasionally flaky. This | 134 steps. However, those steps are known to be buggy and occasionally flaky. This |
181 step has two main advantages over them: | 135 step has two main advantages over them: |
182 * it only operates in Git, so the logic can be clearer and cleaner; and | 136 * it only operates in Git, so the logic can be clearer and cleaner; and |
183 * it is a slave-side script, so its behavior can be modified without | 137 * it is a slave-side script, so its behavior can be modified without |
184 restarting the master. | 138 restarting the master. |
185 | 139 |
186 Why Git, you ask? Because that is the direction that the Chromium project is | |
187 heading. This step is an integral part of the transition from using the SVN repo | |
188 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while | |
189 we fully convert everything to Git. This message will get out of your way | |
190 eventually, and the waterfall will be a happier place because of it. | |
191 | |
192 This step can be activated or deactivated independently on every builder on | |
193 every master. When it is active, the "gclient revert" and "update" steps become | |
194 no-ops. When it is inactive, it prints this message, cleans up after itself, and | |
195 lets everything else continue as though nothing has changed. Eventually, when | |
196 everything is stable enough, this step will replace them entirely. | |
197 | |
198 Debugging information: | 140 Debugging information: |
199 (master/builder/slave may be unspecified on recipes) | 141 (master/builder/slave may be unspecified on recipes) |
200 master: %(master)s | 142 master: %(master)s |
201 builder: %(builder)s | 143 builder: %(builder)s |
202 slave: %(slave)s | 144 slave: %(slave)s |
203 forced by recipes: %(recipe)s | 145 forced by recipes: %(recipe)s |
204 CURRENT_DIR: %(CURRENT_DIR)s | 146 CURRENT_DIR: %(CURRENT_DIR)s |
205 BUILDER_DIR: %(BUILDER_DIR)s | 147 BUILDER_DIR: %(BUILDER_DIR)s |
206 SLAVE_DIR: %(SLAVE_DIR)s | 148 SLAVE_DIR: %(SLAVE_DIR)s |
207 THIS_DIR: %(THIS_DIR)s | 149 THIS_DIR: %(THIS_DIR)s |
(...skipping 29 matching lines...)
237 try: | 179 try: |
238 execfile(os.path.join( | 180 execfile(os.path.join( |
239 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), | 181 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), |
240 local_vars) | 182 local_vars) |
241 except Exception: | 183 except Exception: |
242 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. | 184 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. |
243 print 'Warning: unable to read internal configuration file.' | 185 print 'Warning: unable to read internal configuration file.' |
244 print 'If this is an internal bot, this step may be erroneously inactive.' | 186 print 'If this is an internal bot, this step may be erroneously inactive.' |
245 internal_data = local_vars | 187 internal_data = local_vars |
246 | 188 |
247 RECOGNIZED_PATHS = { | |
248 # If SVN path matches key, the entire URL is rewritten to the Git url. | |
249 '/chrome/trunk/src': | |
250 CHROMIUM_SRC_URL, | |
251 '/chrome/trunk/src/tools/cros.DEPS': | |
252 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git', | |
253 '/chrome-internal/trunk/src-internal': | |
254 'https://chrome-internal.googlesource.com/chrome/src-internal.git', | |
255 } | |
256 RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {})) | |
257 | 189 |
258 ENABLED_MASTERS = [ | 190 ENABLED_MASTERS = [ |
259 'bot_update.always_on', | 191 'bot_update.always_on', |
260 'chromium.android', | 192 'chromium.android', |
261 'chromium.angle', | 193 'chromium.angle', |
262 'chromium.chrome', | 194 'chromium.chrome', |
263 'chromium.chromedriver', | 195 'chromium.chromedriver', |
264 'chromium.chromiumos', | 196 'chromium.chromiumos', |
265 'chromium', | 197 'chromium', |
266 'chromium.fyi', | 198 'chromium.fyi', |
(...skipping 63 matching lines...)
330 | 262 |
331 # Disabled filters get run AFTER enabled filters, so for example if a builder | 263 # Disabled filters get run AFTER enabled filters, so for example if a builder |
332 # config is enabled, but a bot on that builder is disabled, that bot will | 264 # config is enabled, but a bot on that builder is disabled, that bot will |
333 # be disabled. | 265 # be disabled. |
334 DISABLED_BUILDERS = {} | 266 DISABLED_BUILDERS = {} |
335 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) | 267 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) |
336 | 268 |
337 DISABLED_SLAVES = {} | 269 DISABLED_SLAVES = {} |
338 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) | 270 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) |
339 | 271 |
340 # These masters work only in Git, meaning for got_revision, always output | |
341 # a git hash rather than a SVN rev. | |
342 GIT_MASTERS = [ | |
343 'client.v8', | |
344 'client.v8.branches', | |
345 'client.v8.ports', | |
346 'tryserver.v8', | |
347 ] | |
348 GIT_MASTERS += internal_data.get('GIT_MASTERS', []) | |
349 | |
350 | |
351 # How many times to try before giving up. | 272 # How many times to try before giving up. |
352 ATTEMPTS = 5 | 273 ATTEMPTS = 5 |
353 | 274 |
354 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') | 275 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') |
355 | 276 |
356 # Find the patch tool. | 277 # Find the patch tool. |
357 if sys.platform.startswith('win'): | 278 if sys.platform.startswith('win'): |
358 if not BUILD_INTERNAL_DIR: | 279 if not BUILD_INTERNAL_DIR: |
359 print 'Warning: could not find patch tool because there is no ' | 280 print 'Warning: could not find patch tool because there is no ' |
360 print 'build_internal present.' | 281 print 'build_internal present.' |
(...skipping 16 matching lines...)
377 | 298 |
378 | 299 |
379 class PatchFailed(SubprocessFailed): | 300 class PatchFailed(SubprocessFailed): |
380 pass | 301 pass |
381 | 302 |
382 | 303 |
383 class GclientSyncFailed(SubprocessFailed): | 304 class GclientSyncFailed(SubprocessFailed): |
384 pass | 305 pass |
385 | 306 |
386 | 307 |
387 class SVNRevisionNotFound(Exception): | |
388 pass | |
389 | |
390 | |
391 class InvalidDiff(Exception): | 308 class InvalidDiff(Exception): |
392 pass | 309 pass |
393 | 310 |
394 | 311 |
395 class Inactive(Exception): | |
396 """Not really an exception, just used to exit early cleanly.""" | |
397 pass | |
398 | |
399 | |
400 RETRY = object() | 312 RETRY = object() |
401 OK = object() | 313 OK = object() |
402 FAIL = object() | 314 FAIL = object() |
403 | 315 |
404 | 316 |
405 class PsPrinter(object): | 317 class PsPrinter(object): |
406 def __init__(self, interval=300): | 318 def __init__(self, interval=300): |
407 self.interval = interval | 319 self.interval = interval |
408 self.active = sys.platform.startswith('linux2') | 320 self.active = sys.platform.startswith('linux2') |
409 self.thread = None | 321 self.thread = None |
(...skipping 136 matching lines...)
546 if slave_list and slave in slave_list: | 458 if slave_list and slave in slave_list: |
547 return True | 459 return True |
548 return False | 460 return False |
549 | 461 |
550 | 462 |
551 def check_valid_host(master, builder, slave): | 463 def check_valid_host(master, builder, slave): |
552 return (check_enabled(master, builder, slave) | 464 return (check_enabled(master, builder, slave) |
553 and not check_disabled(master, builder, slave)) | 465 and not check_disabled(master, builder, slave)) |
554 | 466 |
555 | 467 |
556 def maybe_ignore_revision(revision, buildspec): | |
557 """Handle builders that don't care what buildbot tells them to build. | |
558 | |
559 This is especially the case with branch builders that build from buildspecs | |
560 and/or trigger off multiple repositories, where the --revision passed in has | |
561 nothing to do with the solution being built. Clearing the revision in this | |
562 case causes bot_update to use HEAD rather than trying to check out an | |
563 inappropriate version of the solution. | |
564 """ | |
565 if buildspec and buildspec.container == 'branches': | |
566 return [] | |
567 return revision | |
568 | |
569 | |
570 def solutions_printer(solutions): | 468 def solutions_printer(solutions): |
571 """Prints gclient solution to stdout.""" | 469 """Prints gclient solution to stdout.""" |
572 print 'Gclient Solutions' | 470 print 'Gclient Solutions' |
573 print '=================' | 471 print '=================' |
574 for solution in solutions: | 472 for solution in solutions: |
575 name = solution.get('name') | 473 name = solution.get('name') |
576 url = solution.get('url') | 474 url = solution.get('url') |
577 print '%s (%s)' % (name, url) | 475 print '%s (%s)' % (name, url) |
578 if solution.get('deps_file'): | 476 if solution.get('deps_file'): |
579 print ' Dependencies file is %s' % solution['deps_file'] | 477 print ' Dependencies file is %s' % solution['deps_file'] |
(...skipping 14 matching lines...)
594 print ' %s: Ignore' % deps_name | 492 print ' %s: Ignore' % deps_name |
595 for k, v in solution.iteritems(): | 493 for k, v in solution.iteritems(): |
596 # Print out all the keys we don't know about. | 494 # Print out all the keys we don't know about. |
597 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', | 495 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', |
598 'managed']: | 496 'managed']: |
599 continue | 497 continue |
600 print ' %s is %s' % (k, v) | 498 print ' %s is %s' % (k, v) |
601 print | 499 print |
602 | 500 |
603 | 501 |
604 def solutions_to_git(input_solutions): | 502 def modify_solutions(input_solutions): |
605 """Modifies urls in solutions to point at Git repos. | 503 """Modifies urls in solutions to point at Git repos. |
606 | 504 |
607 returns: (git solution, svn root of first solution) tuple. | 505 returns: a new list of solution dictionaries. |
608 """ | 506 """ |
609 assert input_solutions | 507 assert input_solutions |
610 solutions = copy.deepcopy(input_solutions) | 508 solutions = copy.deepcopy(input_solutions) |
611 first_solution = True | |
612 buildspec = None | |
613 for solution in solutions: | 509 for solution in solutions: |
614 original_url = solution['url'] | 510 original_url = solution['url'] |
615 parsed_url = urlparse.urlparse(original_url) | 511 parsed_url = urlparse.urlparse(original_url) |
616 parsed_path = parsed_url.path | 512 parsed_path = parsed_url.path |
617 | 513 |
618 # Rewrite SVN urls into Git urls. | |
619 buildspec_m = re.match(BUILDSPEC_RE, parsed_path) | |
620 if first_solution and buildspec_m: | |
621 solution['url'] = GIT_BUILDSPEC_PATH | |
622 buildspec = BUILDSPEC_TYPE( | |
623 container=buildspec_m.group(1), | |
624 version=buildspec_m.group(2), | |
625 ) | |
626 solution['deps_file'] = path.join(buildspec.container, buildspec.version, | |
627 'DEPS') | |
628 elif parsed_path in RECOGNIZED_PATHS: | |
629 solution['url'] = RECOGNIZED_PATHS[parsed_path] | |
630 solution['deps_file'] = '.DEPS.git' | |
631 elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc: | |
632 pass | |
633 else: | |
634 print 'Warning: %s' % ('path %r not recognized' % parsed_path,) | |
635 | |
636 # Strip out deps containing $$V8_REV$$, etc. | |
637 if 'custom_deps' in solution: | |
638 new_custom_deps = {} | |
639 for deps_name, deps_value in solution['custom_deps'].iteritems(): | |
640 if deps_value and '$$' in deps_value: | |
641 print 'Dropping %s:%s from custom deps' % (deps_name, deps_value) | |
642 else: | |
643 new_custom_deps[deps_name] = deps_value | |
644 solution['custom_deps'] = new_custom_deps | |
645 | |
646 if first_solution: | |
647 root = parsed_path | |
648 first_solution = False | |
649 | |
650 solution['managed'] = False | 514 solution['managed'] = False |
651 # We don't want gclient to be using a safesync URL. Instead it should | 515 # We don't want gclient to be using a safesync URL. Instead it should |
652 # be using the lkgr/lkcr branch/tags. | 516 # be using the lkgr/lkcr branch/tags. |
653 if 'safesync_url' in solution: | 517 if 'safesync_url' in solution: |
654 print 'Removing safesync url %s from %s' % (solution['safesync_url'], | 518 print 'Removing safesync url %s from %s' % (solution['safesync_url'], |
655 parsed_path) | 519 parsed_path) |
656 del solution['safesync_url'] | 520 del solution['safesync_url'] |
657 return solutions, root, buildspec | 521 |
| 522 return solutions |
658 | 523 |
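A small usage sketch of the helper above (the solution dict is hypothetical); it deep-copies its input, forces managed mode off, and drops any safesync_url:

    input_solutions = [{
        'name': 'src',
        'url': 'https://chromium.googlesource.com/chromium/src.git',
        'deps_file': '.DEPS.git',
        'managed': True,
        'safesync_url': 'https://example.com/lkgr',  # hypothetical; gets stripped
    }]
    solutions = modify_solutions(input_solutions)
    # solutions[0]['managed'] is now False and 'safesync_url' is gone, while
    # input_solutions itself is untouched thanks to the deepcopy.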
659 | 524 |
660 def remove(target): | 525 def remove(target): |
661 """Remove a target by moving it into build.dead.""" | 526 """Remove a target by moving it into build.dead.""" |
662 dead_folder = path.join(BUILDER_DIR, 'build.dead') | 527 dead_folder = path.join(BUILDER_DIR, 'build.dead') |
663 if not path.exists(dead_folder): | 528 if not path.exists(dead_folder): |
664 os.makedirs(dead_folder) | 529 os.makedirs(dead_folder) |
665 os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) | 530 os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) |
666 | 531 |
667 | 532 |
668 def ensure_no_checkout(dir_names, scm_dirname): | 533 def ensure_no_checkout(dir_names): |
669 """Ensure that there is no undesired checkout under build/. | 534 """Ensure that there is no undesired checkout under build/.""" |
670 | 535 build_dir = os.getcwd() |
671 If there is an incorrect checkout under build/, then | 536 has_checkout = any(path.exists(path.join(build_dir, dir_name, '.git')) |
672 move build/ to build.dead/ | |
673 This function will check each directory in dir_names. | |
674 | |
675 scm_dirname is expected to be either ['.svn', '.git'] | |
676 """ | |
677 assert scm_dirname in ['.svn', '.git', '*'] | |
678 has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname)) | |
679 for dir_name in dir_names) | 537 for dir_name in dir_names) |
680 | 538 if has_checkout: |
681 if has_checkout or scm_dirname == '*': | |
682 build_dir = os.getcwd() | |
683 prefix = '' | |
684 if scm_dirname != '*': | |
685 prefix = '%s detected in checkout, ' % scm_dirname | |
686 | |
687 for filename in os.listdir(build_dir): | 539 for filename in os.listdir(build_dir): |
688 deletion_target = path.join(build_dir, filename) | 540 deletion_target = path.join(build_dir, filename) |
689 print '%sdeleting %s...' % (prefix, deletion_target), | 541 print '.git detected in checkout, deleting %s...' % deletion_target, |
690 remove(deletion_target) | 542 remove(deletion_target) |
691 print 'done' | 543 print 'done' |
692 | 544 |
693 | 545 |
694 def gclient_configure(solutions, target_os, target_os_only, git_cache_dir): | 546 def gclient_configure(solutions, target_os, target_os_only, git_cache_dir): |
695 """Should do the same thing as gclient --spec='...'.""" | 547 """Should do the same thing as gclient --spec='...'.""" |
696 with codecs.open('.gclient', mode='w', encoding='utf-8') as f: | 548 with codecs.open('.gclient', mode='w', encoding='utf-8') as f: |
697 f.write(get_gclient_spec( | 549 f.write(get_gclient_spec( |
698 solutions, target_os, target_os_only, git_cache_dir)) | 550 solutions, target_os, target_os_only, git_cache_dir)) |
699 | 551 |
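For reference, the .gclient file written here ends up looking roughly like the spec below; the cache directory and target_os values are illustrative only, not taken from this change:

    solutions = [
      { "name"        : "src",
        "url"         : "https://chromium.googlesource.com/chromium/src.git",
        "deps_file"   : ".DEPS.git",
        "managed"     : False,
        "custom_deps" : {},
      },
    ]
    cache_dir = "/b/build/slave/cache_dir"  # hypothetical path
    target_os = ["android"]                 # only present when a target_os is requested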
(...skipping 73 matching lines...)
773 footers[m.group(1)] = m.group(2).strip() | 625 footers[m.group(1)] = m.group(2).strip() |
774 return footers | 626 return footers |
775 | 627 |
776 | 628 |
777 def get_commit_message_footer(message, key): | 629 def get_commit_message_footer(message, key): |
778 """Returns: (str/None) The footer value for 'key', or None if none was found. | 630 """Returns: (str/None) The footer value for 'key', or None if none was found. |
779 """ | 631 """ |
780 return get_commit_message_footer_map(message).get(key) | 632 return get_commit_message_footer_map(message).get(key) |
781 | 633 |
782 | 634 |
783 def get_svn_rev(git_hash, dir_name): | |
784 log = git('log', '-1', git_hash, cwd=dir_name) | |
785 git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY) | |
786 if not git_svn_id: | |
787 return None | |
788 m = GIT_SVN_ID_RE.match(git_svn_id) | |
789 if not m: | |
790 return None | |
791 return int(m.group(2)) | |
792 | |
793 | |
794 def get_git_hash(revision, branch, sln_dir): | |
795 """We want to search for the SVN revision on the git-svn branch. | |
796 | |
797 Note that git will search backwards from origin/master. | |
798 """ | |
799 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision) | |
800 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch | |
801 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref] | |
802 result = git(*cmd, cwd=sln_dir).strip() | |
803 if result: | |
804 return result | |
805 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' % | |
806 (revision, sln_dir)) | |
807 | |
808 | |
809 def emit_log_lines(name, lines): | 635 def emit_log_lines(name, lines): |
810 for line in lines.splitlines(): | 636 for line in lines.splitlines(): |
811 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) | 637 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) |
812 print '@@@STEP_LOG_END@%s@@@' % name | 638 print '@@@STEP_LOG_END@%s@@@' % name |
813 | 639 |
814 | 640 |
815 def emit_properties(properties): | 641 def emit_properties(properties): |
816 for property_name, property_value in sorted(properties.items()): | 642 for property_name, property_value in sorted(properties.items()): |
817 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) | 643 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) |
818 | 644 |
(...skipping 34 matching lines...)
853 | 679 |
854 | 680 |
855 def force_revision(folder_name, revision): | 681 def force_revision(folder_name, revision): |
856 split_revision = revision.split(':', 1) | 682 split_revision = revision.split(':', 1) |
857 branch = 'master' | 683 branch = 'master' |
858 if len(split_revision) == 2: | 684 if len(split_revision) == 2: |
859 # Support for "branch:revision" syntax. | 685 # Support for "branch:revision" syntax. |
860 branch, revision = split_revision | 686 branch, revision = split_revision |
861 | 687 |
862 if revision and revision.upper() != 'HEAD': | 688 if revision and revision.upper() != 'HEAD': |
863 if revision and revision.isdigit() and len(revision) < 40: | 689 git('checkout', '--force', revision, cwd=folder_name) |
864 # rev_num is really a svn revision number, convert it into a git hash. | |
865 git_ref = get_git_hash(int(revision), branch, folder_name) | |
866 else: | |
867 # rev_num is actually a git hash or ref, we can just use it. | |
868 git_ref = revision | |
869 git('checkout', '--force', git_ref, cwd=folder_name) | |
870 else: | 690 else: |
871 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch | 691 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
872 git('checkout', '--force', ref, cwd=folder_name) | 692 git('checkout', '--force', ref, cwd=folder_name) |
873 | 693 |
| 694 |
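A hedged sketch of the 'branch:revision' splitting done in force_revision() above (the values are made up):

    revision = 'refs/branch-heads/2454:0123456789abcdef'
    split_revision = revision.split(':', 1)
    if len(split_revision) == 2:
        branch, revision = split_revision  # ('refs/branch-heads/2454', '0123456789abcdef')
    # A concrete revision is checked out directly; 'HEAD' (or an empty revision)
    # checks out the branch instead, prefixed with 'origin/' unless it already
    # starts with 'refs/'.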
874 def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): | 695 def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): |
875 build_dir = os.getcwd() | 696 build_dir = os.getcwd() |
876 # Before we do anything, break all git_cache locks. | 697 # Before we do anything, break all git_cache locks. |
877 if path.isdir(git_cache_dir): | 698 if path.isdir(git_cache_dir): |
878 git('cache', 'unlock', '-vv', '--force', '--all', | 699 git('cache', 'unlock', '-vv', '--force', '--all', |
879 '--cache-dir', git_cache_dir) | 700 '--cache-dir', git_cache_dir) |
880 for item in os.listdir(git_cache_dir): | 701 for item in os.listdir(git_cache_dir): |
881 filename = os.path.join(git_cache_dir, item) | 702 filename = os.path.join(git_cache_dir, item) |
882 if item.endswith('.lock'): | 703 if item.endswith('.lock'): |
883 raise Exception('%s exists after cache unlock' % filename) | 704 raise Exception('%s exists after cache unlock' % filename) |
(...skipping 40 matching lines...)
924 # Exited abnormally, there's probably something wrong. | 745 # Exited abnormally, there's probably something wrong. |
925 # Let's wipe the checkout and try again. | 746 # Let's wipe the checkout and try again. |
926 tries_left -= 1 | 747 tries_left -= 1 |
927 if tries_left > 0: | 748 if tries_left > 0: |
928 print 'Something failed: %s.' % str(e) | 749 print 'Something failed: %s.' % str(e) |
929 print 'waiting 5 seconds and trying again...' | 750 print 'waiting 5 seconds and trying again...' |
930 time.sleep(5) | 751 time.sleep(5) |
931 else: | 752 else: |
932 raise | 753 raise |
933 remove(sln_dir) | 754 remove(sln_dir) |
934 except SVNRevisionNotFound: | |
935 tries_left -= 1 | |
936 if tries_left > 0: | |
937 # If we don't have the correct revision, wait and try again. | |
938 print 'We can\'t find revision %s.' % revision | |
939 print 'The svn to git replicator is probably falling behind.' | |
940 print 'waiting 5 seconds and trying again...' | |
941 time.sleep(5) | |
942 else: | |
943 raise | |
944 | 755 |
945 git('clean', '-dff', cwd=sln_dir) | 756 git('clean', '-dff', cwd=sln_dir) |
946 | 757 |
947 if first_solution: | 758 if first_solution: |
948 git_ref = git('log', '--format=%H', '--max-count=1', | 759 git_ref = git('log', '--format=%H', '--max-count=1', |
949 cwd=sln_dir).strip() | 760 cwd=sln_dir).strip() |
950 first_solution = False | 761 first_solution = False |
951 return git_ref | 762 return git_ref |
952 | 763 |
953 | 764 |
954 def _download(url): | 765 def _download(url): |
955 """Fetch url and return content, with retries for flake.""" | 766 """Fetch url and return content, with retries for flake.""" |
956 for attempt in xrange(ATTEMPTS): | 767 for attempt in xrange(ATTEMPTS): |
957 try: | 768 try: |
958 return urllib2.urlopen(url).read() | 769 return urllib2.urlopen(url).read() |
959 except Exception: | 770 except Exception: |
960 if attempt == ATTEMPTS - 1: | 771 if attempt == ATTEMPTS - 1: |
961 raise | 772 raise |
962 | 773 |
963 | 774 |
964 def parse_diff(diff): | |
965 """Takes a unified diff and returns a list of diffed files and their diffs. | |
966 | |
967 The return format is a list of pairs of: | |
968 (<filename>, <diff contents>) | |
969 <diff contents> is inclusive of the diff line. | |
970 """ | |
971 result = [] | |
972 current_diff = '' | |
973 current_header = None | |
974 for line in diff.splitlines(): | |
975 # "diff" is for git style patches, and "Index: " is for SVN style patches. | |
976 if line.startswith('diff') or line.startswith('Index: '): | |
977 if current_header: | |
978 # If we are in a diff portion, then save the diff. | |
979 result.append((current_header, '%s\n' % current_diff)) | |
980 git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line) | |
981 svn_header_match = re.match(r'Index: (.*)', line) | |
982 | |
983 if git_header_match: | |
984 # First, see if it's a git style header. | |
985 from_file = git_header_match.group(1) | |
986 to_file = git_header_match.group(2) | |
987 if from_file != to_file and from_file.startswith('a/'): | |
988 # Sometimes git prepends 'a/' and 'b/' in front of file paths. | |
989 from_file = from_file[2:] | |
990 current_header = from_file | |
991 | |
992 elif svn_header_match: | |
993 # Otherwise, check if it's an SVN style header. | |
994 current_header = svn_header_match.group(1) | |
995 | |
996 else: | |
997 # Otherwise... I'm not really sure what to do with this. | |
998 raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' % | |
999 (line, diff)) | |
1000 | |
1001 current_diff = '' | |
1002 current_diff += '%s\n' % line | |
1003 if current_header: | |
1004 # We hit EOF, gotta save the last diff. | |
1005 result.append((current_header, current_diff)) | |
1006 return result | |
1007 | |
1008 | |
1009 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, | 775 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, |
1010 email_file, key_file, whitelist=None, blacklist=None): | 776 email_file, key_file, whitelist=None, blacklist=None): |
1011 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') | 777 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') |
1012 else 'apply_issue') | 778 else 'apply_issue') |
1013 cmd = [apply_issue_bin, | 779 cmd = [apply_issue_bin, |
1014 # The patch will be applied on top of this directory. | 780 # The patch will be applied on top of this directory. |
1015 '--root_dir', root, | 781 '--root_dir', root, |
1016 # Tell apply_issue how to fetch the patch. | 782 # Tell apply_issue how to fetch the patch. |
1017 '--issue', issue, | 783 '--issue', issue, |
1018 '--server', server, | 784 '--server', server, |
(...skipping 77 matching lines...)
1096 os.remove(flag_file) | 862 os.remove(flag_file) |
1097 | 863 |
1098 | 864 |
1099 def emit_flag(flag_file): | 865 def emit_flag(flag_file): |
1100 """Deposit a bot update flag on the system to tell gclient not to run.""" | 866 """Deposit a bot update flag on the system to tell gclient not to run.""" |
1101 print 'Emitting flag file at %s' % flag_file | 867 print 'Emitting flag file at %s' % flag_file |
1102 with open(flag_file, 'wb') as f: | 868 with open(flag_file, 'wb') as f: |
1103 f.write('Success!') | 869 f.write('Success!') |
1104 | 870 |
1105 | 871 |
1106 def get_commit_position_for_git_svn(url, revision): | |
1107 """Generates a commit position string for a 'git-svn' URL/revision. | |
1108 | |
1109 If the 'git-svn' URL maps to a known project, we will construct a commit | |
1110 position branch value by applying substitution on the SVN URL. | |
1111 """ | |
1112 # Identify the base URL so we can strip off trunk/branch name | |
1113 project_config = branch = None | |
1114 for _, project_config in GIT_SVN_PROJECT_MAP.iteritems(): | |
1115 if url.startswith(project_config['svn_url']): | |
1116 branch = url[len(project_config['svn_url']):] | |
1117 break | |
1118 | |
1119 if branch: | |
1120 # Strip any leading slashes | |
1121 branch = branch.lstrip('/') | |
1122 | |
1123 # Try and map the branch | |
1124 for pattern, repl in project_config.get('branch_map', ()): | |
1125 nbranch, subn = re.subn(pattern, repl, branch, count=1) | |
1126 if subn: | |
1127 print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % ( | |
1128 branch, nbranch) | |
1129 branch = nbranch | |
1130 break | |
1131 else: | |
1132 # Use generic 'svn' branch | |
1133 print 'INFO: Could not resolve project for SVN URL %r' % (url,) | |
1134 branch = 'svn' | |
1135 return '%s@{#%s}' % (branch, revision) | |
1136 | |
1137 | |
1138 def get_commit_position(git_path, revision='HEAD'): | 872 def get_commit_position(git_path, revision='HEAD'): |
1139 """Dumps the 'git' log for a specific revision and parses out the commit | 873 """Dumps the 'git' log for a specific revision and parses out the commit |
1140 position. | 874 position. |
1141 | 875 |
1142 If a commit position metadata key is found, its value will be returned. | 876 If a commit position metadata key is found, its value will be returned. |
1143 | |
1144 Otherwise, we will search for a 'git-svn' metadata entry. If one is found, | |
1145 we will compose a commit position from it, using its SVN revision value as | |
1146 the revision. | |
1147 | |
1148 If the 'git-svn' URL maps to a known project, we will construct a commit | |
1149 position branch value by truncating the URL, mapping 'trunk' to | |
1150 "refs/heads/master". Otherwise, we will return the generic branch, 'svn'. | |
1151 """ | 877 """ |
| 878 # TODO(iannucci): Use git-footers for this. |
1152 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) | 879 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) |
1153 footer_map = get_commit_message_footer_map(git_log) | 880 footer_map = get_commit_message_footer_map(git_log) |
1154 | 881 |
1155 # Search for commit position metadata | 882 # Search for commit position metadata |
1156 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or | 883 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or |
1157 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) | 884 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) |
1158 if value: | 885 if value: |
1159 return value | 886 return value |
1160 | |
1161 # Compose a commit position from 'git-svn' metadata | |
1162 value = footer_map.get(GIT_SVN_ID_FOOTER_KEY) | |
1163 if value: | |
1164 m = GIT_SVN_ID_RE.match(value) | |
1165 if not m: | |
1166 raise ValueError("Invalid 'git-svn' value: [%s]" % (value,)) | |
1167 return get_commit_position_for_git_svn(m.group(1), m.group(2)) | |
1168 return None | 887 return None |
1169 | 888 |
1170 | 889 |
1171 def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs): | 890 def parse_got_revision(gclient_output, got_revision_mapping): |
1172 """Translate git gclient revision mapping to build properties. | 891 """Translate git gclient revision mapping to build properties.""" |
1173 | |
1174 If use_svn_revs is True, then translate git hashes in the revision mapping | |
1175 to svn revision numbers. | |
1176 """ | |
1177 properties = {} | 892 properties = {} |
1178 solutions_output = { | 893 solutions_output = { |
1179 # Make sure path always ends with a single slash. | 894 # Make sure path always ends with a single slash. |
1180 '%s/' % path.rstrip('/') : solution_output for path, solution_output | 895 '%s/' % path.rstrip('/') : solution_output for path, solution_output |
1181 in gclient_output['solutions'].iteritems() | 896 in gclient_output['solutions'].iteritems() |
1182 } | 897 } |
1183 for dir_name, property_name in got_revision_mapping.iteritems(): | 898 for dir_name, property_name in got_revision_mapping.iteritems(): |
1184 # Make sure dir_name always ends with a single slash. | 899 # Make sure dir_name always ends with a single slash. |
1185 dir_name = '%s/' % dir_name.rstrip('/') | 900 dir_name = '%s/' % dir_name.rstrip('/') |
1186 if dir_name not in solutions_output: | 901 if dir_name not in solutions_output: |
1187 continue | 902 continue |
1188 solution_output = solutions_output[dir_name] | 903 solution_output = solutions_output[dir_name] |
1189 if solution_output.get('scm') is None: | 904 if solution_output.get('scm') is None: |
1190 # This is an ignored DEPS, so the output got_revision should be 'None'. | 905 # This is an ignored DEPS, so the output got_revision should be 'None'. |
1191 git_revision = revision = commit_position = None | 906 git_revision = revision = commit_position = None |
1192 else: | 907 else: |
1193 # Since we are using .DEPS.git, everything had better be git. | 908 # Since we are using .DEPS.git, everything had better be git. |
1194 assert solution_output.get('scm') == 'git' | 909 assert solution_output.get('scm') == 'git' |
1195 git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() | 910 revision = git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() |
1196 if use_svn_revs: | |
1197 revision = get_svn_rev(git_revision, dir_name) | |
1198 if not revision: | |
1199 revision = git_revision | |
1200 else: | |
1201 revision = git_revision | |
1202 commit_position = get_commit_position(dir_name) | 911 commit_position = get_commit_position(dir_name) |
1203 | 912 |
1204 properties[property_name] = revision | 913 properties[property_name] = revision |
1205 if revision != git_revision: | 914 if revision != git_revision: |
1206 properties['%s_git' % property_name] = git_revision | 915 properties['%s_git' % property_name] = git_revision |
1207 if commit_position: | 916 if commit_position: |
1208 properties['%s_cp' % property_name] = commit_position | 917 properties['%s_cp' % property_name] = commit_position |
1209 | 918 |
1210 return properties | 919 return properties |
1211 | 920 |
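A rough, self-contained sketch of the translation parse_got_revision() performs, using fabricated revisions in place of real git rev-parse calls:

    # Stand-ins for the revision mapping and for git rev-parse HEAD output.
    got_revision_mapping = {'src': 'got_revision', 'src/v8': 'got_v8_revision'}
    fake_head_revisions = {
        'src/': 'f8ae23c0deadbeef',
        'src/v8/': '0123456789abcdef',
    }
    properties = {}
    for dir_name, property_name in got_revision_mapping.iteritems():
        dir_name = '%s/' % dir_name.rstrip('/')  # normalize to one trailing slash
        properties[property_name] = fake_head_revisions[dir_name]
    # properties == {'got_revision': 'f8ae23c0deadbeef',
    #                'got_v8_revision': '0123456789abcdef'}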
(...skipping 11 matching lines...)
1223 def ensure_deps_revisions(deps_url_mapping, solutions, revisions): | 932 def ensure_deps_revisions(deps_url_mapping, solutions, revisions): |
1224 """Ensure correct DEPS revisions, ignores solutions.""" | 933 """Ensure correct DEPS revisions, ignores solutions.""" |
1225 for deps_name, deps_data in sorted(deps_url_mapping.items()): | 934 for deps_name, deps_data in sorted(deps_url_mapping.items()): |
1226 if deps_name.strip('/') in solutions: | 935 if deps_name.strip('/') in solutions: |
1227 # This has already been forced to the correct solution by git_checkout(). | 936 # This has already been forced to the correct solution by git_checkout(). |
1228 continue | 937 continue |
1229 revision = get_target_revision(deps_name, deps_data.get('url', None), | 938 revision = get_target_revision(deps_name, deps_data.get('url', None), |
1230 revisions) | 939 revisions) |
1231 if not revision: | 940 if not revision: |
1232 continue | 941 continue |
1233 # TODO(hinoka): Catch SVNRevisionNotFound error maybe? | |
1234 git('fetch', 'origin', cwd=deps_name) | 942 git('fetch', 'origin', cwd=deps_name) |
1235 force_revision(deps_name, revision) | 943 force_revision(deps_name, revision) |
1236 | 944 |
1237 | 945 |
1238 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, | 946 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
1239 patch_root, issue, patchset, rietveld_server, | 947 patch_root, issue, patchset, rietveld_server, |
1240 gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, | 948 gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, |
1241 revision_mapping, apply_issue_email_file, | 949 revision_mapping, apply_issue_email_file, |
1242 apply_issue_key_file, buildspec, gyp_env, shallow, runhooks, | 950 apply_issue_key_file, gyp_env, shallow, runhooks, |
1243 refs, git_cache_dir, gerrit_reset): | 951 refs, git_cache_dir, gerrit_reset): |
1244 # Get a checkout of each solution, without DEPS or hooks. | 952 # Get a checkout of each solution, without DEPS or hooks. |
1245 # Calling git directly because there is no way to run Gclient without | 953 # Calling git directly because there is no way to run Gclient without |
1246 # invoking DEPS. | 954 # invoking DEPS. |
1247 print 'Fetching Git checkout' | 955 print 'Fetching Git checkout' |
1248 | 956 |
1249 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) | 957 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) |
1250 | 958 |
1251 print '===Processing patch solutions===' | 959 print '===Processing patch solutions===' |
1252 already_patched = [] | 960 already_patched = [] |
(...skipping 16 matching lines...)
1269 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, | 977 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, |
1270 gerrit_rebase_patch_ref) | 978 gerrit_rebase_patch_ref) |
1271 applied_gerrit_patch = True | 979 applied_gerrit_patch = True |
1272 | 980 |
1273 # Ensure our build/ directory is set up with the correct .gclient file. | 981 # Ensure our build/ directory is set up with the correct .gclient file. |
1274 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) | 982 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) |
1275 | 983 |
1276 # Let gclient do the DEPS syncing. | 984 # Let gclient do the DEPS syncing. |
1277 # The branch-head refspec is a special case because it's possible Chrome | 985 # The branch-head refspec is a special case because it's possible Chrome |
1278 # src, which contains the branch-head refspecs, is DEPSed in. | 986 # src, which contains the branch-head refspecs, is DEPSed in. |
1279 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs, | 987 gclient_output = gclient_sync(BRANCH_HEADS_REFSPEC in refs, shallow) |
1280 shallow) | |
1281 | 988 |
1282 # Now that gclient_sync has finished, we should revert any .DEPS.git so that | 989 # Now that gclient_sync has finished, we should revert any .DEPS.git so that |
1283 # presubmit doesn't complain about it being modified. | 990 # presubmit doesn't complain about it being modified. |
1284 if (not buildspec and | 991 if git('ls-files', '.DEPS.git', cwd=first_sln).strip(): |
1285 git('ls-files', '.DEPS.git', cwd=first_sln).strip()): | |
1286 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln) | 992 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln) |
1287 | 993 |
1288 if buildspec and runhooks: | |
1289 # Run gclient runhooks if we're on an official builder. | |
1290 # TODO(hinoka): Remove this when the official builders run their own | |
1291 # runhooks step. | |
1292 gclient_runhooks(gyp_env) | |
1293 | |
1294 # Finally, ensure that all DEPS are pinned to the correct revision. | 994 # Finally, ensure that all DEPS are pinned to the correct revision. |
1295 dir_names = [sln['name'] for sln in solutions] | 995 dir_names = [sln['name'] for sln in solutions] |
1296 ensure_deps_revisions(gclient_output.get('solutions', {}), | 996 ensure_deps_revisions(gclient_output.get('solutions', {}), |
1297 dir_names, revisions) | 997 dir_names, revisions) |
1298 # Apply the rest of the patch here (sans DEPS) | 998 # Apply the rest of the patch here (sans DEPS) |
1299 if issue: | 999 if issue: |
1300 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, | 1000 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
1301 revision_mapping, git_ref, apply_issue_email_file, | 1001 revision_mapping, git_ref, apply_issue_email_file, |
1302 apply_issue_key_file, blacklist=already_patched) | 1002 apply_issue_key_file, blacklist=already_patched) |
1303 elif gerrit_ref and not applied_gerrit_patch: | 1003 elif gerrit_ref and not applied_gerrit_patch: |
(...skipping 25 matching lines...)
1329 expanded_revisions.extend(revision.split(',')) | 1029 expanded_revisions.extend(revision.split(',')) |
1330 for revision in expanded_revisions: | 1030 for revision in expanded_revisions: |
1331 split_revision = revision.split('@') | 1031 split_revision = revision.split('@') |
1332 if len(split_revision) == 1: | 1032 if len(split_revision) == 1: |
1333 # This is just a plain revision, set it as the revision for root. | 1033 # This is just a plain revision, set it as the revision for root. |
1334 results[root] = split_revision[0] | 1034 results[root] = split_revision[0] |
1335 elif len(split_revision) == 2: | 1035 elif len(split_revision) == 2: |
1336 # This is an alt_root@revision argument. | 1036 # This is an alt_root@revision argument. |
1337 current_root, current_rev = split_revision | 1037 current_root, current_rev = split_revision |
1338 | 1038 |
1339 # We want to normalize svn/git urls into .git urls. | |
1340 parsed_root = urlparse.urlparse(current_root) | 1039 parsed_root = urlparse.urlparse(current_root) |
1341 if parsed_root.scheme == 'svn': | 1040 if parsed_root.scheme in ['http', 'https']: |
1342 if parsed_root.path in RECOGNIZED_PATHS: | 1041 # We want to normalize git urls into .git urls. |
1343 normalized_root = RECOGNIZED_PATHS[parsed_root.path] | |
1344 else: | |
1345 print 'WARNING: SVN path %s not recognized, ignoring' % current_root | |
1346 continue | |
1347 elif parsed_root.scheme in ['http', 'https']: | |
1348 normalized_root = 'https://%s/%s' % (parsed_root.netloc, | 1042 normalized_root = 'https://%s/%s' % (parsed_root.netloc, |
1349 parsed_root.path) | 1043 parsed_root.path) |
1350 if not normalized_root.endswith('.git'): | 1044 if not normalized_root.endswith('.git'): |
1351 normalized_root = '%s.git' % normalized_root | 1045 normalized_root = '%s.git' % normalized_root |
1352 elif parsed_root.scheme: | 1046 elif parsed_root.scheme: |
1353 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme | 1047 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme |
1354 continue | 1048 continue |
1355 else: | 1049 else: |
1356 # This is probably a local path. | 1050 # This is probably a local path. |
1357 normalized_root = current_root.strip('/') | 1051 normalized_root = current_root.strip('/') |
(...skipping 35 matching lines...)
1393 'bot_update\'s whitelist, bot_update will be noop.') | 1087 'bot_update\'s whitelist, bot_update will be noop.') |
1394 parse.add_option('-f', '--force', action='store_true', | 1088 parse.add_option('-f', '--force', action='store_true', |
1395 help='Bypass check to see if we want to be run. ' | 1089 help='Bypass check to see if we want to be run. ' |
1396 'Should ONLY be used locally or by smart recipes.') | 1090 'Should ONLY be used locally or by smart recipes.') |
1397 parse.add_option('--revision_mapping', | 1091 parse.add_option('--revision_mapping', |
1398 help='{"path/to/repo/": "property_name"}') | 1092 help='{"path/to/repo/": "property_name"}') |
1399 parse.add_option('--revision_mapping_file', | 1093 parse.add_option('--revision_mapping_file', |
1400 help=('Same as revision_mapping, except it is a path to a json' | 1093 help=('Same as revision_mapping, except it is a path to a json' |
1401 ' file containing that format.')) | 1095 ' file containing that format.')) |
1402 parse.add_option('--revision', action='append', default=[], | 1096 parse.add_option('--revision', action='append', default=[], |
1403 help='Revision to check out. Can be an SVN revision number, ' | 1097 help='Revision to check out. Can be any form of git ref. ' |
1404 'git hash, or any form of git ref. Can prepend ' | 1098 'Can prepend root@<rev> to specify which repository, ' |
1405 'root@<rev> to specify which repository, where root ' | 1099 'where root is either a filesystem path or git https ' |
1406 'is either a filesystem path, git https url, or ' | 1100 'url. To specify Tip of Tree, set rev to HEAD. ') |
1407 'svn url. To specify Tip of Tree, set rev to HEAD.' | |
1408 'To specify a git branch and an SVN rev, <rev> can be ' | |
1409 'set to <branch>:<revision>.') | |
1410 parse.add_option('--output_manifest', action='store_true', | 1101 parse.add_option('--output_manifest', action='store_true', |
1411 help=('Add manifest json to the json output.')) | 1102 help=('Add manifest json to the json output.')) |
1412 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], | 1103 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], |
1413 help='Hostname of the current machine, ' | 1104 help='Hostname of the current machine, ' |
1414 'used for determining whether or not to activate.') | 1105 'used for determining whether or not to activate.') |
1415 parse.add_option('--builder_name', help='Name of the builder, ' | 1106 parse.add_option('--builder_name', help='Name of the builder, ' |
1416 'used for determining whether or not to activate.') | 1107 'used for determining whether or not to activate.') |
1417 parse.add_option('--build_dir', default=os.getcwd()) | 1108 parse.add_option('--build_dir', default=os.getcwd()) |
1418 parse.add_option('--flag_file', default=path.join(os.getcwd(), | 1109 parse.add_option('--flag_file', default=path.join(os.getcwd(), |
1419 'update.flag')) | 1110 'update.flag')) |
(...skipping 53 matching lines...)
1473 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') | 1164 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') |
1474 | 1165 |
1475 return options, args | 1166 return options, args |
1476 | 1167 |
1477 | 1168 |
1478 def prepare(options, git_slns, active): | 1169 def prepare(options, git_slns, active): |
1479 """Prepares the target folder before we checkout.""" | 1170 """Prepares the target folder before we checkout.""" |
1480 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1171 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
1481 # If we're active now, but the flag file doesn't exist (we weren't active | 1172 # If we're active now, but the flag file doesn't exist (we weren't active |
1482 # last run) or vice versa, blow away all checkouts. | 1173 # last run) or vice versa, blow away all checkouts. |
1483 if bool(active) != bool(check_flag(options.flag_file)): | 1174 if options.clobber or (bool(active) != bool(check_flag(options.flag_file))): |
1484 ensure_no_checkout(dir_names, '*') | 1175 ensure_no_checkout(dir_names) |
1485 if options.output_json: | 1176 if options.output_json: |
1486 # Make sure we tell recipes that we didn't run if the script exits here. | 1177 # Make sure we tell recipes that we didn't run if the script exits here. |
1487 emit_json(options.output_json, did_run=active) | 1178 emit_json(options.output_json, did_run=active) |
1488 if active: | 1179 emit_flag(options.flag_file) |
1489 if options.clobber: | |
1490 ensure_no_checkout(dir_names, '*') | |
1491 else: | |
1492 ensure_no_checkout(dir_names, '.svn') | |
1493 emit_flag(options.flag_file) | |
1494 else: | |
1495 delete_flag(options.flag_file) | |
1496 raise Inactive # This is caught in main() and we exit cleanly. | |
1497 | 1180 |
1498 # Do a shallow checkout if the disk is less than 100GB. | 1181 # Do a shallow checkout if the disk is less than 100GB. |
1499 total_disk_space, free_disk_space = get_total_disk_space() | 1182 total_disk_space, free_disk_space = get_total_disk_space() |
1500 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) | 1183 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) |
1501 used_disk_space_gb = int((total_disk_space - free_disk_space) | 1184 used_disk_space_gb = int((total_disk_space - free_disk_space) |
1502 / (1024 * 1024 * 1024)) | 1185 / (1024 * 1024 * 1024)) |
1503 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) | 1186 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) |
1504 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, | 1187 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, |
1505 total_disk_space_gb, | 1188 total_disk_space_gb, |
1506 percent_used) | 1189 percent_used) |
1507 if not options.output_json: | 1190 if not options.output_json: |
1508 print '@@@STEP_TEXT@%s@@@' % step_text | 1191 print '@@@STEP_TEXT@%s@@@' % step_text |
1509 if not options.shallow: | 1192 if not options.shallow: |
1510 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD | 1193 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD |
1511 and not options.no_shallow) | 1194 and not options.no_shallow) |
1512 | 1195 |
1513 # The first solution is where the primary DEPS file resides. | 1196 # The first solution is where the primary DEPS file resides. |
1514 first_sln = dir_names[0] | 1197 first_sln = dir_names[0] |
1515 | 1198 |
1516 # Split all the revision specifications into a nice dict. | 1199 # Split all the revision specifications into a nice dict. |
1517 print 'Revisions: %s' % options.revision | 1200 print 'Revisions: %s' % options.revision |
1518 revisions = parse_revisions(options.revision, first_sln) | 1201 revisions = parse_revisions(options.revision, first_sln) |
1519 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) | 1202 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) |
1520 return revisions, step_text | 1203 return revisions, step_text |
1521 | 1204 |
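As an illustration of the --revision forms accepted here (all values below are hypothetical):

    # Plain revision: applies to the first solution.
    #   --revision 0123456789abcdef
    # Scoped revision: <root>@<rev>, where root is a checkout path or a git https URL.
    #   --revision src@refs/branch-heads/2454
    #   --revision https://chromium.googlesource.com/v8/v8@HEAD
    revisions = parse_revisions(
        ['0123456789abcdef', 'src/v8@refs/branch-heads/4.5'], 'src')
    # roughly -> {'src': '0123456789abcdef', 'src/v8': 'refs/branch-heads/4.5'}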
1522 | 1205 |
1523 def checkout(options, git_slns, specs, buildspec, master, | 1206 def checkout(options, git_slns, specs, master, revisions, step_text): |
1524 svn_root, revisions, step_text): | |
1525 first_sln = git_slns[0]['name'] | 1207 first_sln = git_slns[0]['name'] |
1526 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1208 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
1527 try: | 1209 try: |
1528 # Outer try is for catching patch failures and exiting gracefully. | 1210 # Outer try is for catching patch failures and exiting gracefully. |
1529 # Inner try is for catching gclient failures and retrying gracefully. | 1211 # Inner try is for catching gclient failures and retrying gracefully. |
1530 try: | 1212 try: |
1531 checkout_parameters = dict( | 1213 checkout_parameters = dict( |
1532 # First, pass in the base of what we want to check out. | 1214 # First, pass in the base of what we want to check out. |
1533 solutions=git_slns, | 1215 solutions=git_slns, |
1534 revisions=revisions, | 1216 revisions=revisions, |
1535 first_sln=first_sln, | 1217 first_sln=first_sln, |
1536 | 1218 |
1537 # Also, target os variables for gclient. | 1219 # Also, target os variables for gclient. |
1538 target_os=specs.get('target_os', []), | 1220 target_os=specs.get('target_os', []), |
1539 target_os_only=specs.get('target_os_only', False), | 1221 target_os_only=specs.get('target_os_only', False), |
1540 | 1222 |
1541 # Then, pass in information about how to patch. | 1223 # Then, pass in information about how to patch. |
1542 patch_root=options.patch_root, | 1224 patch_root=options.patch_root, |
1543 issue=options.issue, | 1225 issue=options.issue, |
1544 patchset=options.patchset, | 1226 patchset=options.patchset, |
1545 rietveld_server=options.rietveld_server, | 1227 rietveld_server=options.rietveld_server, |
1546 gerrit_repo=options.gerrit_repo, | 1228 gerrit_repo=options.gerrit_repo, |
1547 gerrit_ref=options.gerrit_ref, | 1229 gerrit_ref=options.gerrit_ref, |
1548 gerrit_rebase_patch_ref=not options.gerrit_no_rebase_patch_ref, | 1230 gerrit_rebase_patch_ref=not options.gerrit_no_rebase_patch_ref, |
1549 revision_mapping=options.revision_mapping, | 1231 revision_mapping=options.revision_mapping, |
1550 apply_issue_email_file=options.apply_issue_email_file, | 1232 apply_issue_email_file=options.apply_issue_email_file, |
1551 apply_issue_key_file=options.apply_issue_key_file, | 1233 apply_issue_key_file=options.apply_issue_key_file, |
1552 | 1234 |
1553 # For official builders. | 1235 # For official builders. |
1554 buildspec=buildspec, | |
1555 gyp_env=options.gyp_env, | 1236 gyp_env=options.gyp_env, |
1556 runhooks=not options.no_runhooks, | 1237 runhooks=not options.no_runhooks, |
1557 | 1238 |
1558 # Finally, extra configurations such as shallowness of the clone. | 1239 # Finally, extra configurations such as shallowness of the clone. |
1559 shallow=options.shallow, | 1240 shallow=options.shallow, |
1560 refs=options.refs, | 1241 refs=options.refs, |
1561 git_cache_dir=options.git_cache_dir, | 1242 git_cache_dir=options.git_cache_dir, |
1562 gerrit_reset=not options.gerrit_no_reset) | 1243 gerrit_reset=not options.gerrit_no_reset) |
1563 gclient_output = ensure_checkout(**checkout_parameters) | 1244 gclient_output = ensure_checkout(**checkout_parameters) |
1564 except GclientSyncFailed: | 1245 except GclientSyncFailed: |
1565 print 'We failed gclient sync, lets delete the checkout and retry.' | 1246 print 'We failed gclient sync, lets delete the checkout and retry.' |
1566 ensure_no_checkout(dir_names, '*') | 1247 ensure_no_checkout(dir_names) |
1567 gclient_output = ensure_checkout(**checkout_parameters) | 1248 gclient_output = ensure_checkout(**checkout_parameters) |
1568 except PatchFailed as e: | 1249 except PatchFailed as e: |
1569 if options.output_json: | 1250 if options.output_json: |
1570 # Tell recipes information such as root, got_revision, etc. | 1251 # Tell recipes information such as root, got_revision, etc. |
1571 emit_json(options.output_json, | 1252 emit_json(options.output_json, |
1572 did_run=True, | 1253 did_run=True, |
1573 root=first_sln, | 1254 root=first_sln, |
1574 log_lines=[('patch error', e.output),], | 1255 log_lines=[('patch error', e.output),], |
1575 patch_apply_return_code=e.code, | 1256 patch_apply_return_code=e.code, |
1576 patch_root=options.patch_root, | 1257 patch_root=options.patch_root, |
1577 patch_failure=True, | 1258 patch_failure=True, |
1578 step_text='%s PATCH FAILED' % step_text, | 1259 step_text='%s PATCH FAILED' % step_text, |
1579 fixed_revisions=revisions) | 1260 fixed_revisions=revisions) |
1580 else: | 1261 else: |
1581 # If we're not on recipes, tell annotator about our got_revisions. | 1262 # If we're not on recipes, tell annotator about our got_revisions. |
1582 emit_log_lines('patch error', e.output) | 1263 emit_log_lines('patch error', e.output) |
1583 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text | 1264 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text |
1584 raise | 1265 raise |
1585 | 1266 |
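ensure_no_checkout is also defined earlier in the file and not shown in this hunk; the inner retry above is a wipe-and-rerun pattern. A rough, hypothetical stand-in, assuming that wiping simply removes the solution directories from the working directory:

import os
import shutil

def ensure_no_checkout_sketch(dir_names):
  # Hypothetical: remove any stale solution checkouts so the retry starts clean.
  for name in dir_names:
    if os.path.isdir(name):
      shutil.rmtree(name, ignore_errors=True)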
1586 # Revision is an svn revision, unless it's a git master. | |
1587 use_svn_rev = master not in GIT_MASTERS | |
1588 | |
1589 # Take care of got_revisions outputs. | 1267 # Take care of got_revisions outputs. |
1590 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {})) | 1268 revision_mapping = GOT_REVISION_MAPPINGS.get(git_slns[0]['url'], {}) |
1591 if options.revision_mapping: | 1269 if options.revision_mapping: |
1592 revision_mapping.update(options.revision_mapping) | 1270 revision_mapping.update(options.revision_mapping) |
1593 | 1271 |
1594 # If the repo is not in the default GOT_REVISION_MAPPINGS and no | 1272 # If the repo is not in the default GOT_REVISION_MAPPINGS and no |
1595 # revision_mapping was specified on the command line, then | 1273 # revision_mapping was specified on the command line, then |
1596 # default to setting 'got_revision' based on the first solution. | 1274 # default to setting 'got_revision' based on the first solution. |
1597 if not revision_mapping: | 1275 if not revision_mapping: |
1598 revision_mapping[first_sln] = 'got_revision' | 1276 revision_mapping[first_sln] = 'got_revision' |
1599 | 1277 |
1600 got_revisions = parse_got_revision(gclient_output, revision_mapping, | 1278 got_revisions = parse_got_revision(gclient_output, revision_mapping) |
1601 use_svn_rev) | |
1602 | 1279 |
1603 if not got_revisions: | 1280 if not got_revisions: |
1604 # TODO(hinoka): We should probably bail out here, but in the interest | 1281 # TODO(hinoka): We should probably bail out here, but in the interest |
1605 # of giving mis-configured bots some time to get fixed, use a dummy | 1282 # of giving mis-configured bots some time to get fixed, use a dummy |
1606 # revision here. | 1283 # revision here. |
1607 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } | 1284 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } |
1608 #raise Exception('No got_revision(s) found in gclient output') | 1285 #raise Exception('No got_revision(s) found in gclient output') |
1609 | 1286 |
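parse_got_revision is likewise defined outside this hunk. The sketch below shows the general idea implied by the code above (map checked-out solution paths to property names such as 'got_revision'); the assumed shape of gclient_output, a 'solutions' dict keyed by directory, is an illustration rather than the real structure returned by ensure_checkout.

def parse_got_revision_sketch(gclient_output, revision_mapping):
  # Assumed gclient_output shape: {'solutions': {'src/': {'revision': 'abc123'}}}.
  properties = {}
  solutions = gclient_output.get('solutions', {})
  for directory, property_name in revision_mapping.items():
    info = solutions.get(directory) or solutions.get(directory + '/') or {}
    revision = info.get('revision')
    if revision:
      properties[property_name] = revision
  return properties

With revision_mapping = {'src': 'got_revision'} and a checkout that resolved src to 'abc123', this yields {'got_revision': 'abc123'}; when nothing resolves, the fallback above substitutes 'BOT_UPDATE_NO_REV_FOUND'.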
1610 if options.output_json: | 1287 if options.output_json: |
1611 manifest = create_manifest() if options.output_manifest else None | 1288 manifest = create_manifest() if options.output_manifest else None |
(...skipping 53 matching lines...)
1665 # Check if this script should activate or not. | 1342 # Check if this script should activate or not. |
1666 active = options.force or check_valid_host(master, builder, slave) | 1343 active = options.force or check_valid_host(master, builder, slave) |
1667 | 1344 |
1668 # Print a helpful message to tell developers what's going on with this step. | 1345 # Print a helpful message to tell developers what's going on with this step. |
1669 print_help_text( | 1346 print_help_text( |
1670 options.force, options.output_json, active, master, builder, slave) | 1347 options.force, options.output_json, active, master, builder, slave) |
1671 | 1348 |
1672 # Parse, manipulate, and print the gclient solutions. | 1349 # Parse, manipulate, and print the gclient solutions. |
1673 specs = {} | 1350 specs = {} |
1674 exec(options.specs, specs) | 1351 exec(options.specs, specs) |
1675 svn_solutions = specs.get('solutions', []) | 1352 orig_solutions = specs.get('solutions', []) |
1676 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions) | 1353 git_slns = modify_solutions(orig_solutions) |
1677 options.revision = maybe_ignore_revision(options.revision, buildspec) | |
1678 | 1354 |
1679 solutions_printer(git_slns) | 1355 solutions_printer(git_slns) |
1680 | 1356 |
1681 try: | 1357 try: |
1682 # Dun dun dun, the main part of bot_update. | 1358 # Dun dun dun, the main part of bot_update. |
1683 revisions, step_text = prepare(options, git_slns, active) | 1359 revisions, step_text = prepare(options, git_slns, active) |
1684 checkout(options, git_slns, specs, buildspec, master, svn_root, revisions, | 1360 checkout(options, git_slns, specs, master, revisions, step_text) |
1685 step_text) | |
1686 | 1361 |
1687 except Inactive: | |
1688 # Not active, should count as passing. | |
1689 pass | |
1690 except PatchFailed as e: | 1362 except PatchFailed as e: |
1691 emit_flag(options.flag_file) | 1363 emit_flag(options.flag_file) |
1692 # Return a specific non-zero exit code for patch failure (because it is | 1364 # Return a specific non-zero exit code for patch failure (because it is |
1693 # a failure), but make it different than other failures to distinguish | 1365 # a failure), but make it different than other failures to distinguish |
1694 # between infra failures (independent from patch author), and patch | 1366 # between infra failures (independent from patch author), and patch |
1695 # failures (that patch author can fix). However, PatchFailure due to | 1367 # failures (that patch author can fix). However, PatchFailure due to |
1696 # download patch failure is still an infra problem. | 1368 # download patch failure is still an infra problem. |
1697 if e.code == 3: | 1369 if e.code == 3: |
1698 # Patch download problem. | 1370 # Patch download problem. |
1699 return 87 | 1371 return 87 |
1700 # Genuine patch problem. | 1372 # Genuine patch problem. |
1701 return 88 | 1373 return 88 |
1702 except Exception: | 1374 except Exception: |
1703 # Unexpected failure. | 1375 # Unexpected failure. |
1704 emit_flag(options.flag_file) | 1376 emit_flag(options.flag_file) |
1705 raise | 1377 raise |
1706 else: | 1378 else: |
1707 emit_flag(options.flag_file) | 1379 emit_flag(options.flag_file) |
1708 | 1380 |
1709 | 1381 |
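The exit codes returned above form the contract callers see: 87 for a patch-download problem (an infra failure), 88 for a genuine patch failure the author can fix, and a propagated exception for anything else. A hypothetical caller-side sketch (the wrapper name and command line are illustrative only):

import subprocess

def run_bot_update(cmd):
  # Interpret bot_update's documented exit codes on the caller side.
  code = subprocess.call(cmd)
  if code == 0:
    return 'ok'
  if code == 87:
    return 'infra failure: patch download problem'
  if code == 88:
    return 'patch failure: the patch author should fix the patch'
  return 'infra failure: unexpected error (exit code %d)' % code

Example: run_bot_update(['python', 'bot_update.py', '--force']) returns a short classification string instead of forcing every caller to memorize the codes.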
1710 if __name__ == '__main__': | 1382 if __name__ == '__main__': |
1711 sys.exit(main()) | 1383 sys.exit(main()) |