| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 # TODO(hinoka): Use logging. | 6 # TODO(hinoka): Use logging. |
| 7 | 7 |
| 8 import cStringIO | 8 import cStringIO |
| 9 import codecs | 9 import codecs |
| 10 import collections | 10 import collections |
| (...skipping 68 matching lines...) |
| 79 path.join(ROOT_DIR, # .recipe_deps | 79 path.join(ROOT_DIR, # .recipe_deps |
| 80 path.pardir, # slave | 80 path.pardir, # slave |
| 81 path.pardir, # scripts | 81 path.pardir, # scripts |
| 82 path.pardir), # build_internal | 82 path.pardir), # build_internal |
| 83 ]) | 83 ]) |
| 84 | 84 |
| 85 | 85 |
| 86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' | 86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' |
| 87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' | 87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' |
| 88 | 88 |
| 89 # Official builds use buildspecs, so this is a special case. |
| 90 BUILDSPEC_TYPE = collections.namedtuple('buildspec', |
| 91 ('container', 'version')) |
| 92 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' |
| 93 '(build|branches|releases)/(.+)$') |
| 94 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' |
| 95 'buildspec') |
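The buildspec constants above come together as a special case for official builders: BUILDSPEC_RE picks the container and version out of an SVN buildspec path, and BUILDSPEC_TYPE carries them around. A minimal sketch of that match, with an illustrative path and version number (the same lookup is performed later by solutions_to_git):

    import collections
    import re

    BUILDSPEC_TYPE = collections.namedtuple('buildspec', ('container', 'version'))
    BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/'
                    '(build|branches|releases)/(.+)$')

    # Illustrative SVN path; the version number is made up for this example.
    svn_path = '/chrome-internal/trunk/tools/buildspec/releases/40.0.2214.10'
    m = re.match(BUILDSPEC_RE, svn_path)
    if m:
      spec = BUILDSPEC_TYPE(container=m.group(1), version=m.group(2))
      print spec.container, spec.version  # releases 40.0.2214.10
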
| 89 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' | 96 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*' |
| 90 | 97 |
| 91 BUILDSPEC_COMMIT_RE = ( | 98 BUILDSPEC_COMMIT_RE = ( |
| 92 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), | 99 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'), |
| 93 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), | 100 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'), |
| 94 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), | 101 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'), |
| 95 ) | 102 ) |
| 96 | 103 |
| 97 # Regular expression that matches a single commit footer line. | 104 # Regular expression that matches a single commit footer line. |
| 98 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') | 105 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)') |
| 99 | 106 |
| 100 # Footer metadata keys for regular and gsubtreed mirrored commit positions. | 107 # Footer metadata keys for regular and gsubtreed mirrored commit positions. |
| 101 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' | 108 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position' |
| 102 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' | 109 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position' |
| 103 # Regular expression to parse a commit position | 110 # Regular expression to parse a commit position |
| 104 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') | 111 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') |
| 105 | 112 |
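A minimal sketch of how the two regexes above combine to read a commit position out of a footer line; the footer value itself is illustrative:

    import re

    COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)')
    COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')

    # Illustrative footer line from the end of a Chromium commit message.
    footer = 'Cr-Commit-Position: refs/heads/master@{#334455}'
    key, value = COMMIT_FOOTER_ENTRY_RE.match(footer).groups()
    branch, number = COMMIT_POSITION_RE.match(value).groups()
    print key, branch, number  # Cr-Commit-Position refs/heads/master 334455
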
| 106 # Regular expression to parse gclient's revinfo entries. | 113 # Regular expression to parse gclient's revinfo entries. |
| 107 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') | 114 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') |
| 108 | 115 |
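Likewise, a sketch of REVINFO_RE applied to a single illustrative `gclient revinfo` entry of the form path: url@revision:

    import re

    REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')

    # Illustrative entry; the URL and revision are placeholders.
    line = 'src/v8: https://chromium.googlesource.com/v8/v8.git@deadbeef'
    name, url, rev = REVINFO_RE.match(line).groups()
    print name, url, rev
    # src/v8 https://chromium.googlesource.com/v8/v8.git deadbeef
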
| 116 # Used by 'ResolveSvnRevisionFromGitiles' |
| 117 GIT_SVN_PROJECT_MAP = { |
| 118 'webkit': { |
| 119 'svn_url': 'svn://svn.chromium.org/blink', |
| 120 'branch_map': [ |
| 121 (r'trunk', r'refs/heads/master'), |
| 122 (r'branches/([^/]+)', r'refs/branch-heads/\1'), |
| 123 ], |
| 124 }, |
| 125 'v8': { |
| 126 'svn_url': 'https://v8.googlecode.com/svn', |
| 127 'branch_map': [ |
| 128 (r'trunk', r'refs/heads/candidates'), |
| 129 (r'branches/bleeding_edge', r'refs/heads/master'), |
| 130 (r'branches/([^/]+)', r'refs/branch-heads/\1'), |
| 131 ], |
| 132 }, |
| 133 'nacl': { |
| 134 'svn_url': 'svn://svn.chromium.org/native_client', |
| 135 'branch_map': [ |
| 136 (r'trunk/src/native_client', r'refs/heads/master'), |
| 137 ], |
| 138 }, |
| 139 } |
| 140 |
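The branch_map pairs above are applied with re.subn() further down, in get_commit_position_for_git_svn. A small sketch of that mapping using the v8 entries; the branch name '3.28' is illustrative:

    import re

    # The v8 branch_map entries from GIT_SVN_PROJECT_MAP above.
    branch_map = [
      (r'trunk', r'refs/heads/candidates'),
      (r'branches/bleeding_edge', r'refs/heads/master'),
      (r'branches/([^/]+)', r'refs/branch-heads/\1'),
    ]

    for svn_branch in ('trunk', 'branches/3.28'):
      for pattern, repl in branch_map:
        mapped, subn = re.subn(pattern, repl, svn_branch, count=1)
        if subn:
          print '%s -> %s' % (svn_branch, mapped)
          break
    # trunk -> refs/heads/candidates
    # branches/3.28 -> refs/branch-heads/3.28
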
| 141 # Key for the 'git-svn' ID metadata commit footer entry. |
| 142 GIT_SVN_ID_FOOTER_KEY = 'git-svn-id' |
| 143 # e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117 |
| 144 # ce2b1a6d-e550-0410-aec6-3dcde31c8c00 |
| 145 GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)') |
| 146 |
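A sketch of GIT_SVN_ID_RE applied to the example footer value quoted in the comment above:

    import re

    GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')

    value = ('https://v8.googlecode.com/svn/trunk@23117 '
             'ce2b1a6d-e550-0410-aec6-3dcde31c8c00')
    m = GIT_SVN_ID_RE.match(value)
    print m.group(1), m.group(2)  # https://v8.googlecode.com/svn/trunk 23117
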
| 147 |
| 148 # This is the git mirror of the buildspecs repository. We could rely on the svn |
| 149 # checkout, now that the git buildspecs are checked in alongside the svn |
| 150 # buildspecs, but we're going to want to pull all the buildspecs from here |
| 151 # eventually anyhow, and there's already some logic to pull from git (for the |
| 152 # old git_buildspecs.git repo), so just stick with that. |
| 153 GIT_BUILDSPEC_REPO = ( |
| 154 'https://chrome-internal.googlesource.com/chrome/tools/buildspec') |
| 109 | 155 |
| 110 # Copied from scripts/recipes/chromium.py. | 156 # Copied from scripts/recipes/chromium.py. |
| 111 GOT_REVISION_MAPPINGS = { | 157 GOT_REVISION_MAPPINGS = { |
| 112 CHROMIUM_SRC_URL: { | 158 '/chrome/trunk/src': { |
| 113 'src/': 'got_revision', | 159 'src/': 'got_revision', |
| 114 'src/native_client/': 'got_nacl_revision', | 160 'src/native_client/': 'got_nacl_revision', |
| 115 'src/tools/swarm_client/': 'got_swarm_client_revision', | 161 'src/tools/swarm_client/': 'got_swarm_client_revision', |
| 116 'src/tools/swarming_client/': 'got_swarming_client_revision', | 162 'src/tools/swarming_client/': 'got_swarming_client_revision', |
| 117 'src/third_party/WebKit/': 'got_webkit_revision', | 163 'src/third_party/WebKit/': 'got_webkit_revision', |
| 118 'src/third_party/webrtc/': 'got_webrtc_revision', | 164 'src/third_party/webrtc/': 'got_webrtc_revision', |
| 119 'src/v8/': 'got_v8_revision', | 165 'src/v8/': 'got_v8_revision', |
| 120 } | 166 } |
| 121 } | 167 } |
| 122 | 168 |
| 123 | 169 |
| 124 BOT_UPDATE_MESSAGE = """ | 170 BOT_UPDATE_MESSAGE = """ |
| 125 What is the "Bot Update" step? | 171 What is the "Bot Update" step? |
| 126 ============================== | 172 ============================== |
| 127 | 173 |
| 128 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and | 174 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and |
| 129 its dependencies) is checked out in a consistent state. This means that all of | 175 its dependencies) is checked out in a consistent state. This means that all of |
| 130 the necessary repositories are checked out, no extra repositories are checked | 176 the necessary repositories are checked out, no extra repositories are checked |
| 131 out, and no locally modified files are present. | 177 out, and no locally modified files are present. |
| 132 | 178 |
| 133 These actions used to be taken care of by the "gclient revert" and "update" | 179 These actions used to be taken care of by the "gclient revert" and "update" |
| 134 steps. However, those steps are known to be buggy and occasionally flaky. This | 180 steps. However, those steps are known to be buggy and occasionally flaky. This |
| 135 step has two main advantages over them: | 181 step has two main advantages over them: |
| 136 * it only operates in Git, so the logic can be clearer and cleaner; and | 182 * it only operates in Git, so the logic can be clearer and cleaner; and |
| 137 * it is a slave-side script, so its behavior can be modified without | 183 * it is a slave-side script, so its behavior can be modified without |
| 138 restarting the master. | 184 restarting the master. |
| 139 | 185 |
| 186 Why Git, you ask? Because that is the direction that the Chromium project is |
| 187 heading. This step is an integral part of the transition from using the SVN repo |
| 188 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while |
| 189 we fully convert everything to Git. This message will get out of your way |
| 190 eventually, and the waterfall will be a happier place because of it. |
| 191 |
| 192 This step can be activated or deactivated independently on every builder on |
| 193 every master. When it is active, the "gclient revert" and "update" steps become |
| 194 no-ops. When it is inactive, it prints this message, cleans up after itself, and |
| 195 lets everything else continue as though nothing has changed. Eventually, when |
| 196 everything is stable enough, this step will replace them entirely. |
| 197 |
| 140 Debugging information: | 198 Debugging information: |
| 141 (master/builder/slave may be unspecified on recipes) | 199 (master/builder/slave may be unspecified on recipes) |
| 142 master: %(master)s | 200 master: %(master)s |
| 143 builder: %(builder)s | 201 builder: %(builder)s |
| 144 slave: %(slave)s | 202 slave: %(slave)s |
| 145 forced by recipes: %(recipe)s | 203 forced by recipes: %(recipe)s |
| 146 CURRENT_DIR: %(CURRENT_DIR)s | 204 CURRENT_DIR: %(CURRENT_DIR)s |
| 147 BUILDER_DIR: %(BUILDER_DIR)s | 205 BUILDER_DIR: %(BUILDER_DIR)s |
| 148 SLAVE_DIR: %(SLAVE_DIR)s | 206 SLAVE_DIR: %(SLAVE_DIR)s |
| 149 THIS_DIR: %(THIS_DIR)s | 207 THIS_DIR: %(THIS_DIR)s |
| (...skipping 29 matching lines...) |
| 179 try: | 237 try: |
| 180 execfile(os.path.join( | 238 execfile(os.path.join( |
| 181 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), | 239 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), |
| 182 local_vars) | 240 local_vars) |
| 183 except Exception: | 241 except Exception: |
| 184 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. | 242 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. |
| 185 print 'Warning: unable to read internal configuration file.' | 243 print 'Warning: unable to read internal configuration file.' |
| 186 print 'If this is an internal bot, this step may be erroneously inactive.' | 244 print 'If this is an internal bot, this step may be erroneously inactive.' |
| 187 internal_data = local_vars | 245 internal_data = local_vars |
| 188 | 246 |
| 247 RECOGNIZED_PATHS = { |
| 248 # If SVN path matches key, the entire URL is rewritten to the Git url. |
| 249 '/chrome/trunk/src': |
| 250 CHROMIUM_SRC_URL, |
| 251 '/chrome/trunk/src/tools/cros.DEPS': |
| 252 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git', |
| 253 '/chrome-internal/trunk/src-internal': |
| 254 'https://chrome-internal.googlesource.com/chrome/src-internal.git', |
| 255 } |
| 256 RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {})) |
| 189 | 257 |
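A sketch of the rewrite that RECOGNIZED_PATHS enables: solutions_to_git() (further down) parses each solution URL and, when the path matches a key, swaps the whole URL for the Git one. The SVN URL here is illustrative:

    import urlparse  # Python 2 module, as used elsewhere in this script.

    RECOGNIZED_PATHS = {
      '/chrome/trunk/src': 'https://chromium.googlesource.com/chromium/src.git',
    }

    svn_url = 'svn://svn.chromium.org/chrome/trunk/src'  # illustrative
    parsed_path = urlparse.urlparse(svn_url).path
    print RECOGNIZED_PATHS.get(parsed_path, svn_url)
    # https://chromium.googlesource.com/chromium/src.git
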
| 190 ENABLED_MASTERS = [ | 258 ENABLED_MASTERS = [ |
| 191 'bot_update.always_on', | 259 'bot_update.always_on', |
| 192 'chromium.android', | 260 'chromium.android', |
| 193 'chromium.angle', | 261 'chromium.angle', |
| 194 'chromium.chrome', | 262 'chromium.chrome', |
| 195 'chromium.chromedriver', | 263 'chromium.chromedriver', |
| 196 'chromium.chromiumos', | 264 'chromium.chromiumos', |
| 197 'chromium', | 265 'chromium', |
| 198 'chromium.fyi', | 266 'chromium.fyi', |
| (...skipping 63 matching lines...) |
| 262 | 330 |
| 263 # Disabled filters get run AFTER enabled filters, so for example if a builder | 331 # Disabled filters get run AFTER enabled filters, so for example if a builder |
| 264 # config is enabled, but a bot on that builder is disabled, that bot will | 332 # config is enabled, but a bot on that builder is disabled, that bot will |
| 265 # be disabled. | 333 # be disabled. |
| 266 DISABLED_BUILDERS = {} | 334 DISABLED_BUILDERS = {} |
| 267 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) | 335 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) |
| 268 | 336 |
| 269 DISABLED_SLAVES = {} | 337 DISABLED_SLAVES = {} |
| 270 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) | 338 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) |
| 271 | 339 |
| 340 # These masters work only in Git, meaning that got_revision is always output |
| 341 # as a git hash rather than an SVN revision. |
| 342 GIT_MASTERS = [ |
| 343 'client.v8', |
| 344 'client.v8.branches', |
| 345 'client.v8.ports', |
| 346 'tryserver.v8', |
| 347 ] |
| 348 GIT_MASTERS += internal_data.get('GIT_MASTERS', []) |
| 349 |
| 350 |
| 272 # How many times to try before giving up. | 351 # How many times to try before giving up. |
| 273 ATTEMPTS = 5 | 352 ATTEMPTS = 5 |
| 274 | 353 |
| 275 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') | 354 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') |
| 276 | 355 |
| 277 # Find the patch tool. | 356 # Find the patch tool. |
| 278 if sys.platform.startswith('win'): | 357 if sys.platform.startswith('win'): |
| 279 if not BUILD_INTERNAL_DIR: | 358 if not BUILD_INTERNAL_DIR: |
| 280 print 'Warning: could not find patch tool because there is no ' | 359 print 'Warning: could not find patch tool because there is no ' |
| 281 print 'build_internal present.' | 360 print 'build_internal present.' |
| (...skipping 16 matching lines...) |
| 298 | 377 |
| 299 | 378 |
| 300 class PatchFailed(SubprocessFailed): | 379 class PatchFailed(SubprocessFailed): |
| 301 pass | 380 pass |
| 302 | 381 |
| 303 | 382 |
| 304 class GclientSyncFailed(SubprocessFailed): | 383 class GclientSyncFailed(SubprocessFailed): |
| 305 pass | 384 pass |
| 306 | 385 |
| 307 | 386 |
| 387 class SVNRevisionNotFound(Exception): |
| 388 pass |
| 389 |
| 390 |
| 308 class InvalidDiff(Exception): | 391 class InvalidDiff(Exception): |
| 309 pass | 392 pass |
| 310 | 393 |
| 311 | 394 |
| 395 class Inactive(Exception): |
| 396 """Not really an exception, just used to exit early cleanly.""" |
| 397 pass |
| 398 |
| 399 |
| 312 RETRY = object() | 400 RETRY = object() |
| 313 OK = object() | 401 OK = object() |
| 314 FAIL = object() | 402 FAIL = object() |
| 315 | 403 |
| 316 | 404 |
| 317 class PsPrinter(object): | 405 class PsPrinter(object): |
| 318 def __init__(self, interval=300): | 406 def __init__(self, interval=300): |
| 319 self.interval = interval | 407 self.interval = interval |
| 320 self.active = sys.platform.startswith('linux2') | 408 self.active = sys.platform.startswith('linux2') |
| 321 self.thread = None | 409 self.thread = None |
| (...skipping 136 matching lines...) |
| 458 if slave_list and slave in slave_list: | 546 if slave_list and slave in slave_list: |
| 459 return True | 547 return True |
| 460 return False | 548 return False |
| 461 | 549 |
| 462 | 550 |
| 463 def check_valid_host(master, builder, slave): | 551 def check_valid_host(master, builder, slave): |
| 464 return (check_enabled(master, builder, slave) | 552 return (check_enabled(master, builder, slave) |
| 465 and not check_disabled(master, builder, slave)) | 553 and not check_disabled(master, builder, slave)) |
| 466 | 554 |
| 467 | 555 |
| 556 def maybe_ignore_revision(revision, buildspec): |
| 557 """Handle builders that don't care what buildbot tells them to build. |
| 558 |
| 559 This is especially the case with branch builders that build from buildspecs |
| 560 and/or trigger off multiple repositories, where the --revision passed in has |
| 561 nothing to do with the solution being built. Clearing the revision in this |
| 562 case causes bot_update to use HEAD rather that trying to checkout an |
| 563 inappropriate version of the solution. |
| 564 """ |
| 565 if buildspec and buildspec.container == 'branches': |
| 566 return [] |
| 567 return revision |
| 568 |
| 569 |
| 468 def solutions_printer(solutions): | 570 def solutions_printer(solutions): |
| 469 """Prints gclient solution to stdout.""" | 571 """Prints gclient solution to stdout.""" |
| 470 print 'Gclient Solutions' | 572 print 'Gclient Solutions' |
| 471 print '=================' | 573 print '=================' |
| 472 for solution in solutions: | 574 for solution in solutions: |
| 473 name = solution.get('name') | 575 name = solution.get('name') |
| 474 url = solution.get('url') | 576 url = solution.get('url') |
| 475 print '%s (%s)' % (name, url) | 577 print '%s (%s)' % (name, url) |
| 476 if solution.get('deps_file'): | 578 if solution.get('deps_file'): |
| 477 print ' Dependencies file is %s' % solution['deps_file'] | 579 print ' Dependencies file is %s' % solution['deps_file'] |
| (...skipping 14 matching lines...) |
| 492 print ' %s: Ignore' % deps_name | 594 print ' %s: Ignore' % deps_name |
| 493 for k, v in solution.iteritems(): | 595 for k, v in solution.iteritems(): |
| 494 # Print out all the keys we don't know about. | 596 # Print out all the keys we don't know about. |
| 495 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', | 597 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', |
| 496 'managed']: | 598 'managed']: |
| 497 continue | 599 continue |
| 498 print ' %s is %s' % (k, v) | 600 print ' %s is %s' % (k, v) |
| 499 print | 601 print |
| 500 | 602 |
| 501 | 603 |
| 502 def modify_solutions(input_solutions): | 604 def solutions_to_git(input_solutions): |
| 503 """Modifies urls in solutions to point at Git repos. | 605 """Modifies urls in solutions to point at Git repos. |
| 504 | 606 |
| 505 returns: new solution dictionary | 607 returns: (git solutions, svn root of first solution, buildspec) tuple. |
| 506 """ | 608 """ |
| 507 assert input_solutions | 609 assert input_solutions |
| 508 solutions = copy.deepcopy(input_solutions) | 610 solutions = copy.deepcopy(input_solutions) |
| 611 first_solution = True |
| 612 buildspec = None |
| 509 for solution in solutions: | 613 for solution in solutions: |
| 510 original_url = solution['url'] | 614 original_url = solution['url'] |
| 511 parsed_url = urlparse.urlparse(original_url) | 615 parsed_url = urlparse.urlparse(original_url) |
| 512 parsed_path = parsed_url.path | 616 parsed_path = parsed_url.path |
| 513 | 617 |
| 618 # Rewrite SVN urls into Git urls. |
| 619 buildspec_m = re.match(BUILDSPEC_RE, parsed_path) |
| 620 if first_solution and buildspec_m: |
| 621 solution['url'] = GIT_BUILDSPEC_PATH |
| 622 buildspec = BUILDSPEC_TYPE( |
| 623 container=buildspec_m.group(1), |
| 624 version=buildspec_m.group(2), |
| 625 ) |
| 626 solution['deps_file'] = path.join(buildspec.container, buildspec.version, |
| 627 'DEPS') |
| 628 elif parsed_path in RECOGNIZED_PATHS: |
| 629 solution['url'] = RECOGNIZED_PATHS[parsed_path] |
| 630 solution['deps_file'] = '.DEPS.git' |
| 631 elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc: |
| 632 pass |
| 633 else: |
| 634 print 'Warning: %s' % ('path %r not recognized' % parsed_path,) |
| 635 |
| 636 # Strip out deps containing $$V8_REV$$, etc. |
| 637 if 'custom_deps' in solution: |
| 638 new_custom_deps = {} |
| 639 for deps_name, deps_value in solution['custom_deps'].iteritems(): |
| 640 if deps_value and '$$' in deps_value: |
| 641 print 'Dropping %s:%s from custom deps' % (deps_name, deps_value) |
| 642 else: |
| 643 new_custom_deps[deps_name] = deps_value |
| 644 solution['custom_deps'] = new_custom_deps |
| 645 |
| 646 if first_solution: |
| 647 root = parsed_path |
| 648 first_solution = False |
| 649 |
| 514 solution['managed'] = False | 650 solution['managed'] = False |
| 515 # We don't want gclient to be using a safesync URL. Instead it should | 651 # We don't want gclient to be using a safesync URL. Instead it should |
| 516 # be using the lkgr/lkcr branch/tags. | 652 # be using the lkgr/lkcr branch/tags. |
| 517 if 'safesync_url' in solution: | 653 if 'safesync_url' in solution: |
| 518 print 'Removing safesync url %s from %s' % (solution['safesync_url'], | 654 print 'Removing safesync url %s from %s' % (solution['safesync_url'], |
| 519 parsed_path) | 655 parsed_path) |
| 520 del solution['safesync_url'] | 656 del solution['safesync_url'] |
| 521 | 657 return solutions, root, buildspec |
| 522 return solutions | |
| 523 | 658 |
| 524 | 659 |
| 525 def remove(target): | 660 def remove(target): |
| 526 """Remove a target by moving it into build.dead.""" | 661 """Remove a target by moving it into build.dead.""" |
| 527 dead_folder = path.join(BUILDER_DIR, 'build.dead') | 662 dead_folder = path.join(BUILDER_DIR, 'build.dead') |
| 528 if not path.exists(dead_folder): | 663 if not path.exists(dead_folder): |
| 529 os.makedirs(dead_folder) | 664 os.makedirs(dead_folder) |
| 530 os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) | 665 os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) |
| 531 | 666 |
| 532 | 667 |
| 533 def ensure_no_checkout(dir_names): | 668 def ensure_no_checkout(dir_names, scm_dirname): |
| 534 """Ensure that there is no undesired checkout under build/.""" | 669 """Ensure that there is no undesired checkout under build/. |
| 535 build_dir = os.getcwd() | 670 |
| 536 has_checkout = any(path.exists(path.join(build_dir, dir_name, '.git')) | 671 If there is an incorrect checkout under build/, then |
| 672 move build/ to build.dead/. |
| 673 This function will check each directory in dir_names. |
| 674 |
| 675 scm_dirname is expected to be one of '.svn', '.git', or '*'. |
| 676 """ |
| 677 assert scm_dirname in ['.svn', '.git', '*'] |
| 678 has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname)) |
| 537 for dir_name in dir_names) | 679 for dir_name in dir_names) |
| 538 if has_checkout: | 680 |
| 681 if has_checkout or scm_dirname == '*': |
| 682 build_dir = os.getcwd() |
| 683 prefix = '' |
| 684 if scm_dirname != '*': |
| 685 prefix = '%s detected in checkout, ' % scm_dirname |
| 686 |
| 539 for filename in os.listdir(build_dir): | 687 for filename in os.listdir(build_dir): |
| 540 deletion_target = path.join(build_dir, filename) | 688 deletion_target = path.join(build_dir, filename) |
| 541 print '.git detected in checkout, deleting %s...' % deletion_target, | 689 print '%sdeleting %s...' % (prefix, deletion_target), |
| 542 remove(deletion_target) | 690 remove(deletion_target) |
| 543 print 'done' | 691 print 'done' |
| 544 | 692 |
| 545 | 693 |
| 546 def gclient_configure(solutions, target_os, target_os_only, git_cache_dir): | 694 def gclient_configure(solutions, target_os, target_os_only, git_cache_dir): |
| 547 """Should do the same thing as gclient --spec='...'.""" | 695 """Should do the same thing as gclient --spec='...'.""" |
| 548 with codecs.open('.gclient', mode='w', encoding='utf-8') as f: | 696 with codecs.open('.gclient', mode='w', encoding='utf-8') as f: |
| 549 f.write(get_gclient_spec( | 697 f.write(get_gclient_spec( |
| 550 solutions, target_os, target_os_only, git_cache_dir)) | 698 solutions, target_os, target_os_only, git_cache_dir)) |
| 551 | 699 |
| (...skipping 73 matching lines...) |
| 625 footers[m.group(1)] = m.group(2).strip() | 773 footers[m.group(1)] = m.group(2).strip() |
| 626 return footers | 774 return footers |
| 627 | 775 |
| 628 | 776 |
| 629 def get_commit_message_footer(message, key): | 777 def get_commit_message_footer(message, key): |
| 630 """Returns: (str/None) The footer value for 'key', or None if none was found. | 778 """Returns: (str/None) The footer value for 'key', or None if none was found. |
| 631 """ | 779 """ |
| 632 return get_commit_message_footer_map(message).get(key) | 780 return get_commit_message_footer_map(message).get(key) |
| 633 | 781 |
| 634 | 782 |
| 783 def get_svn_rev(git_hash, dir_name): |
| 784 log = git('log', '-1', git_hash, cwd=dir_name) |
| 785 git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY) |
| 786 if not git_svn_id: |
| 787 return None |
| 788 m = GIT_SVN_ID_RE.match(git_svn_id) |
| 789 if not m: |
| 790 return None |
| 791 return int(m.group(2)) |
| 792 |
| 793 |
| 794 def get_git_hash(revision, branch, sln_dir): |
| 795 """We want to search for the SVN revision on the git-svn branch. |
| 796 |
| 797 Note that git will search backwards from origin/master. |
| 798 """ |
| 799 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision) |
| 800 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
| 801 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref] |
| 802 result = git(*cmd, cwd=sln_dir).strip() |
| 803 if result: |
| 804 return result |
| 805 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' % |
| 806 (revision, sln_dir)) |
| 807 |
| 808 |
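For reference, a sketch of the `git log` arguments that get_git_hash() assembles; the revision number and branch are illustrative:

    GIT_SVN_ID_FOOTER_KEY = 'git-svn-id'

    revision, branch = 23117, 'master'  # illustrative inputs
    match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision)
    ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
    cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref]
    print cmd
    # ['log', '-E', '--grep', '^git-svn-id: [^ ]*@23117 ', '--format=%H',
    #  '--max-count=1', 'origin/master']
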
| 635 def emit_log_lines(name, lines): | 809 def emit_log_lines(name, lines): |
| 636 for line in lines.splitlines(): | 810 for line in lines.splitlines(): |
| 637 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) | 811 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) |
| 638 print '@@@STEP_LOG_END@%s@@@' % name | 812 print '@@@STEP_LOG_END@%s@@@' % name |
| 639 | 813 |
| 640 | 814 |
| 641 def emit_properties(properties): | 815 def emit_properties(properties): |
| 642 for property_name, property_value in sorted(properties.items()): | 816 for property_name, property_value in sorted(properties.items()): |
| 643 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) | 817 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) |
| 644 | 818 |
| (...skipping 34 matching lines...) |
| 679 | 853 |
| 680 | 854 |
| 681 def force_revision(folder_name, revision): | 855 def force_revision(folder_name, revision): |
| 682 split_revision = revision.split(':', 1) | 856 split_revision = revision.split(':', 1) |
| 683 branch = 'master' | 857 branch = 'master' |
| 684 if len(split_revision) == 2: | 858 if len(split_revision) == 2: |
| 685 # Support for "branch:revision" syntax. | 859 # Support for "branch:revision" syntax. |
| 686 branch, revision = split_revision | 860 branch, revision = split_revision |
| 687 | 861 |
| 688 if revision and revision.upper() != 'HEAD': | 862 if revision and revision.upper() != 'HEAD': |
| 689 git('checkout', '--force', revision, cwd=folder_name) | 863 if revision and revision.isdigit() and len(revision) < 40: |
| 864 # revision is really an SVN revision number; convert it into a git hash. |
| 865 git_ref = get_git_hash(int(revision), branch, folder_name) |
| 866 else: |
| 867 # revision is actually a git hash or ref, so we can just use it. |
| 868 git_ref = revision |
| 869 git('checkout', '--force', git_ref, cwd=folder_name) |
| 690 else: | 870 else: |
| 691 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch | 871 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
| 692 git('checkout', '--force', ref, cwd=folder_name) | 872 git('checkout', '--force', ref, cwd=folder_name) |
| 693 | 873 |
| 694 | |
| 695 def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): | 874 def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): |
| 696 build_dir = os.getcwd() | 875 build_dir = os.getcwd() |
| 697 # Before we do anything, break all git_cache locks. | 876 # Before we do anything, break all git_cache locks. |
| 698 if path.isdir(git_cache_dir): | 877 if path.isdir(git_cache_dir): |
| 699 git('cache', 'unlock', '-vv', '--force', '--all', | 878 git('cache', 'unlock', '-vv', '--force', '--all', |
| 700 '--cache-dir', git_cache_dir) | 879 '--cache-dir', git_cache_dir) |
| 701 for item in os.listdir(git_cache_dir): | 880 for item in os.listdir(git_cache_dir): |
| 702 filename = os.path.join(git_cache_dir, item) | 881 filename = os.path.join(git_cache_dir, item) |
| 703 if item.endswith('.lock'): | 882 if item.endswith('.lock'): |
| 704 raise Exception('%s exists after cache unlock' % filename) | 883 raise Exception('%s exists after cache unlock' % filename) |
| (...skipping 40 matching lines...) |
| 745 # Exited abnormally, there's probably something wrong. | 924 # Exited abnormally, there's probably something wrong. |
| 746 # Let's wipe the checkout and try again. | 925 # Let's wipe the checkout and try again. |
| 747 tries_left -= 1 | 926 tries_left -= 1 |
| 748 if tries_left > 0: | 927 if tries_left > 0: |
| 749 print 'Something failed: %s.' % str(e) | 928 print 'Something failed: %s.' % str(e) |
| 750 print 'waiting 5 seconds and trying again...' | 929 print 'waiting 5 seconds and trying again...' |
| 751 time.sleep(5) | 930 time.sleep(5) |
| 752 else: | 931 else: |
| 753 raise | 932 raise |
| 754 remove(sln_dir) | 933 remove(sln_dir) |
| 934 except SVNRevisionNotFound: |
| 935 tries_left -= 1 |
| 936 if tries_left > 0: |
| 937 # If we don't have the correct revision, wait and try again. |
| 938 print 'We can\'t find revision %s.' % revision |
| 939 print 'The svn to git replicator is probably falling behind.' |
| 940 print 'waiting 5 seconds and trying again...' |
| 941 time.sleep(5) |
| 942 else: |
| 943 raise |
| 755 | 944 |
| 756 git('clean', '-dff', cwd=sln_dir) | 945 git('clean', '-dff', cwd=sln_dir) |
| 757 | 946 |
| 758 if first_solution: | 947 if first_solution: |
| 759 git_ref = git('log', '--format=%H', '--max-count=1', | 948 git_ref = git('log', '--format=%H', '--max-count=1', |
| 760 cwd=sln_dir).strip() | 949 cwd=sln_dir).strip() |
| 761 first_solution = False | 950 first_solution = False |
| 762 return git_ref | 951 return git_ref |
| 763 | 952 |
| 764 | 953 |
| 765 def _download(url): | 954 def _download(url): |
| 766 """Fetch url and return content, with retries for flake.""" | 955 """Fetch url and return content, with retries for flake.""" |
| 767 for attempt in xrange(ATTEMPTS): | 956 for attempt in xrange(ATTEMPTS): |
| 768 try: | 957 try: |
| 769 return urllib2.urlopen(url).read() | 958 return urllib2.urlopen(url).read() |
| 770 except Exception: | 959 except Exception: |
| 771 if attempt == ATTEMPTS - 1: | 960 if attempt == ATTEMPTS - 1: |
| 772 raise | 961 raise |
| 773 | 962 |
| 774 | 963 |
| 964 def parse_diff(diff): |
| 965 """Takes a unified diff and returns a list of diffed files and their diffs. |
| 966 |
| 967 The return format is a list of pairs of: |
| 968 (<filename>, <diff contents>) |
| 969 <diff contents> is inclusive of the diff line. |
| 970 """ |
| 971 result = [] |
| 972 current_diff = '' |
| 973 current_header = None |
| 974 for line in diff.splitlines(): |
| 975 # "diff" is for git style patches, and "Index: " is for SVN style patches. |
| 976 if line.startswith('diff') or line.startswith('Index: '): |
| 977 if current_header: |
| 978 # If we are in a diff portion, then save the diff. |
| 979 result.append((current_header, '%s\n' % current_diff)) |
| 980 git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line) |
| 981 svn_header_match = re.match(r'Index: (.*)', line) |
| 982 |
| 983 if git_header_match: |
| 984 # First, see if it's a git-style header. |
| 985 from_file = git_header_match.group(1) |
| 986 to_file = git_header_match.group(2) |
| 987 if from_file != to_file and from_file.startswith('a/'): |
| 988 # Sometimes git prepends 'a/' and 'b/' in front of file paths. |
| 989 from_file = from_file[2:] |
| 990 current_header = from_file |
| 991 |
| 992 elif svn_header_match: |
| 993 # Otherwise, check if it's an SVN-style header. |
| 994 current_header = svn_header_match.group(1) |
| 995 |
| 996 else: |
| 997 # Otherwise... I'm not really sure what to do with this. |
| 998 raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' % |
| 999 (line, diff)) |
| 1000 |
| 1001 current_diff = '' |
| 1002 current_diff += '%s\n' % line |
| 1003 if current_header: |
| 1004 # We hit EOF, gotta save the last diff. |
| 1005 result.append((current_header, current_diff)) |
| 1006 return result |
| 1007 |
| 1008 |
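A usage sketch for parse_diff(), assuming it is exercised from inside this module; the patch content and file names are illustrative:

    # Illustrative patch with one git-style and one SVN-style header.
    sample = ('diff --git a/foo.cc b/foo.cc\n'
              '--- a/foo.cc\n'
              '+++ b/foo.cc\n'
              'Index: bar.cc\n'
              '--- bar.cc\n'
              '+++ bar.cc\n')
    for filename, contents in parse_diff(sample):
      print filename  # prints foo.cc, then bar.cc
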
| 775 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, | 1009 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, |
| 776 email_file, key_file, whitelist=None, blacklist=None): | 1010 email_file, key_file, whitelist=None, blacklist=None): |
| 777 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') | 1011 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') |
| 778 else 'apply_issue') | 1012 else 'apply_issue') |
| 779 cmd = [apply_issue_bin, | 1013 cmd = [apply_issue_bin, |
| 780 # The patch will be applied on top of this directory. | 1014 # The patch will be applied on top of this directory. |
| 781 '--root_dir', root, | 1015 '--root_dir', root, |
| 782 # Tell apply_issue how to fetch the patch. | 1016 # Tell apply_issue how to fetch the patch. |
| 783 '--issue', issue, | 1017 '--issue', issue, |
| 784 '--server', server, | 1018 '--server', server, |
| (...skipping 77 matching lines...) |
| 862 os.remove(flag_file) | 1096 os.remove(flag_file) |
| 863 | 1097 |
| 864 | 1098 |
| 865 def emit_flag(flag_file): | 1099 def emit_flag(flag_file): |
| 866 """Deposit a bot update flag on the system to tell gclient not to run.""" | 1100 """Deposit a bot update flag on the system to tell gclient not to run.""" |
| 867 print 'Emitting flag file at %s' % flag_file | 1101 print 'Emitting flag file at %s' % flag_file |
| 868 with open(flag_file, 'wb') as f: | 1102 with open(flag_file, 'wb') as f: |
| 869 f.write('Success!') | 1103 f.write('Success!') |
| 870 | 1104 |
| 871 | 1105 |
| 1106 def get_commit_position_for_git_svn(url, revision): |
| 1107 """Generates a commit position string for a 'git-svn' URL/revision. |
| 1108 |
| 1109 If the 'git-svn' URL maps to a known project, we will construct a commit |
| 1110 position branch value by applying substitution on the SVN URL. |
| 1111 """ |
| 1112 # Identify the base URL so we can strip off trunk/branch name |
| 1113 project_config = branch = None |
| 1114 for _, project_config in GIT_SVN_PROJECT_MAP.iteritems(): |
| 1115 if url.startswith(project_config['svn_url']): |
| 1116 branch = url[len(project_config['svn_url']):] |
| 1117 break |
| 1118 |
| 1119 if branch: |
| 1120 # Strip any leading slashes |
| 1121 branch = branch.lstrip('/') |
| 1122 |
| 1123 # Try and map the branch |
| 1124 for pattern, repl in project_config.get('branch_map', ()): |
| 1125 nbranch, subn = re.subn(pattern, repl, branch, count=1) |
| 1126 if subn: |
| 1127 print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % ( |
| 1128 branch, nbranch) |
| 1129 branch = nbranch |
| 1130 break |
| 1131 else: |
| 1132 # Use generic 'svn' branch |
| 1133 print 'INFO: Could not resolve project for SVN URL %r' % (url,) |
| 1134 branch = 'svn' |
| 1135 return '%s@{#%s}' % (branch, revision) |
| 1136 |
| 1137 |
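A sketch of the resulting commit position string, reusing the git-svn example URL and revision from earlier in this file and assuming the call is made from inside this module:

    # The v8 branch_map rewrites 'trunk' to refs/heads/candidates, so this
    # prints: refs/heads/candidates@{#23117}
    print get_commit_position_for_git_svn(
        'https://v8.googlecode.com/svn/trunk', '23117')
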
| 872 def get_commit_position(git_path, revision='HEAD'): | 1138 def get_commit_position(git_path, revision='HEAD'): |
| 873 """Dumps the 'git' log for a specific revision and parses out the commit | 1139 """Dumps the 'git' log for a specific revision and parses out the commit |
| 874 position. | 1140 position. |
| 875 | 1141 |
| 876 If a commit position metadata key is found, its value will be returned. | 1142 If a commit position metadata key is found, its value will be returned. |
| 1143 |
| 1144 Otherwise, we will search for a 'git-svn' metadata entry. If one is found, |
| 1145 we will compose a commit position from it, using its SVN revision value as |
| 1146 the revision. |
| 1147 |
| 1148 If the 'git-svn' URL maps to a known project, we will construct a commit |
| 1149 position branch value by truncating the URL, mapping 'trunk' to |
| 1150 "refs/heads/master". Otherwise, we will return the generic branch, 'svn'. |
| 877 """ | 1151 """ |
| 878 # TODO(iannucci): Use git-footers for this. | |
| 879 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) | 1152 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) |
| 880 footer_map = get_commit_message_footer_map(git_log) | 1153 footer_map = get_commit_message_footer_map(git_log) |
| 881 | 1154 |
| 882 # Search for commit position metadata | 1155 # Search for commit position metadata |
| 883 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or | 1156 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or |
| 884 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) | 1157 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) |
| 885 if value: | 1158 if value: |
| 886 return value | 1159 return value |
| 1160 |
| 1161 # Compose a commit position from 'git-svn' metadata |
| 1162 value = footer_map.get(GIT_SVN_ID_FOOTER_KEY) |
| 1163 if value: |
| 1164 m = GIT_SVN_ID_RE.match(value) |
| 1165 if not m: |
| 1166 raise ValueError("Invalid 'git-svn' value: [%s]" % (value,)) |
| 1167 return get_commit_position_for_git_svn(m.group(1), m.group(2)) |
| 887 return None | 1168 return None |
| 888 | 1169 |
| 889 | 1170 |
| 890 def parse_got_revision(gclient_output, got_revision_mapping): | 1171 def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs): |
| 891 """Translate git gclient revision mapping to build properties.""" | 1172 """Translate git gclient revision mapping to build properties. |
| 1173 |
| 1174 If use_svn_revs is True, then translate git hashes in the revision mapping |
| 1175 to svn revision numbers. |
| 1176 """ |
| 892 properties = {} | 1177 properties = {} |
| 893 solutions_output = { | 1178 solutions_output = { |
| 894 # Make sure path always ends with a single slash. | 1179 # Make sure path always ends with a single slash. |
| 895 '%s/' % path.rstrip('/') : solution_output for path, solution_output | 1180 '%s/' % path.rstrip('/') : solution_output for path, solution_output |
| 896 in gclient_output['solutions'].iteritems() | 1181 in gclient_output['solutions'].iteritems() |
| 897 } | 1182 } |
| 898 for dir_name, property_name in got_revision_mapping.iteritems(): | 1183 for dir_name, property_name in got_revision_mapping.iteritems(): |
| 899 # Make sure dir_name always ends with a single slash. | 1184 # Make sure dir_name always ends with a single slash. |
| 900 dir_name = '%s/' % dir_name.rstrip('/') | 1185 dir_name = '%s/' % dir_name.rstrip('/') |
| 901 if dir_name not in solutions_output: | 1186 if dir_name not in solutions_output: |
| 902 continue | 1187 continue |
| 903 solution_output = solutions_output[dir_name] | 1188 solution_output = solutions_output[dir_name] |
| 904 if solution_output.get('scm') is None: | 1189 if solution_output.get('scm') is None: |
| 905 # This is an ignored DEPS, so the output got_revision should be 'None'. | 1190 # This is an ignored DEPS, so the output got_revision should be 'None'. |
| 906 git_revision = revision = commit_position = None | 1191 git_revision = revision = commit_position = None |
| 907 else: | 1192 else: |
| 908 # Since we are using .DEPS.git, everything had better be git. | 1193 # Since we are using .DEPS.git, everything had better be git. |
| 909 assert solution_output.get('scm') == 'git' | 1194 assert solution_output.get('scm') == 'git' |
| 910 revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() | 1195 git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() |
| 1196 if use_svn_revs: |
| 1197 revision = get_svn_rev(git_revision, dir_name) |
| 1198 if not revision: |
| 1199 revision = git_revision |
| 1200 else: |
| 1201 revision = git_revision |
| 911 commit_position = get_commit_position(dir_name) | 1202 commit_position = get_commit_position(dir_name) |
| 912 | 1203 |
| 913 properties[property_name] = revision | 1204 properties[property_name] = revision |
| 914 if revision != git_revision: | 1205 if revision != git_revision: |
| 915 properties['%s_git' % property_name] = git_revision | 1206 properties['%s_git' % property_name] = git_revision |
| 916 if commit_position: | 1207 if commit_position: |
| 917 properties['%s_cp' % property_name] = commit_position | 1208 properties['%s_cp' % property_name] = commit_position |
| 918 | 1209 |
| 919 return properties | 1210 return properties |
| 920 | 1211 |
| (...skipping 11 matching lines...) |
| 932 def ensure_deps_revisions(deps_url_mapping, solutions, revisions): | 1223 def ensure_deps_revisions(deps_url_mapping, solutions, revisions): |
| 933 """Ensure correct DEPS revisions, ignores solutions.""" | 1224 """Ensure correct DEPS revisions, ignores solutions.""" |
| 934 for deps_name, deps_data in sorted(deps_url_mapping.items()): | 1225 for deps_name, deps_data in sorted(deps_url_mapping.items()): |
| 935 if deps_name.strip('/') in solutions: | 1226 if deps_name.strip('/') in solutions: |
| 936 # This has already been forced to the correct solution by git_checkout(). | 1227 # This has already been forced to the correct solution by git_checkout(). |
| 937 continue | 1228 continue |
| 938 revision = get_target_revision(deps_name, deps_data.get('url', None), | 1229 revision = get_target_revision(deps_name, deps_data.get('url', None), |
| 939 revisions) | 1230 revisions) |
| 940 if not revision: | 1231 if not revision: |
| 941 continue | 1232 continue |
| 1233 # TODO(hinoka): Catch SVNRevisionNotFound error maybe? |
| 942 git('fetch', 'origin', cwd=deps_name) | 1234 git('fetch', 'origin', cwd=deps_name) |
| 943 force_revision(deps_name, revision) | 1235 force_revision(deps_name, revision) |
| 944 | 1236 |
| 945 | 1237 |
| 946 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, | 1238 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
| 947 patch_root, issue, patchset, rietveld_server, | 1239 patch_root, issue, patchset, rietveld_server, |
| 948 gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, | 1240 gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, |
| 949 revision_mapping, apply_issue_email_file, | 1241 revision_mapping, apply_issue_email_file, |
| 950 apply_issue_key_file, gyp_env, shallow, runhooks, | 1242 apply_issue_key_file, buildspec, gyp_env, shallow, runhooks, |
| 951 refs, git_cache_dir, gerrit_reset): | 1243 refs, git_cache_dir, gerrit_reset): |
| 952 # Get a checkout of each solution, without DEPS or hooks. | 1244 # Get a checkout of each solution, without DEPS or hooks. |
| 953 # Calling git directly because there is no way to run Gclient without | 1245 # Calling git directly because there is no way to run Gclient without |
| 954 # invoking DEPS. | 1246 # invoking DEPS. |
| 955 print 'Fetching Git checkout' | 1247 print 'Fetching Git checkout' |
| 956 | 1248 |
| 957 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) | 1249 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) |
| 958 | 1250 |
| 959 print '===Processing patch solutions===' | 1251 print '===Processing patch solutions===' |
| 960 already_patched = [] | 1252 already_patched = [] |
| (...skipping 16 matching lines...) |
| 977 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, | 1269 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root, gerrit_reset, |
| 978 gerrit_rebase_patch_ref) | 1270 gerrit_rebase_patch_ref) |
| 979 applied_gerrit_patch = True | 1271 applied_gerrit_patch = True |
| 980 | 1272 |
| 981 # Ensure our build/ directory is set up with the correct .gclient file. | 1273 # Ensure our build/ directory is set up with the correct .gclient file. |
| 982 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) | 1274 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) |
| 983 | 1275 |
| 984 # Let gclient do the DEPS syncing. | 1276 # Let gclient do the DEPS syncing. |
| 985 # The branch-head refspec is a special case because it's possible Chrome | 1277 # The branch-head refspec is a special case because it's possible Chrome |
| 986 # src, which contains the branch-head refspecs, is DEPSed in. | 1278 # src, which contains the branch-head refspecs, is DEPSed in. |
| 987 gclient_output = gclient_sync(BRANCH_HEADS_REFSPEC in refs, shallow) | 1279 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs, |
| 1280 shallow) |
| 988 | 1281 |
| 989 # Now that gclient_sync has finished, we should revert any .DEPS.git so that | 1282 # Now that gclient_sync has finished, we should revert any .DEPS.git so that |
| 990 # presubmit doesn't complain about it being modified. | 1283 # presubmit doesn't complain about it being modified. |
| 991 if git('ls-files', '.DEPS.git', cwd=first_sln).strip(): | 1284 if (not buildspec and |
| 1285 git('ls-files', '.DEPS.git', cwd=first_sln).strip()): |
| 992 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln) | 1286 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln) |
| 993 | 1287 |
| 1288 if buildspec and runhooks: |
| 1289 # Run gclient runhooks if we're on an official builder. |
| 1290 # TODO(hinoka): Remove this when the official builders run their own |
| 1291 # runhooks step. |
| 1292 gclient_runhooks(gyp_env) |
| 1293 |
| 994 # Finally, ensure that all DEPS are pinned to the correct revision. | 1294 # Finally, ensure that all DEPS are pinned to the correct revision. |
| 995 dir_names = [sln['name'] for sln in solutions] | 1295 dir_names = [sln['name'] for sln in solutions] |
| 996 ensure_deps_revisions(gclient_output.get('solutions', {}), | 1296 ensure_deps_revisions(gclient_output.get('solutions', {}), |
| 997 dir_names, revisions) | 1297 dir_names, revisions) |
| 998 # Apply the rest of the patch here (sans DEPS) | 1298 # Apply the rest of the patch here (sans DEPS) |
| 999 if issue: | 1299 if issue: |
| 1000 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, | 1300 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
| 1001 revision_mapping, git_ref, apply_issue_email_file, | 1301 revision_mapping, git_ref, apply_issue_email_file, |
| 1002 apply_issue_key_file, blacklist=already_patched) | 1302 apply_issue_key_file, blacklist=already_patched) |
| 1003 elif gerrit_ref and not applied_gerrit_patch: | 1303 elif gerrit_ref and not applied_gerrit_patch: |
| (...skipping 25 matching lines...) |
| 1029 expanded_revisions.extend(revision.split(',')) | 1329 expanded_revisions.extend(revision.split(',')) |
| 1030 for revision in expanded_revisions: | 1330 for revision in expanded_revisions: |
| 1031 split_revision = revision.split('@') | 1331 split_revision = revision.split('@') |
| 1032 if len(split_revision) == 1: | 1332 if len(split_revision) == 1: |
| 1033 # This is just a plain revision, set it as the revision for root. | 1333 # This is just a plain revision, set it as the revision for root. |
| 1034 results[root] = split_revision[0] | 1334 results[root] = split_revision[0] |
| 1035 elif len(split_revision) == 2: | 1335 elif len(split_revision) == 2: |
| 1036 # This is an alt_root@revision argument. | 1336 # This is an alt_root@revision argument. |
| 1037 current_root, current_rev = split_revision | 1337 current_root, current_rev = split_revision |
| 1038 | 1338 |
| 1339 # We want to normalize svn/git urls into .git urls. |
| 1039 parsed_root = urlparse.urlparse(current_root) | 1340 parsed_root = urlparse.urlparse(current_root) |
| 1040 if parsed_root.scheme in ['http', 'https']: | 1341 if parsed_root.scheme == 'svn': |
| 1041 # We want to normalize git urls into .git urls. | 1342 if parsed_root.path in RECOGNIZED_PATHS: |
| 1343 normalized_root = RECOGNIZED_PATHS[parsed_root.path] |
| 1344 else: |
| 1345 print 'WARNING: SVN path %s not recognized, ignoring' % current_root |
| 1346 continue |
| 1347 elif parsed_root.scheme in ['http', 'https']: |
| 1042 normalized_root = 'https://%s/%s' % (parsed_root.netloc, | 1348 normalized_root = 'https://%s/%s' % (parsed_root.netloc, |
| 1043 parsed_root.path) | 1349 parsed_root.path) |
| 1044 if not normalized_root.endswith('.git'): | 1350 if not normalized_root.endswith('.git'): |
| 1045 normalized_root = '%s.git' % normalized_root | 1351 normalized_root = '%s.git' % normalized_root |
| 1046 elif parsed_root.scheme: | 1352 elif parsed_root.scheme: |
| 1047 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme | 1353 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme |
| 1048 continue | 1354 continue |
| 1049 else: | 1355 else: |
| 1050 # This is probably a local path. | 1356 # This is probably a local path. |
| 1051 normalized_root = current_root.strip('/') | 1357 normalized_root = current_root.strip('/') |
| (...skipping 35 matching lines...) |
| 1087 'bot_update\'s whitelist, bot_update will be noop.') | 1393 'bot_update\'s whitelist, bot_update will be noop.') |
| 1088 parse.add_option('-f', '--force', action='store_true', | 1394 parse.add_option('-f', '--force', action='store_true', |
| 1089 help='Bypass check to see if we want to be run. ' | 1395 help='Bypass check to see if we want to be run. ' |
| 1090 'Should ONLY be used locally or by smart recipes.') | 1396 'Should ONLY be used locally or by smart recipes.') |
| 1091 parse.add_option('--revision_mapping', | 1397 parse.add_option('--revision_mapping', |
| 1092 help='{"path/to/repo/": "property_name"}') | 1398 help='{"path/to/repo/": "property_name"}') |
| 1093 parse.add_option('--revision_mapping_file', | 1399 parse.add_option('--revision_mapping_file', |
| 1094 help=('Same as revision_mapping, except its a path to a json' | 1400 help=('Same as revision_mapping, except its a path to a json' |
| 1095 ' file containing that format.')) | 1401 ' file containing that format.')) |
| 1096 parse.add_option('--revision', action='append', default=[], | 1402 parse.add_option('--revision', action='append', default=[], |
| 1097 help='Revision to check out. Can be any form of git ref. ' | 1403 help='Revision to check out. Can be an SVN revision number, ' |
| 1098 'Can prepend root@<rev> to specify which repository, ' | 1404 'git hash, or any form of git ref. Can prepend ' |
| 1099 'where root is either a filesystem path or git https ' | 1405 'root@<rev> to specify which repository, where root ' |
| 1100 'url. To specify Tip of Tree, set rev to HEAD. ') | 1406 'is either a filesystem path, git https url, or ' |
| 1407 'svn url. To specify Tip of Tree, set rev to HEAD. ' |
| 1408 'To specify a git branch and an SVN rev, <rev> can be ' |
| 1409 'set to <branch>:<revision>.') |
| 1101 parse.add_option('--output_manifest', action='store_true', | 1410 parse.add_option('--output_manifest', action='store_true', |
| 1102 help=('Add manifest json to the json output.')) | 1411 help=('Add manifest json to the json output.')) |
| 1103 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], | 1412 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], |
| 1104 help='Hostname of the current machine, ' | 1413 help='Hostname of the current machine, ' |
| 1105 'used for determining whether or not to activate.') | 1414 'used for determining whether or not to activate.') |
| 1106 parse.add_option('--builder_name', help='Name of the builder, ' | 1415 parse.add_option('--builder_name', help='Name of the builder, ' |
| 1107 'used for determining whether or not to activate.') | 1416 'used for determining whether or not to activate.') |
| 1108 parse.add_option('--build_dir', default=os.getcwd()) | 1417 parse.add_option('--build_dir', default=os.getcwd()) |
| 1109 parse.add_option('--flag_file', default=path.join(os.getcwd(), | 1418 parse.add_option('--flag_file', default=path.join(os.getcwd(), |
| 1110 'update.flag')) | 1419 'update.flag')) |
| (...skipping 53 matching lines...) |
| 1164 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') | 1473 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') |
| 1165 | 1474 |
| 1166 return options, args | 1475 return options, args |
| 1167 | 1476 |
| 1168 | 1477 |
| 1169 def prepare(options, git_slns, active): | 1478 def prepare(options, git_slns, active): |
| 1170 """Prepares the target folder before we checkout.""" | 1479 """Prepares the target folder before we checkout.""" |
| 1171 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1480 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
| 1172 # If we're active now, but the flag file doesn't exist (we weren't active | 1481 # If we're active now, but the flag file doesn't exist (we weren't active |
| 1173 # last run) or vice versa, blow away all checkouts. | 1482 # last run) or vice versa, blow away all checkouts. |
| 1174 if options.clobber or (bool(active) != bool(check_flag(options.flag_file))): | 1483 if bool(active) != bool(check_flag(options.flag_file)): |
| 1175 ensure_no_checkout(dir_names) | 1484 ensure_no_checkout(dir_names, '*') |
| 1176 if options.output_json: | 1485 if options.output_json: |
| 1177 # Make sure we tell recipes that we didn't run if the script exits here. | 1486 # Make sure we tell recipes that we didn't run if the script exits here. |
| 1178 emit_json(options.output_json, did_run=active) | 1487 emit_json(options.output_json, did_run=active) |
| 1179 emit_flag(options.flag_file) | 1488 if active: |
| 1489 if options.clobber: |
| 1490 ensure_no_checkout(dir_names, '*') |
| 1491 else: |
| 1492 ensure_no_checkout(dir_names, '.svn') |
| 1493 emit_flag(options.flag_file) |
| 1494 else: |
| 1495 delete_flag(options.flag_file) |
| 1496 raise Inactive # This is caught in main() and we exit cleanly. |
| 1180 | 1497 |
| 1181 # Do a shallow checkout if the disk is less than 100GB. | 1498 # Do a shallow checkout if the disk is less than 100GB. |
| 1182 total_disk_space, free_disk_space = get_total_disk_space() | 1499 total_disk_space, free_disk_space = get_total_disk_space() |
| 1183 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) | 1500 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) |
| 1184 used_disk_space_gb = int((total_disk_space - free_disk_space) | 1501 used_disk_space_gb = int((total_disk_space - free_disk_space) |
| 1185 / (1024 * 1024 * 1024)) | 1502 / (1024 * 1024 * 1024)) |
| 1186 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) | 1503 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) |
| 1187 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, | 1504 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, |
| 1188 total_disk_space_gb, | 1505 total_disk_space_gb, |
| 1189 percent_used) | 1506 percent_used) |
| 1190 if not options.output_json: | 1507 if not options.output_json: |
| 1191 print '@@@STEP_TEXT@%s@@@' % step_text | 1508 print '@@@STEP_TEXT@%s@@@' % step_text |
| 1192 if not options.shallow: | 1509 if not options.shallow: |
| 1193 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD | 1510 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD |
| 1194 and not options.no_shallow) | 1511 and not options.no_shallow) |
| 1195 | 1512 |
| 1196 # The first solution is where the primary DEPS file resides. | 1513 # The first solution is where the primary DEPS file resides. |
| 1197 first_sln = dir_names[0] | 1514 first_sln = dir_names[0] |
| 1198 | 1515 |
| 1199 # Split all the revision specifications into a nice dict. | 1516 # Split all the revision specifications into a nice dict. |
| 1200 print 'Revisions: %s' % options.revision | 1517 print 'Revisions: %s' % options.revision |
| 1201 revisions = parse_revisions(options.revision, first_sln) | 1518 revisions = parse_revisions(options.revision, first_sln) |
| 1202 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) | 1519 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) |
| 1203 return revisions, step_text | 1520 return revisions, step_text |
| 1204 | 1521 |
| 1205 | 1522 |
| 1206 def checkout(options, git_slns, specs, master, revisions, step_text): | 1523 def checkout(options, git_slns, specs, buildspec, master, |
| 1524 svn_root, revisions, step_text): |
| 1207 first_sln = git_slns[0]['name'] | 1525 first_sln = git_slns[0]['name'] |
| 1208 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1526 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
| 1209 try: | 1527 try: |
| 1210 # Outer try is for catching patch failures and exiting gracefully. | 1528 # Outer try is for catching patch failures and exiting gracefully. |
| 1211 # Inner try is for catching gclient failures and retrying gracefully. | 1529 # Inner try is for catching gclient failures and retrying gracefully. |
| 1212 try: | 1530 try: |
| 1213 checkout_parameters = dict( | 1531 checkout_parameters = dict( |
| 1214 # First, pass in the base of what we want to check out. | 1532 # First, pass in the base of what we want to check out. |
| 1215 solutions=git_slns, | 1533 solutions=git_slns, |
| 1216 revisions=revisions, | 1534 revisions=revisions, |
| 1217 first_sln=first_sln, | 1535 first_sln=first_sln, |
| 1218 | 1536 |
| 1219 # Also, target os variables for gclient. | 1537 # Also, target os variables for gclient. |
| 1220 target_os=specs.get('target_os', []), | 1538 target_os=specs.get('target_os', []), |
| 1221 target_os_only=specs.get('target_os_only', False), | 1539 target_os_only=specs.get('target_os_only', False), |
| 1222 | 1540 |
| 1223 # Then, pass in information about how to patch. | 1541 # Then, pass in information about how to patch. |
| 1224 patch_root=options.patch_root, | 1542 patch_root=options.patch_root, |
| 1225 issue=options.issue, | 1543 issue=options.issue, |
| 1226 patchset=options.patchset, | 1544 patchset=options.patchset, |
| 1227 rietveld_server=options.rietveld_server, | 1545 rietveld_server=options.rietveld_server, |
| 1228 gerrit_repo=options.gerrit_repo, | 1546 gerrit_repo=options.gerrit_repo, |
| 1229 gerrit_ref=options.gerrit_ref, | 1547 gerrit_ref=options.gerrit_ref, |
| 1230 gerrit_rebase_patch_ref=not options.gerrit_no_rebase_patch_ref, | 1548 gerrit_rebase_patch_ref=not options.gerrit_no_rebase_patch_ref, |
| 1231 revision_mapping=options.revision_mapping, | 1549 revision_mapping=options.revision_mapping, |
| 1232 apply_issue_email_file=options.apply_issue_email_file, | 1550 apply_issue_email_file=options.apply_issue_email_file, |
| 1233 apply_issue_key_file=options.apply_issue_key_file, | 1551 apply_issue_key_file=options.apply_issue_key_file, |
| 1234 | 1552 |
| 1235 # For official builders. | 1553 # For official builders. |
| 1554 buildspec=buildspec, |
| 1236 gyp_env=options.gyp_env, | 1555 gyp_env=options.gyp_env, |
| 1237 runhooks=not options.no_runhooks, | 1556 runhooks=not options.no_runhooks, |
| 1238 | 1557 |
| 1239 # Finally, extra configurations such as shallowness of the clone. | 1558 # Finally, extra configurations such as shallowness of the clone. |
| 1240 shallow=options.shallow, | 1559 shallow=options.shallow, |
| 1241 refs=options.refs, | 1560 refs=options.refs, |
| 1242 git_cache_dir=options.git_cache_dir, | 1561 git_cache_dir=options.git_cache_dir, |
| 1243 gerrit_reset=not options.gerrit_no_reset) | 1562 gerrit_reset=not options.gerrit_no_reset) |
| 1244 gclient_output = ensure_checkout(**checkout_parameters) | 1563 gclient_output = ensure_checkout(**checkout_parameters) |
| 1245 except GclientSyncFailed: | 1564 except GclientSyncFailed: |
| 1246 print 'We failed gclient sync, lets delete the checkout and retry.' | 1565 print 'We failed gclient sync, lets delete the checkout and retry.' |
| 1247 ensure_no_checkout(dir_names) | 1566 ensure_no_checkout(dir_names, '*') |
| 1248 gclient_output = ensure_checkout(**checkout_parameters) | 1567 gclient_output = ensure_checkout(**checkout_parameters) |
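The nested try above is a one-shot recovery: if gclient sync fails, the checkout directories are deleted and the same parameters are retried exactly once. In generic form (GclientSyncFailed is the real exception defined earlier in this file; the helper names below are stand-ins):

class GclientSyncFailed(Exception):
  # Stand-in for the exception defined earlier in bot_update.py.
  pass

def sync_with_one_retry(do_sync, wipe_checkout):
  # Generic shape of the recovery above: try once, and on a sync failure
  # wipe the checkout and retry exactly once before letting errors escape.
  try:
    return do_sync()
  except GclientSyncFailed:
    wipe_checkout()
    return do_sync()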
| 1249 except PatchFailed as e: | 1568 except PatchFailed as e: |
| 1250 if options.output_json: | 1569 if options.output_json: |
| 1251 # Tell recipes information such as root, got_revision, etc. | 1570 # Tell recipes information such as root, got_revision, etc. |
| 1252 emit_json(options.output_json, | 1571 emit_json(options.output_json, |
| 1253 did_run=True, | 1572 did_run=True, |
| 1254 root=first_sln, | 1573 root=first_sln, |
| 1255 log_lines=[('patch error', e.output),], | 1574 log_lines=[('patch error', e.output),], |
| 1256 patch_apply_return_code=e.code, | 1575 patch_apply_return_code=e.code, |
| 1257 patch_root=options.patch_root, | 1576 patch_root=options.patch_root, |
| 1258 patch_failure=True, | 1577 patch_failure=True, |
| 1259 step_text='%s PATCH FAILED' % step_text, | 1578 step_text='%s PATCH FAILED' % step_text, |
| 1260 fixed_revisions=revisions) | 1579 fixed_revisions=revisions) |
| 1261 else: | 1580 else: |
| 1262 # If we're not on recipes, tell annotator about our got_revisions. | 1581 # If we're not on recipes, tell annotator about our got_revisions. |
| 1263 emit_log_lines('patch error', e.output) | 1582 emit_log_lines('patch error', e.output) |
| 1264 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text | 1583 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text |
| 1265 raise | 1584 raise |
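When a patch fails and --output_json was given, the branch above still emits a JSON blob so recipes can surface the failure as a patch problem rather than an infra problem. Roughly, the payload looks like the following; the keys mirror the emit_json call above, the values are illustrative, and the serialization details of emit_json are not shown in this hunk:

import json

# Illustrative payload only: keys follow the emit_json() call above, values
# are invented for the example, and the real writer may add more fields.
patch_failure_payload = {
    'did_run': True,
    'root': 'src',
    'log_lines': [['patch error', '<output of the failed patch step>']],
    'patch_apply_return_code': 1,
    'patch_root': 'src',
    'patch_failure': True,
    'step_text': '[60GB/80GB used (75%)] PATCH FAILED',
    'fixed_revisions': {'src': '1a2b3c4d'},
}
serialized = json.dumps(patch_failure_payload, indent=2)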
| 1266 | 1585 |
| 1586 # The revision is an svn revision, unless the master is a git master. |

| 1587 use_svn_rev = master not in GIT_MASTERS |
| 1588 |
| 1267 # Take care of got_revisions outputs. | 1589 # Take care of got_revisions outputs. |
| 1268 revision_mapping = GOT_REVISION_MAPPINGS.get(git_slns[0]['url'], {}) | 1590 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {})) |
| 1269 if options.revision_mapping: | 1591 if options.revision_mapping: |
| 1270 revision_mapping.update(options.revision_mapping) | 1592 revision_mapping.update(options.revision_mapping) |
| 1271 | 1593 |
| 1272 # If the repo is not in the default GOT_REVISION_MAPPINGS and no | 1594 # If the repo is not in the default GOT_REVISION_MAPPINGS and no |
| 1273 # revision_mapping was specified on the command line, then | 1595 # revision_mapping was specified on the command line, then |
| 1274 # default to setting 'got_revision' based on the first solution. | 1596 # default to setting 'got_revision' based on the first solution. |
| 1275 if not revision_mapping: | 1597 if not revision_mapping: |
| 1276 revision_mapping[first_sln] = 'got_revision' | 1598 revision_mapping[first_sln] = 'got_revision' |
| 1277 | 1599 |
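The fallback chain above is: start from the built-in GOT_REVISION_MAPPINGS entry for this repo, layer any --revision_mapping flags on top, and if both are empty map the first solution to the bare 'got_revision' property. A compact restatement of that chain (the helper name is hypothetical):

def resolve_revision_mapping(builtin_mapping, cli_mapping, first_sln):
  # Hypothetical restatement of the fallback above: built-in defaults,
  # then command-line overrides, then a bare got_revision on the first
  # solution when nothing else was configured.
  mapping = dict(builtin_mapping or {})
  mapping.update(cli_mapping or {})
  if not mapping:
    mapping[first_sln] = 'got_revision'
  return mapping

# e.g. resolve_revision_mapping({}, {}, 'src') -> {'src': 'got_revision'}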
| 1278 got_revisions = parse_got_revision(gclient_output, revision_mapping) | 1600 got_revisions = parse_got_revision(gclient_output, revision_mapping, |
| 1601 use_svn_rev) |
| 1279 | 1602 |
| 1280 if not got_revisions: | 1603 if not got_revisions: |
| 1281 # TODO(hinoka): We should probably bail out here, but in the interest | 1604 # TODO(hinoka): We should probably bail out here, but in the interest |
| 1282 # of giving mis-configured bots some time to get fixed, use a dummy | 1605 # of giving mis-configured bots some time to get fixed, use a dummy |
| 1283 # revision here. | 1606 # revision here. |
| 1284 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } | 1607 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } |
| 1285 #raise Exception('No got_revision(s) found in gclient output') | 1608 #raise Exception('No got_revision(s) found in gclient output') |
| 1286 | 1609 |
| 1287 if options.output_json: | 1610 if options.output_json: |
| 1288 manifest = create_manifest() if options.output_manifest else None | 1611 manifest = create_manifest() if options.output_manifest else None |
| (...skipping 53 matching lines...) |
| 1342 # Check if this script should activate or not. | 1665 # Check if this script should activate or not. |
| 1343 active = options.force or check_valid_host(master, builder, slave) | 1666 active = options.force or check_valid_host(master, builder, slave) |
| 1344 | 1667 |
| 1345 # Print a helpful message to tell developers what's going on with this step. | 1668 # Print a helpful message to tell developers what's going on with this step. |
| 1346 print_help_text( | 1669 print_help_text( |
| 1347 options.force, options.output_json, active, master, builder, slave) | 1670 options.force, options.output_json, active, master, builder, slave) |
| 1348 | 1671 |
| 1349 # Parse, manipulate, and print the gclient solutions. | 1672 # Parse, manipulate, and print the gclient solutions. |
| 1350 specs = {} | 1673 specs = {} |
| 1351 exec(options.specs, specs) | 1674 exec(options.specs, specs) |
| 1352 orig_solutions = specs.get('solutions', []) | 1675 svn_solutions = specs.get('solutions', []) |
| 1353 git_slns = modify_solutions(orig_solutions) | 1676 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions) |
| 1677 options.revision = maybe_ignore_revision(options.revision, buildspec) |
| 1354 | 1678 |
| 1355 solutions_printer(git_slns) | 1679 solutions_printer(git_slns) |
| 1356 | 1680 |
| 1357 try: | 1681 try: |
| 1358 # Dun dun dun, the main part of bot_update. | 1682 # Dun dun dun, the main part of bot_update. |
| 1359 revisions, step_text = prepare(options, git_slns, active) | 1683 revisions, step_text = prepare(options, git_slns, active) |
| 1360 checkout(options, git_slns, specs, master, revisions, step_text) | 1684 checkout(options, git_slns, specs, buildspec, master, svn_root, revisions, |
| 1685 step_text) |
| 1361 | 1686 |
| 1687 except Inactive: |
| 1688 # Not active, should count as passing. |
| 1689 pass |
| 1362 except PatchFailed as e: | 1690 except PatchFailed as e: |
| 1363 emit_flag(options.flag_file) | 1691 emit_flag(options.flag_file) |
| 1364 # Return a specific non-zero exit code for patch failure (because it is | 1692 # Return a specific non-zero exit code for patch failure (because it is |
| 1365 # a failure), but make it different from other failures to distinguish | 1693 # a failure), but make it different from other failures to distinguish |
| 1366 # between infra failures (independent of the patch author) and patch | 1694 # between infra failures (independent of the patch author) and patch |
| 1367 # failures (which the patch author can fix). However, a PatchFailed due to | 1695 # failures (which the patch author can fix). However, a PatchFailed due to |
| 1368 # a failed patch download is still an infra problem. | 1696 # a failed patch download is still an infra problem. |
| 1369 if e.code == 3: | 1697 if e.code == 3: |
| 1370 # Patch download problem. | 1698 # Patch download problem. |
| 1371 return 87 | 1699 return 87 |
| 1372 # Genuine patch problem. | 1700 # Genuine patch problem. |
| 1373 return 88 | 1701 return 88 |
| 1374 except Exception: | 1702 except Exception: |
| 1375 # Unexpected failure. | 1703 # Unexpected failure. |
| 1376 emit_flag(options.flag_file) | 1704 emit_flag(options.flag_file) |
| 1377 raise | 1705 raise |
| 1378 else: | 1706 else: |
| 1379 emit_flag(options.flag_file) | 1707 emit_flag(options.flag_file) |
| 1380 | 1708 |
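The patch-failure branch above gives the caller a tiny exit-code contract: 87 for a patch that could not be downloaded (an infra problem), 88 for a patch that downloaded but failed to apply (the author's problem), and any other non-zero code for an unexpected infra failure. A hedged caller-side sketch (the wrapper function is hypothetical; only the codes 87 and 88 come from the returns above):

# Hypothetical consumer of bot_update's exit codes; 87 and 88 come from the
# returns above, the category strings are descriptive only.
PATCH_DOWNLOAD_FAILURE = 87
PATCH_APPLY_FAILURE = 88

def classify_bot_update_exit(returncode):
  if returncode == 0:
    return 'success'
  if returncode == PATCH_DOWNLOAD_FAILURE:
    return 'infra failure: patch download'
  if returncode == PATCH_APPLY_FAILURE:
    return 'patch failure: author should fix and re-upload'
  return 'infra failure: unexpected error'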
| 1381 | 1709 |
| 1382 if __name__ == '__main__': | 1710 if __name__ == '__main__': |
| 1383 sys.exit(main()) | 1711 sys.exit(main()) |