OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 # TODO(hinoka): Use logging. | |
7 | |
8 import cStringIO | 6 import cStringIO |
9 import codecs | 7 import codecs |
10 import collections | 8 import collections |
11 import copy | 9 import copy |
12 import ctypes | 10 import ctypes |
13 import json | 11 import json |
14 import optparse | 12 import optparse |
15 import os | 13 import os |
16 import pprint | 14 import pprint |
17 import random | 15 import random |
(...skipping 48 matching lines...)
66 '..', # ROOT_DIR | 64 '..', # ROOT_DIR |
67 'build', | 65 'build', |
68 'scripts'), | 66 'scripts'), |
69 path.join(SLAVE_DIR, '..', 'build', 'scripts'), | 67 path.join(SLAVE_DIR, '..', 'build', 'scripts'), |
70 ], default=path.dirname(THIS_DIR)) | 68 ], default=path.dirname(THIS_DIR)) |
71 BUILD_DIR = path.dirname(SCRIPTS_DIR) | 69 BUILD_DIR = path.dirname(SCRIPTS_DIR) |
72 ROOT_DIR = path.dirname(BUILD_DIR) | 70 ROOT_DIR = path.dirname(BUILD_DIR) |
73 | 71 |
74 DEPOT_TOOLS_DIR = path.abspath(path.join(THIS_DIR, '..', '..', '..')) | 72 DEPOT_TOOLS_DIR = path.abspath(path.join(THIS_DIR, '..', '..', '..')) |
75 | 73 |
76 BUILD_INTERNAL_DIR = check_dir( | |
77 'build_internal', [ | |
78 path.join(ROOT_DIR, 'build_internal'), | |
79 path.join(ROOT_DIR, # .recipe_deps | |
80 path.pardir, # slave | |
81 path.pardir, # scripts | |
82 path.pardir), # build_internal | |
83 ]) | |
84 | |
85 | |
86 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' | 74 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com' |
87 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' | 75 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' |
88 | 76 |
89 # Official builds use buildspecs, so this is a special case. | 77 # Official builds use buildspecs, so this is a special case. |
90 BUILDSPEC_TYPE = collections.namedtuple('buildspec', | 78 BUILDSPEC_TYPE = collections.namedtuple('buildspec', |
91 ('container', 'version')) | 79 ('container', 'version')) |
92 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' | 80 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' |
93 '(build|branches|releases)/(.+)$') | 81 '(build|branches|releases)/(.+)$') |
94 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' | 82 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' |
95 'buildspec') | 83 'buildspec') |
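The BUILDSPEC_RE and BUILDSPEC_TYPE pair above is how official-build buildspec URLs get recognized. A minimal, self-contained sketch of how the two fit together (the real matching code lives in the skipped lines; parse_buildspec is a hypothetical helper name):

import collections
import re

BUILDSPEC_TYPE = collections.namedtuple('buildspec', ('container', 'version'))
BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/'
                r'(build|branches|releases)/(.+)$')

def parse_buildspec(svn_path):
  # Hypothetical helper: map an SVN buildspec path to (container, version).
  m = re.match(BUILDSPEC_RE, svn_path)
  return BUILDSPEC_TYPE(m.group(1), m.group(2)) if m else None

# parse_buildspec('/chrome-internal/trunk/tools/buildspec/releases/50.0.2661.94')
#   -> buildspec(container='releases', version='50.0.2661.94')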
(...skipping 65 matching lines...)
161 'src/tools/swarm_client/': 'got_swarm_client_revision', | 149 'src/tools/swarm_client/': 'got_swarm_client_revision', |
162 'src/tools/swarming_client/': 'got_swarming_client_revision', | 150 'src/tools/swarming_client/': 'got_swarming_client_revision', |
163 'src/third_party/WebKit/': 'got_webkit_revision', | 151 'src/third_party/WebKit/': 'got_webkit_revision', |
164 'src/third_party/webrtc/': 'got_webrtc_revision', | 152 'src/third_party/webrtc/': 'got_webrtc_revision', |
165 'src/v8/': 'got_v8_revision', | 153 'src/v8/': 'got_v8_revision', |
166 } | 154 } |
167 } | 155 } |
168 | 156 |
169 | 157 |
170 BOT_UPDATE_MESSAGE = """ | 158 BOT_UPDATE_MESSAGE = """ |
171 What is the "Bot Update" step? | 159 Bot Update Debugging Information |
172 ============================== | |
173 | |
174 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and | |
175 its dependencies) is checked out in a consistent state. This means that all of | |
176 the necessary repositories are checked out, no extra repositories are checked | |
177 out, and no locally modified files are present. | |
178 | |
179 These actions used to be taken care of by the "gclient revert" and "update" | |
180 steps. However, those steps are known to be buggy and occasionally flaky. This | |
181 step has two main advantages over them: | |
182 * it only operates in Git, so the logic can be clearer and cleaner; and | |
183 * it is a slave-side script, so its behavior can be modified without | |
184 restarting the master. | |
185 | |
186 Why Git, you ask? Because that is the direction that the Chromium project is | |
187 heading. This step is an integral part of the transition from using the SVN repo | |
188 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while | |
189 we fully convert everything to Git. This message will get out of your way | |
190 eventually, and the waterfall will be a happier place because of it. | |
191 | |
192 This step can be activated or deactivated independently on every builder on | |
193 every master. When it is active, the "gclient revert" and "update" steps become | |
194 no-ops. When it is inactive, it prints this message, cleans up after itself, and | |
195 lets everything else continue as though nothing has changed. Eventually, when | |
196 everything is stable enough, this step will replace them entirely. | |
197 | 160 |
198 Debugging information: | 161 Debugging information: |
199 (master/builder/slave may be unspecified on recipes) | |
200 master: %(master)s | |
201 builder: %(builder)s | |
202 slave: %(slave)s | |
203 forced by recipes: %(recipe)s | |
204 CURRENT_DIR: %(CURRENT_DIR)s | 162 CURRENT_DIR: %(CURRENT_DIR)s |
205 BUILDER_DIR: %(BUILDER_DIR)s | 163 BUILDER_DIR: %(BUILDER_DIR)s |
206 SLAVE_DIR: %(SLAVE_DIR)s | 164 SLAVE_DIR: %(SLAVE_DIR)s |
207 THIS_DIR: %(THIS_DIR)s | 165 THIS_DIR: %(THIS_DIR)s |
208 SCRIPTS_DIR: %(SCRIPTS_DIR)s | 166 SCRIPTS_DIR: %(SCRIPTS_DIR)s |
209 BUILD_DIR: %(BUILD_DIR)s | 167 BUILD_DIR: %(BUILD_DIR)s |
210 ROOT_DIR: %(ROOT_DIR)s | 168 ROOT_DIR: %(ROOT_DIR)s |
211 DEPOT_TOOLS_DIR: %(DEPOT_TOOLS_DIR)s | 169 DEPOT_TOOLS_DIR: %(DEPOT_TOOLS_DIR)s""" |
212 bot_update.py is:""" | |
213 | |
214 ACTIVATED_MESSAGE = """ACTIVE. | |
215 The bot will perform a Git checkout in this step. | |
216 The "gclient revert" and "update" steps are no-ops. | |
217 | |
218 """ | |
219 | |
220 NOT_ACTIVATED_MESSAGE = """INACTIVE. | |
221 This step does nothing. You actually want to look at the "update" step. | |
222 | |
223 """ | |
224 | 170 |
225 | 171 |
226 GCLIENT_TEMPLATE = """solutions = %(solutions)s | 172 GCLIENT_TEMPLATE = """solutions = %(solutions)s |
227 | 173 |
228 cache_dir = r%(cache_dir)s | 174 cache_dir = r%(cache_dir)s |
229 %(target_os)s | 175 %(target_os)s |
230 %(target_os_only)s | 176 %(target_os_only)s |
231 """ | 177 """ |
232 | 178 |
233 | 179 |
234 internal_data = {} | |
235 if BUILD_INTERNAL_DIR: | |
236 local_vars = {} | |
237 try: | |
238 execfile(os.path.join( | |
239 BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'), | |
240 local_vars) | |
241 except Exception: | |
242 # Same as if BUILD_INTERNAL_DIR didn't exist in the first place. | |
243 print 'Warning: unable to read internal configuration file.' | |
244 print 'If this is an internal bot, this step may be erroneously inactive.' | |
245 internal_data = local_vars | |
246 | |
247 RECOGNIZED_PATHS = { | |
248 # If SVN path matches key, the entire URL is rewritten to the Git url. | |
249 '/chrome/trunk/src': | |
250 CHROMIUM_SRC_URL, | |
251 '/chrome/trunk/src/tools/cros.DEPS': | |
252 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git', | |
253 } | |
254 RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {})) | |
255 | |
256 ENABLED_MASTERS = [ | |
257 'bot_update.always_on', | |
258 'chromium.android', | |
259 'chromium.angle', | |
260 'chromium.chrome', | |
261 'chromium.chromedriver', | |
262 'chromium.chromiumos', | |
263 'chromium', | |
264 'chromium.fyi', | |
265 'chromium.goma', | |
266 'chromium.gpu', | |
267 'chromium.gpu.fyi', | |
268 'chromium.infra', | |
269 'chromium.infra.cron', | |
270 'chromium.linux', | |
271 'chromium.lkgr', | |
272 'chromium.mac', | |
273 'chromium.memory', | |
274 'chromium.memory.fyi', | |
275 'chromium.perf', | |
276 'chromium.perf.fyi', | |
277 'chromium.swarm', | |
278 'chromium.webkit', | |
279 'chromium.webrtc', | |
280 'chromium.webrtc.fyi', | |
281 'chromium.win', | |
282 'client.catapult', | |
283 'client.drmemory', | |
284 'client.mojo', | |
285 'client.nacl', | |
286 'client.nacl.ports', | |
287 'client.nacl.sdk', | |
288 'client.nacl.toolchain', | |
289 'client.pdfium', | |
290 'client.skia', | |
291 'client.skia.fyi', | |
292 'client.v8', | |
293 'client.v8.branches', | |
294 'client.v8.fyi', | |
295 'client.v8.ports', | |
296 'client.webrtc', | |
297 'client.webrtc.fyi', | |
298 'tryserver.blink', | |
299 'tryserver.client.catapult', | |
300 'tryserver.client.mojo', | |
301 'tryserver.chromium.android', | |
302 'tryserver.chromium.angle', | |
303 'tryserver.chromium.linux', | |
304 'tryserver.chromium.mac', | |
305 'tryserver.chromium.perf', | |
306 'tryserver.chromium.win', | |
307 'tryserver.infra', | |
308 'tryserver.nacl', | |
309 'tryserver.v8', | |
310 'tryserver.webrtc', | |
311 ] | |
312 ENABLED_MASTERS += internal_data.get('ENABLED_MASTERS', []) | |
313 | |
314 ENABLED_BUILDERS = { | |
315 'client.dart.fyi': [ | |
316 'v8-linux-release', | |
317 'v8-mac-release', | |
318 'v8-win-release', | |
319 ], | |
320 'client.dynamorio': [ | |
321 'linux-v8-dr', | |
322 ], | |
323 } | |
324 ENABLED_BUILDERS.update(internal_data.get('ENABLED_BUILDERS', {})) | |
325 | |
326 ENABLED_SLAVES = {} | |
327 ENABLED_SLAVES.update(internal_data.get('ENABLED_SLAVES', {})) | |
328 | |
329 # Disabled filters get run AFTER enabled filters, so for example if a builder | |
330 # config is enabled, but a bot on that builder is disabled, that bot will | |
331 # be disabled. | |
332 DISABLED_BUILDERS = {} | |
333 DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) | |
334 | |
335 DISABLED_SLAVES = {} | |
336 DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) | |
337 | |
338 # These masters work only in Git, meaning for got_revision, always output | |
339 # a git hash rather than a SVN rev. | |
340 GIT_MASTERS = [ | |
341 'client.v8', | |
342 'client.v8.branches', | |
343 'client.v8.ports', | |
344 'tryserver.v8', | |
345 ] | |
346 GIT_MASTERS += internal_data.get('GIT_MASTERS', []) | |
347 | |
348 | |
349 # How many times to try before giving up. | 180 # How many times to try before giving up. |
350 ATTEMPTS = 5 | 181 ATTEMPTS = 5 |
351 | 182 |
352 # Find deps2git | |
353 DEPS2GIT_DIR_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git') | |
354 DEPS2GIT_PATH = path.join(DEPS2GIT_DIR_PATH, 'deps2git.py') | |
355 S2G_INTERNAL_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git_internal', | |
356 'svn_to_git_internal.py') | |
357 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') | 183 GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py') |
358 | 184 |
359 # Find the patch tool. | 185 # Find the patch tool. |
360 if sys.platform.startswith('win'): | 186 if sys.platform.startswith('win'): |
361 if not BUILD_INTERNAL_DIR: | 187 # TODO(hinoka): Check this in, is required for perf builders. |
362 print 'Warning: could not find patch tool because there is no ' | 188 PATCH_TOOL = path.join(THIS_DIR, 'patch.EXE') |
363 print 'build_internal present.' | |
364 PATCH_TOOL = None | |
365 else: | |
366 PATCH_TOOL = path.join(BUILD_INTERNAL_DIR, 'tools', 'patch.EXE') | |
367 else: | 189 else: |
368 PATCH_TOOL = '/usr/bin/patch' | 190 PATCH_TOOL = '/usr/bin/patch' |
369 | 191 |
370 # If there is less than 100GB of disk space on the system, then we do | 192 # If there is less than 100GB of disk space on the system, then we do |
371 # a shallow checkout. | 193 # a shallow checkout. |
372 SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024 | 194 SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024 |
373 | 195 |
374 | 196 |
375 class SubprocessFailed(Exception): | 197 class SubprocessFailed(Exception): |
376 def __init__(self, message, code, output): | 198 def __init__(self, message, code, output): |
(...skipping 11 matching lines...)
388 | 210 |
389 | 211 |
390 class SVNRevisionNotFound(Exception): | 212 class SVNRevisionNotFound(Exception): |
391 pass | 213 pass |
392 | 214 |
393 | 215 |
394 class InvalidDiff(Exception): | 216 class InvalidDiff(Exception): |
395 pass | 217 pass |
396 | 218 |
397 | 219 |
398 class Inactive(Exception): | |
399 """Not really an exception, just used to exit early cleanly.""" | |
400 pass | |
401 | |
402 | |
403 RETRY = object() | 220 RETRY = object() |
404 OK = object() | 221 OK = object() |
405 FAIL = object() | 222 FAIL = object() |
406 | 223 |
407 | 224 |
408 class PsPrinter(object): | 225 class PsPrinter(object): |
409 def __init__(self, interval=300): | 226 def __init__(self, interval=300): |
410 self.interval = interval | 227 self.interval = interval |
411 self.active = sys.platform.startswith('linux2') | 228 self.active = sys.platform.startswith('linux2') |
412 self.thread = None | 229 self.thread = None |
(...skipping 108 matching lines...)
521 | 338 |
522 def get_gclient_spec(solutions, target_os, target_os_only, git_cache_dir): | 339 def get_gclient_spec(solutions, target_os, target_os_only, git_cache_dir): |
523 return GCLIENT_TEMPLATE % { | 340 return GCLIENT_TEMPLATE % { |
524 'solutions': pprint.pformat(solutions, indent=4), | 341 'solutions': pprint.pformat(solutions, indent=4), |
525 'cache_dir': '"%s"' % git_cache_dir, | 342 'cache_dir': '"%s"' % git_cache_dir, |
526 'target_os': ('\ntarget_os=%s' % target_os) if target_os else '', | 343 'target_os': ('\ntarget_os=%s' % target_os) if target_os else '', |
527 'target_os_only': '\ntarget_os_only=%s' % target_os_only | 344 'target_os_only': '\ntarget_os_only=%s' % target_os_only |
528 } | 345 } |
529 | 346 |
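For reference, a hedged example of what get_gclient_spec() above renders, assuming a single Chromium solution and a made-up cache path (the extra solution keys are illustrative):

solutions = [{
    'name': 'src',
    'url': 'https://chromium.googlesource.com/chromium/src.git',
    'deps_file': 'DEPS',
    'managed': True,
}]
print get_gclient_spec(solutions, target_os=None, target_os_only=False,
                       git_cache_dir='/b/build/slave/cache_dir')
# Prints roughly:
#   solutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', ...}]
#
#   cache_dir = r"/b/build/slave/cache_dir"
#
#   target_os_only=False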
530 | 347 |
531 def check_enabled(master, builder, slave): | |
532 if master in ENABLED_MASTERS: | |
533 return True | |
534 builder_list = ENABLED_BUILDERS.get(master) | |
535 if builder_list and builder in builder_list: | |
536 return True | |
537 slave_list = ENABLED_SLAVES.get(master) | |
538 if slave_list and slave in slave_list: | |
539 return True | |
540 return False | |
541 | |
542 | |
543 def check_disabled(master, builder, slave): | |
544 """Returns True if disabled, False if not disabled.""" | |
545 builder_list = DISABLED_BUILDERS.get(master) | |
546 if builder_list and builder in builder_list: | |
547 return True | |
548 slave_list = DISABLED_SLAVES.get(master) | |
549 if slave_list and slave in slave_list: | |
550 return True | |
551 return False | |
552 | |
553 | |
554 def check_valid_host(master, builder, slave): | |
555 return (check_enabled(master, builder, slave) | |
556 and not check_disabled(master, builder, slave)) | |
557 | |
558 | |
559 def maybe_ignore_revision(revision, buildspec): | 348 def maybe_ignore_revision(revision, buildspec): |
560 """Handle builders that don't care what buildbot tells them to build. | 349 """Handle builders that don't care what buildbot tells them to build. |
561 | 350 |
562 This is especially the case with branch builders that build from buildspecs | 351 This is especially the case with branch builders that build from buildspecs |
563 and/or trigger off multiple repositories, where the --revision passed in has | 352 and/or trigger off multiple repositories, where the --revision passed in has |
564 nothing to do with the solution being built. Clearing the revision in this | 353 nothing to do with the solution being built. Clearing the revision in this |
565 case causes bot_update to use HEAD rather than trying to checkout an | 354 case causes bot_update to use HEAD rather than trying to checkout an |
566 inappropriate version of the solution. | 355 inappropriate version of the solution. |
567 """ | 356 """ |
568 if buildspec and buildspec.container == 'branches': | 357 if buildspec and buildspec.container == 'branches': |
(...skipping 233 matching lines...)
802 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision) | 591 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision) |
803 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch | 592 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
804 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref] | 593 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref] |
805 result = git(*cmd, cwd=sln_dir).strip() | 594 result = git(*cmd, cwd=sln_dir).strip() |
806 if result: | 595 if result: |
807 return result | 596 return result |
808 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' % | 597 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' % |
809 (revision, sln_dir)) | 598 (revision, sln_dir)) |
810 | 599 |
811 | 600 |
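The code above resolves an SVN revision to a git hash by grepping commit messages for the git-svn-id footer that SVN-mirrored commits carry. A standalone sketch of the same query, assuming GIT_SVN_ID_FOOTER_KEY is 'git-svn-id' (the constant itself is defined in the skipped lines):

import subprocess

def svn_rev_to_git_hash(revision, cwd, branch='origin/master'):
  # Newest commit on the branch whose git-svn-id footer ends in @<revision>.
  match = '^git-svn-id: [^ ]*@%s ' % revision
  cmd = ['git', 'log', '-E', '--grep', match,
         '--format=%H', '--max-count=1', branch]
  return subprocess.check_output(cmd, cwd=cwd).strip()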
812 def _last_commit_for_file(filename, repo_base): | |
813 cmd = ['log', '--format=%H', '--max-count=1', '--', filename] | |
814 return git(*cmd, cwd=repo_base).strip() | |
815 | |
816 | |
817 def need_to_run_deps2git(repo_base, deps_file, deps_git_file): | |
818 """Checks to see if we need to run deps2git. | |
819 | |
820 Returns True if there was a DEPS change after the last .DEPS.git update | |
821 or if DEPS has local modifications. | |
822 """ | |
823 # See if DEPS is dirty | |
824 deps_file_status = git( | |
825 'status', '--porcelain', deps_file, cwd=repo_base).strip() | |
826 if deps_file_status and deps_file_status.startswith('M '): | |
827 return True | |
828 | |
829 last_known_deps_ref = _last_commit_for_file(deps_file, repo_base) | |
830 last_known_deps_git_ref = _last_commit_for_file(deps_git_file, repo_base) | |
831 merge_base_ref = git('merge-base', last_known_deps_ref, | |
832 last_known_deps_git_ref, cwd=repo_base).strip() | |
833 | |
834 # If the merge base of the last DEPS and last .DEPS.git file is not | |
835 # equivilent to the hash of the last DEPS file, that means the DEPS file | |
836 # was committed after the last .DEPS.git file. | |
837 return last_known_deps_ref != merge_base_ref | |
838 | |
839 | |
840 def ensure_deps2git(solution, shallow, git_cache_dir): | |
841 repo_base = path.join(os.getcwd(), solution['name']) | |
842 deps_file = path.join(repo_base, 'DEPS') | |
843 deps_git_file = path.join(repo_base, '.DEPS.git') | |
844 if (not git('ls-files', 'DEPS', cwd=repo_base).strip() or | |
845 not git('ls-files', '.DEPS.git', cwd=repo_base).strip()): | |
846 return | |
847 | |
848 print 'Checking if %s is newer than %s' % (deps_file, deps_git_file) | |
849 if not need_to_run_deps2git(repo_base, deps_file, deps_git_file): | |
850 return | |
851 | |
852 print '===DEPS file modified, need to run deps2git===' | |
853 cmd = [sys.executable, DEPS2GIT_PATH, | |
854 '--workspace', os.getcwd(), | |
855 '--cache_dir', git_cache_dir, | |
856 '--deps', deps_file, | |
857 '--out', deps_git_file] | |
858 if 'chrome-internal.googlesource' in solution['url']: | |
859 cmd.extend(['--extra-rules', S2G_INTERNAL_PATH]) | |
860 if shallow: | |
861 cmd.append('--shallow') | |
862 call(*cmd) | |
863 | |
864 | |
865 def emit_log_lines(name, lines): | 601 def emit_log_lines(name, lines): |
866 for line in lines.splitlines(): | 602 for line in lines.splitlines(): |
867 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) | 603 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) |
868 print '@@@STEP_LOG_END@%s@@@' % name | 604 print '@@@STEP_LOG_END@%s@@@' % name |
869 | 605 |
870 | 606 |
871 def emit_properties(properties): | 607 def emit_properties(properties): |
872 for property_name, property_value in sorted(properties.items()): | 608 for property_name, property_value in sorted(properties.items()): |
873 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) | 609 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value) |
874 | 610 |
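Both helpers above speak the buildbot annotator protocol over stdout. For example (values are made up):

emit_log_lines('patch error', 'line one\nline two')
#   @@@STEP_LOG_LINE@patch error@line one@@@
#   @@@STEP_LOG_LINE@patch error@line two@@@
#   @@@STEP_LOG_END@patch error@@@

emit_properties({'got_revision': 'deadbeef', 'got_v8_revision': '1a2b3c'})
#   @@@SET_BUILD_PROPERTY@got_revision@"deadbeef"@@@
#   @@@SET_BUILD_PROPERTY@got_v8_revision@"1a2b3c"@@@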
(...skipping 125 matching lines...)
1000 | 736 |
1001 git('clean', '-dff', cwd=sln_dir) | 737 git('clean', '-dff', cwd=sln_dir) |
1002 | 738 |
1003 if first_solution: | 739 if first_solution: |
1004 git_ref = git('log', '--format=%H', '--max-count=1', | 740 git_ref = git('log', '--format=%H', '--max-count=1', |
1005 cwd=sln_dir).strip() | 741 cwd=sln_dir).strip() |
1006 first_solution = False | 742 first_solution = False |
1007 return git_ref | 743 return git_ref |
1008 | 744 |
1009 | 745 |
1010 def _download(url): | |
1011 """Fetch url and return content, with retries for flake.""" | |
1012 for attempt in xrange(ATTEMPTS): | |
1013 try: | |
1014 return urllib2.urlopen(url).read() | |
1015 except Exception: | |
1016 if attempt == ATTEMPTS - 1: | |
1017 raise | |
1018 | |
1019 | |
1020 def parse_diff(diff): | 746 def parse_diff(diff): |
1021 """Takes a unified diff and returns a list of diffed files and their diffs. | 747 """Takes a unified diff and returns a list of diffed files and their diffs. |
1022 | 748 |
1023 The return format is a list of pairs of: | 749 The return format is a list of pairs of: |
1024 (<filename>, <diff contents>) | 750 (<filename>, <diff contents>) |
1025 <diff contents> is inclusive of the diff line. | 751 <diff contents> is inclusive of the diff line. |
1026 """ | 752 """ |
1027 result = [] | 753 result = [] |
1028 current_diff = '' | 754 current_diff = '' |
1029 current_header = None | 755 current_header = None |
(...skipping 89 matching lines...)
1119 | 845 |
1120 if patchset: | 846 if patchset: |
1121 cmd.extend(['--patchset', patchset]) | 847 cmd.extend(['--patchset', patchset]) |
1122 if whitelist: | 848 if whitelist: |
1123 for item in whitelist: | 849 for item in whitelist: |
1124 cmd.extend(['--whitelist', item]) | 850 cmd.extend(['--whitelist', item]) |
1125 elif blacklist: | 851 elif blacklist: |
1126 for item in blacklist: | 852 for item in blacklist: |
1127 cmd.extend(['--blacklist', item]) | 853 cmd.extend(['--blacklist', item]) |
1128 | 854 |
1129 # TODO(kjellander): Remove this hack when http://crbug.com/611808 is fixed. | |
1130 if root == 'src/third_party/webrtc': | |
1131 cmd.extend(['--extra_patchlevel=1']) | |
1132 | |
1133 # Only try once, since subsequent failures hide the real failure. | 855 # Only try once, since subsequent failures hide the real failure. |
1134 try: | 856 try: |
1135 call(*cmd, tries=1) | 857 call(*cmd, tries=1) |
1136 except SubprocessFailed as e: | 858 except SubprocessFailed as e: |
1137 raise PatchFailed(e.message, e.code, e.output) | 859 raise PatchFailed(e.message, e.code, e.output) |
1138 | 860 |
1139 def apply_gerrit_ref(gerrit_repo, gerrit_ref, root, gerrit_reset): | 861 def apply_gerrit_ref(gerrit_repo, gerrit_ref, root, gerrit_reset): |
1140 gerrit_repo = gerrit_repo or 'origin' | 862 gerrit_repo = gerrit_repo or 'origin' |
1141 assert gerrit_ref | 863 assert gerrit_ref |
1142 print '===Applying gerrit ref===' | |
1143 print 'Repo is %r, ref is %r, root is %r' % (gerrit_repo, gerrit_ref, root) | |
1144 try: | 864 try: |
1145 base_rev = git('rev-parse', 'HEAD', cwd=root).strip() | 865 base_rev = git('rev-parse', 'HEAD', cwd=root).strip() |
1146 git('retry', 'fetch', gerrit_repo, gerrit_ref, cwd=root, tries=1) | 866 git('retry', 'fetch', gerrit_repo, gerrit_ref, cwd=root, tries=1) |
1147 git('checkout', 'FETCH_HEAD', cwd=root) | 867 git('checkout', 'FETCH_HEAD', cwd=root) |
1148 if gerrit_reset: | 868 if gerrit_reset: |
1149 git('reset', '--soft', base_rev, cwd=root) | 869 git('reset', '--soft', base_rev, cwd=root) |
1150 except SubprocessFailed as e: | 870 except SubprocessFailed as e: |
1151 raise PatchFailed(e.message, e.code, e.output) | 871 raise PatchFailed(e.message, e.code, e.output) |
1152 | 872 |
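apply_gerrit_ref() above amounts to a fetch-and-checkout of a Gerrit patch ref followed by an optional soft reset. A hedged usage example (the ref is made up):

apply_gerrit_ref('https://chromium.googlesource.com/chromium/src',
                 'refs/changes/45/12345/6', root='src', gerrit_reset=True)
# Roughly equivalent to running, inside src/:
#   git fetch <gerrit_repo> refs/changes/45/12345/6   (via 'git retry')
#   git checkout FETCH_HEAD
#   git reset --soft <HEAD before the fetch>
# The soft reset keeps the fetched tree but moves HEAD back to the original
# revision, so the patch shows up as local modifications rather than as a
# separate commit.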
1153 def check_flag(flag_file): | 873 def check_flag(flag_file): |
(...skipping 156 matching lines...)
1310 # Calling git directly because there is no way to run Gclient without | 1030 # Calling git directly because there is no way to run Gclient without |
1311 # invoking DEPS. | 1031 # invoking DEPS. |
1312 print 'Fetching Git checkout' | 1032 print 'Fetching Git checkout' |
1313 | 1033 |
1314 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) | 1034 git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) |
1315 | 1035 |
1316 patches = None | 1036 patches = None |
1317 if patch_url: | 1037 if patch_url: |
1318 patches = get_svn_patch(patch_url) | 1038 patches = get_svn_patch(patch_url) |
1319 | 1039 |
1320 print '===Processing patch solutions===' | |
1321 already_patched = [] | 1040 already_patched = [] |
1322 patch_root = patch_root or '' | 1041 patch_root = patch_root or '' |
1323 print 'Patch root is %r' % patch_root | |
1324 for solution in solutions: | 1042 for solution in solutions: |
1325 print 'Processing solution %r' % solution['name'] | |
1326 if (patch_root == solution['name'] or | 1043 if (patch_root == solution['name'] or |
1327 solution['name'].startswith(patch_root + '/')): | 1044 solution['name'].startswith(patch_root + '/')): |
1328 relative_root = solution['name'][len(patch_root) + 1:] | 1045 relative_root = solution['name'][len(patch_root) + 1:] |
1329 target = '/'.join([relative_root, 'DEPS']).lstrip('/') | 1046 target = '/'.join([relative_root, 'DEPS']).lstrip('/') |
1330 print ' relative root is %r, target is %r' % (relative_root, target) | |
1331 if patches: | 1047 if patches: |
1332 apply_svn_patch(patch_root, patches, whitelist=[target]) | 1048 apply_svn_patch(patch_root, patches, whitelist=[target]) |
1333 already_patched.append(target) | 1049 already_patched.append(target) |
1334 elif issue: | 1050 elif issue: |
1335 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, | 1051 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
1336 revision_mapping, git_ref, apply_issue_email_file, | 1052 revision_mapping, git_ref, apply_issue_email_file, |
1337 apply_issue_key_file, whitelist=[target]) | 1053 apply_issue_key_file, whitelist=[target]) |
1338 already_patched.append(target) | 1054 already_patched.append(target) |
1339 | 1055 |
1340 if not buildspec: | |
1341 # Run deps2git if there is a DEPS change after the last .DEPS.git commit. | |
1342 for solution in solutions: | |
1343 ensure_deps2git(solution, shallow, git_cache_dir) | |
1344 | |
1345 # Ensure our build/ directory is set up with the correct .gclient file. | 1056 # Ensure our build/ directory is set up with the correct .gclient file. |
1346 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) | 1057 gclient_configure(solutions, target_os, target_os_only, git_cache_dir) |
1347 | 1058 |
1348 # Let gclient do the DEPS syncing. | 1059 # Let gclient do the DEPS syncing. |
1349 # The branch-head refspec is a special case because its possible Chrome | 1060 # The branch-head refspec is a special case because its possible Chrome |
1350 # src, which contains the branch-head refspecs, is DEPSed in. | 1061 # src, which contains the branch-head refspecs, is DEPSed in. |
1351 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs, | 1062 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs, |
1352 shallow) | 1063 shallow) |
1353 | 1064 |
1354 # Now that gclient_sync has finished, we should revert any .DEPS.git so that | 1065 # Now that gclient_sync has finished, we should revert any .DEPS.git so that |
(...skipping 83 matching lines...)
1438 | 1149 |
1439 parse.add_option('--issue', help='Issue number to patch from.') | 1150 parse.add_option('--issue', help='Issue number to patch from.') |
1440 parse.add_option('--patchset', | 1151 parse.add_option('--patchset', |
1441 help='Patchset from issue to patch from, if applicable.') | 1152 help='Patchset from issue to patch from, if applicable.') |
1442 parse.add_option('--apply_issue_email_file', | 1153 parse.add_option('--apply_issue_email_file', |
1443 help='--email-file option passthrough for apply_patch.py.') | 1154 help='--email-file option passthrough for apply_patch.py.') |
1444 parse.add_option('--apply_issue_key_file', | 1155 parse.add_option('--apply_issue_key_file', |
1445 help='--private-key-file option passthrough for ' | 1156 help='--private-key-file option passthrough for ' |
1446 'apply_patch.py.') | 1157 'apply_patch.py.') |
1447 parse.add_option('--patch_url', help='Optional URL to SVN patch.') | 1158 parse.add_option('--patch_url', help='Optional URL to SVN patch.') |
1448 parse.add_option('--root', dest='patch_root', | |
1449 help='DEPRECATED: Use --patch_root.') | |
1450 parse.add_option('--patch_root', help='Directory to patch on top of.') | 1159 parse.add_option('--patch_root', help='Directory to patch on top of.') |
1451 parse.add_option('--rietveld_server', | 1160 parse.add_option('--rietveld_server', |
1452 default='codereview.chromium.org', | 1161 default='codereview.chromium.org', |
1453 help='Rietveld server.') | 1162 help='Rietveld server.') |
1454 parse.add_option('--gerrit_repo', | 1163 parse.add_option('--gerrit_repo', |
1455 help='Gerrit repository to pull the ref from.') | 1164 help='Gerrit repository to pull the ref from.') |
1456 parse.add_option('--gerrit_ref', help='Gerrit ref to apply.') | 1165 parse.add_option('--gerrit_ref', help='Gerrit ref to apply.') |
1457 parse.add_option('--gerrit_no_reset', action='store_true', | 1166 parse.add_option('--gerrit_no_reset', action='store_true', |
1458 help='Bypass calling reset after applying a gerrit ref.') | 1167 help='Bypass calling reset after applying a gerrit ref.') |
1459 parse.add_option('--specs', help='Gclient spec.') | 1168 parse.add_option('--specs', help='Gclient spec.') |
1460 parse.add_option('--master', help='Master name.') | |
1461 parse.add_option('-f', '--force', action='store_true', | |
1462 help='Bypass check to see if we want to be run. ' | |
1463 'Should ONLY be used locally or by smart recipes.') | |
1464 parse.add_option('--revision_mapping', | 1169 parse.add_option('--revision_mapping', |
1465 help='{"path/to/repo/": "property_name"}') | 1170 help='{"path/to/repo/": "property_name"}') |
1466 parse.add_option('--revision_mapping_file', | 1171 parse.add_option('--revision_mapping_file', |
1467 help=('Same as revision_mapping, except its a path to a json' | 1172 help=('Same as revision_mapping, except its a path to a json' |
1468 ' file containing that format.')) | 1173 ' file containing that format.')) |
1469 parse.add_option('--revision', action='append', default=[], | 1174 parse.add_option('--revision', action='append', default=[], |
1470 help='Revision to check out. Can be an SVN revision number, ' | 1175 help='Revision to check out. Can be an SVN revision number, ' |
1471 'git hash, or any form of git ref. Can prepend ' | 1176 'git hash, or any form of git ref. Can prepend ' |
1472 'root@<rev> to specify which repository, where root ' | 1177 'root@<rev> to specify which repository, where root ' |
1473 'is either a filesystem path, git https url, or ' | 1178 'is either a filesystem path, git https url, or ' |
1474 'svn url. To specify Tip of Tree, set rev to HEAD.' | 1179 'svn url. To specify Tip of Tree, set rev to HEAD.' |
1475 'To specify a git branch and an SVN rev, <rev> can be ' | 1180 'To specify a git branch and an SVN rev, <rev> can be ' |
1476 'set to <branch>:<revision>.') | 1181 'set to <branch>:<revision>.') |
1477 parse.add_option('--output_manifest', action='store_true', | 1182 parse.add_option('--output_manifest', action='store_true', |
1478 help=('Add manifest json to the json output.')) | 1183 help=('Add manifest json to the json output.')) |
1479 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], | |
1480 help='Hostname of the current machine, ' | |
1481 'used for determining whether or not to activate.') | |
1482 parse.add_option('--builder_name', help='Name of the builder, ' | |
1483 'used for determining whether or not to activate.') | |
1484 parse.add_option('--build_dir', default=os.getcwd()) | 1184 parse.add_option('--build_dir', default=os.getcwd()) |
1485 parse.add_option('--flag_file', default=path.join(os.getcwd(), | |
1486 'update.flag')) | |
1487 parse.add_option('--shallow', action='store_true', | 1185 parse.add_option('--shallow', action='store_true', |
1488 help='Use shallow clones for cache repositories.') | 1186 help='Use shallow clones for cache repositories.') |
1489 parse.add_option('--gyp_env', action='append', default=[], | 1187 parse.add_option('--gyp_env', action='append', default=[], |
1490 help='Environment variables to pass into gclient runhooks.') | 1188 help='Environment variables to pass into gclient runhooks.') |
1491 parse.add_option('--clobber', action='store_true', | 1189 parse.add_option('--clobber', action='store_true', |
1492 help='Delete checkout first, always') | 1190 help='Delete checkout first, always') |
1493 parse.add_option('--bot_update_clobber', action='store_true', dest='clobber', | 1191 parse.add_option('--bot_update_clobber', action='store_true', dest='clobber', |
1494 help='(synonym for --clobber)') | 1192 help='(synonym for --clobber)') |
1495 parse.add_option('-o', '--output_json', | 1193 parse.add_option('-o', '--output_json', |
1496 help='Output JSON information into a specified file') | 1194 help='Output JSON information into a specified file') |
(...skipping 38 matching lines...)
1535 | 1233 |
1536 # Because we print CACHE_DIR out into a .gclient file, and then later run | 1234 # Because we print CACHE_DIR out into a .gclient file, and then later run |
1537 # eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets | 1235 # eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets |
1538 # parsed as "E:[\x08][\x08]uild". | 1236 # parsed as "E:[\x08][\x08]uild". |
1539 if sys.platform.startswith('win'): | 1237 if sys.platform.startswith('win'): |
1540 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') | 1238 options.git_cache_dir = options.git_cache_dir.replace('\\', '\\\\') |
1541 | 1239 |
1542 return options, args | 1240 return options, args |
1543 | 1241 |
1544 | 1242 |
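A quick interpreter check of the comment above (the cache path ends up inside a Python string that gets eval()'d, so unescaped backslashes are interpreted as escape sequences):

>>> "E:\b\build"      # \b is read as a backspace character
'E:\x08\x08uild'
>>> "E:\\b\\build"    # after the replace() above
'E:\\b\\build'
>>> print "E:\\b\\build"
E:\b\build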
1545 def prepare(options, git_slns, active): | 1243 def prepare(options, git_slns): |
1546 """Prepares the target folder before we checkout.""" | 1244 """Prepares the target folder before we checkout.""" |
1547 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1245 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
1548 # If we're active now, but the flag file doesn't exist (we weren't active | 1246 emit_json(options.output_json, did_run=True) |
1549 # last run) or vice versa, blow away all checkouts. | 1247 if options.clobber: |
1550 if bool(active) != bool(check_flag(options.flag_file)): | |
1551 ensure_no_checkout(dir_names, '*') | 1248 ensure_no_checkout(dir_names, '*') |
1552 if options.output_json: | |
1553 # Make sure we tell recipes that we didn't run if the script exits here. | |
1554 emit_json(options.output_json, did_run=active) | |
1555 if active: | |
1556 if options.clobber: | |
1557 ensure_no_checkout(dir_names, '*') | |
1558 else: | |
1559 ensure_no_checkout(dir_names, '.svn') | |
1560 emit_flag(options.flag_file) | |
1561 else: | 1249 else: |
1562 delete_flag(options.flag_file) | 1250 ensure_no_checkout(dir_names, '.svn') |
1563 raise Inactive # This is caught in main() and we exit cleanly. | |
1564 | 1251 |
1565 # Do a shallow checkout if the disk is less than 100GB. | 1252 # Do a shallow checkout if the disk is less than 100GB. |
1566 total_disk_space, free_disk_space = get_total_disk_space() | 1253 total_disk_space, free_disk_space = get_total_disk_space() |
1567 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) | 1254 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024)) |
1568 used_disk_space_gb = int((total_disk_space - free_disk_space) | 1255 used_disk_space_gb = int((total_disk_space - free_disk_space) |
1569 / (1024 * 1024 * 1024)) | 1256 / (1024 * 1024 * 1024)) |
1570 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) | 1257 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb) |
1571 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, | 1258 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb, |
1572 total_disk_space_gb, | 1259 total_disk_space_gb, |
1573 percent_used) | 1260 percent_used) |
1574 if not options.output_json: | |
1575 print '@@@STEP_TEXT@%s@@@' % step_text | |
1576 if not options.shallow: | 1261 if not options.shallow: |
1577 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD | 1262 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD |
1578 and not options.no_shallow) | 1263 and not options.no_shallow) |
1579 | 1264 |
1580 # The first solution is where the primary DEPS file resides. | 1265 # The first solution is where the primary DEPS file resides. |
1581 first_sln = dir_names[0] | 1266 first_sln = dir_names[0] |
1582 | 1267 |
1583 # Split all the revision specifications into a nice dict. | 1268 # Split all the revision specifications into a nice dict. |
1584 print 'Revisions: %s' % options.revision | 1269 print 'Revisions: %s' % options.revision |
1585 revisions = parse_revisions(options.revision, first_sln) | 1270 revisions = parse_revisions(options.revision, first_sln) |
1586 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) | 1271 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) |
1587 return revisions, step_text | 1272 return revisions, step_text |
1588 | 1273 |
1589 | 1274 |
1590 def checkout(options, git_slns, specs, buildspec, master, | 1275 def checkout(options, git_slns, specs, buildspec, |
1591 svn_root, revisions, step_text): | 1276 svn_root, revisions, step_text): |
1592 first_sln = git_slns[0]['name'] | 1277 first_sln = git_slns[0]['name'] |
1593 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] | 1278 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
1594 try: | 1279 try: |
1595 # Outer try is for catching patch failures and exiting gracefully. | 1280 # Outer try is for catching patch failures and exiting gracefully. |
1596 # Inner try is for catching gclient failures and retrying gracefully. | 1281 # Inner try is for catching gclient failures and retrying gracefully. |
1597 try: | 1282 try: |
1598 checkout_parameters = dict( | 1283 checkout_parameters = dict( |
1599 # First, pass in the base of what we want to check out. | 1284 # First, pass in the base of what we want to check out. |
1600 solutions=git_slns, | 1285 solutions=git_slns, |
(...skipping 42 matching lines...)
1643 patch_root=options.patch_root, | 1328 patch_root=options.patch_root, |
1644 patch_failure=True, | 1329 patch_failure=True, |
1645 step_text='%s PATCH FAILED' % step_text, | 1330 step_text='%s PATCH FAILED' % step_text, |
1646 fixed_revisions=revisions) | 1331 fixed_revisions=revisions) |
1647 else: | 1332 else: |
1648 # If we're not on recipes, tell annotator about our got_revisions. | 1333 # If we're not on recipes, tell annotator about our got_revisions. |
1649 emit_log_lines('patch error', e.output) | 1334 emit_log_lines('patch error', e.output) |
1650 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text | 1335 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text |
1651 raise | 1336 raise |
1652 | 1337 |
1653 # Revision is an svn revision, unless it's a git master. | |
1654 use_svn_rev = master not in GIT_MASTERS | |
1655 | |
1656 # Take care of got_revisions outputs. | 1338 # Take care of got_revisions outputs. |
1657 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {})) | 1339 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {})) |
1658 if options.revision_mapping: | 1340 if options.revision_mapping: |
1659 revision_mapping.update(options.revision_mapping) | 1341 revision_mapping.update(options.revision_mapping) |
1660 | 1342 |
1661 # If the repo is not in the default GOT_REVISION_MAPPINGS and no | 1343 # If the repo is not in the default GOT_REVISION_MAPPINGS and no |
1662 # revision_mapping were specified on the command line then | 1344 # revision_mapping were specified on the command line then |
1663 # default to setting 'got_revision' based on the first solution. | 1345 # default to setting 'got_revision' based on the first solution. |
1664 if not revision_mapping: | 1346 if not revision_mapping: |
1665 revision_mapping[first_sln] = 'got_revision' | 1347 revision_mapping[first_sln] = 'got_revision' |
1666 | 1348 |
1667 got_revisions = parse_got_revision(gclient_output, revision_mapping, | 1349 got_revisions = parse_got_revision(gclient_output, revision_mapping) |
1668 use_svn_rev) | |
1669 | 1350 |
1670 if not got_revisions: | 1351 if not got_revisions: |
1671 # TODO(hinoka): We should probably bail out here, but in the interest | 1352 # TODO(hinoka): We should probably bail out here, but in the interest |
1672 # of giving mis-configured bots some time to get fixed use a dummy | 1353 # of giving mis-configured bots some time to get fixed use a dummy |
1673 # revision here. | 1354 # revision here. |
1674 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } | 1355 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' } |
1675 #raise Exception('No got_revision(s) found in gclient output') | 1356 #raise Exception('No got_revision(s) found in gclient output') |
1676 | 1357 |
1677 if options.output_json: | 1358 if options.output_json: |
1678 manifest = create_manifest() if options.output_manifest else None | 1359 manifest = create_manifest() if options.output_manifest else None |
1679 # Tell recipes information such as root, got_revision, etc. | 1360 # Tell recipes information such as root, got_revision, etc. |
1680 emit_json(options.output_json, | 1361 emit_json(options.output_json, |
1681 did_run=True, | 1362 did_run=True, |
1682 root=first_sln, | 1363 root=first_sln, |
1683 patch_root=options.patch_root, | 1364 patch_root=options.patch_root, |
1684 step_text=step_text, | 1365 step_text=step_text, |
1685 fixed_revisions=revisions, | 1366 fixed_revisions=revisions, |
1686 properties=got_revisions, | 1367 properties=got_revisions, |
1687 manifest=manifest) | 1368 manifest=manifest) |
1688 else: | 1369 else: |
1689 # If we're not on recipes, tell annotator about our got_revisions. | 1370 # If we're not on recipes, tell annotator about our got_revisions. |
1690 emit_properties(got_revisions) | 1371 emit_properties(got_revisions) |
1691 | 1372 |
1692 | 1373 |
1693 def print_help_text(force, output_json, active, master, builder, slave): | 1374 def print_help_text(): |
1694 """Print helpful messages to tell devs whats going on.""" | 1375 """Print helpful messages to tell devs whats going on.""" |
1695 if force and output_json: | |
1696 recipe_force = 'Forced on by recipes' | |
1697 elif active and output_json: | |
1698 recipe_force = 'Off by recipes, but forced on by bot update' | |
1699 elif not active and output_json: | |
1700 recipe_force = 'Forced off by recipes' | |
1701 else: | |
1702 recipe_force = 'N/A. Was not called by recipes' | |
1703 | |
1704 print BOT_UPDATE_MESSAGE % { | 1376 print BOT_UPDATE_MESSAGE % { |
1705 'master': master or 'Not specified', | |
1706 'builder': builder or 'Not specified', | |
1707 'slave': slave or 'Not specified', | |
1708 'recipe': recipe_force, | |
1709 'CURRENT_DIR': CURRENT_DIR, | 1377 'CURRENT_DIR': CURRENT_DIR, |
1710 'BUILDER_DIR': BUILDER_DIR, | 1378 'BUILDER_DIR': BUILDER_DIR, |
1711 'SLAVE_DIR': SLAVE_DIR, | 1379 'SLAVE_DIR': SLAVE_DIR, |
1712 'THIS_DIR': THIS_DIR, | 1380 'THIS_DIR': THIS_DIR, |
1713 'SCRIPTS_DIR': SCRIPTS_DIR, | 1381 'SCRIPTS_DIR': SCRIPTS_DIR, |
1714 'BUILD_DIR': BUILD_DIR, | 1382 'BUILD_DIR': BUILD_DIR, |
1715 'ROOT_DIR': ROOT_DIR, | 1383 'ROOT_DIR': ROOT_DIR, |
1716 'DEPOT_TOOLS_DIR': DEPOT_TOOLS_DIR, | 1384 'DEPOT_TOOLS_DIR': DEPOT_TOOLS_DIR, |
1717 }, | 1385 } |
1718 print ACTIVATED_MESSAGE if active else NOT_ACTIVATED_MESSAGE | |
1719 | 1386 |
1720 | 1387 |
1721 def main(): | 1388 def main(): |
1722 # Get inputs. | 1389 # Get inputs. |
1723 options, _ = parse_args() | 1390 options, _ = parse_args() |
1724 builder = options.builder_name | |
1725 slave = options.slave_name | |
1726 master = options.master | |
1727 | |
1728 # Check if this script should activate or not. | |
1729 active = check_valid_host(master, builder, slave) or options.force or False | |
1730 | 1391 |
1731 # Print a helpful message to tell developers whats going on with this step. | 1392 # Print a helpful message to tell developers whats going on with this step. |
1732 print_help_text( | 1393 print_help_text() |
1733 options.force, options.output_json, active, master, builder, slave) | |
1734 | 1394 |
1735 # Parse, manipulate, and print the gclient solutions. | 1395 # Parse, manipulate, and print the gclient solutions. |
1736 specs = {} | 1396 specs = {} |
1737 exec(options.specs, specs) | 1397 exec(options.specs, specs) |
1738 svn_solutions = specs.get('solutions', []) | 1398 svn_solutions = specs.get('solutions', []) |
1739 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions) | 1399 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions) |
1740 options.revision = maybe_ignore_revision(options.revision, buildspec) | 1400 options.revision = maybe_ignore_revision(options.revision, buildspec) |
1741 | 1401 |
1742 solutions_printer(git_slns) | 1402 solutions_printer(git_slns) |
1743 | 1403 |
1744 try: | 1404 try: |
1745 # Dun dun dun, the main part of bot_update. | 1405 # Dun dun dun, the main part of bot_update. |
1746 revisions, step_text = prepare(options, git_slns, active) | 1406 revisions, step_text = prepare(options, git_slns) |
1747 checkout(options, git_slns, specs, buildspec, master, svn_root, revisions, | 1407 checkout(options, git_slns, specs, buildspec, svn_root, revisions, |
1748 step_text) | 1408 step_text) |
1749 | 1409 |
1750 except Inactive: | |
1751 # Not active, should count as passing. | |
1752 pass | |
1753 except PatchFailed as e: | 1410 except PatchFailed as e: |
1754 emit_flag(options.flag_file) | 1411 emit_flag(options.flag_file) |
1755 # Return a specific non-zero exit code for patch failure (because it is | 1412 # Return a specific non-zero exit code for patch failure (because it is |
1756 # a failure), but make it different than other failures to distinguish | 1413 # a failure), but make it different than other failures to distinguish |
1757 # between infra failures (independent from patch author), and patch | 1414 # between infra failures (independent from patch author), and patch |
1758 # failures (that patch author can fix). However, PatchFailure due to | 1415 # failures (that patch author can fix). However, PatchFailure due to |
1759 # download patch failure is still an infra problem. | 1416 # download patch failure is still an infra problem. |
1760 if e.code == 3: | 1417 if e.code == 3: |
1761 # Patch download problem. | 1418 # Patch download problem. |
1762 return 87 | 1419 return 87 |
1763 # Genuine patch problem. | 1420 # Genuine patch problem. |
1764 return 88 | 1421 return 88 |
1765 except Exception: | 1422 except Exception: |
1766 # Unexpected failure. | 1423 # Unexpected failure. |
1767 emit_flag(options.flag_file) | 1424 emit_flag(options.flag_file) |
1768 raise | 1425 raise |
1769 else: | 1426 else: |
1770 emit_flag(options.flag_file) | 1427 emit_flag(options.flag_file) |
1771 | 1428 |
1772 | 1429 |
1773 if __name__ == '__main__': | 1430 if __name__ == '__main__': |
1774 sys.exit(main()) | 1431 sys.exit(main()) |
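Not part of the script, just a sketch of how a caller might tell apart the exit codes documented in main() above (87 = patch download / infra problem, 88 = genuine patch problem); the command line is hypothetical:

import subprocess
import sys

rc = subprocess.call([sys.executable, 'bot_update.py',
                      '--specs', 'solutions=[]', '--patch_root', 'src'])
if rc == 87:
  print 'infra failure: the patch could not be downloaded'
elif rc == 88:
  print 'patch failed to apply; the CL likely needs a rebase'
elif rc != 0:
  print 'bot_update failed (exit code %d)' % rc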