Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(507)

Side by Side Diff: recipe_modules/bot_update/resources/bot_update.py

Issue 1651033004: depot_tools: import bot_update gclient git rietveld tryserver recipe modules (reland #1) (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git@master
Patch Set: 80cols Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « recipe_modules/bot_update/resources/__init__.py ('k') | recipe_modules/bot_update/test_api.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # TODO(hinoka): Use logging.
7
8 import cStringIO
9 import codecs
10 import collections
11 import copy
12 import ctypes
13 import json
14 import optparse
15 import os
16 import pprint
17 import random
18 import re
19 import socket
20 import subprocess
21 import sys
22 import tempfile
23 import threading
24 import time
25 import urllib2
26 import urlparse
27 import uuid
28
29 import os.path as path
30
31 # How many bytes at a time to read from pipes.
32 BUF_SIZE = 256
33
34
# TODO(luqui): This is a horrible hack to identify build_internal when build
# is a recipe dependency. bot_update should not be depending on internal,
# rather the arrow should go the other way (or just be destroyed).
def check_dir(name, dirs, default=None):
  """Returns the first absolute path in *dirs* that is an existing directory
  whose basename equals *name*, or *default* if none match."""
  candidates = (path.abspath(d) for d in dirs)
  matches = (c for c in candidates
             if path.basename(c) == name and path.isdir(c))
  return next(matches, default)
44
45
# Define a bunch of directory paths.
# Relative to the current working directory.
CURRENT_DIR = path.abspath(os.getcwd())
BUILDER_DIR = path.dirname(CURRENT_DIR)
SLAVE_DIR = path.dirname(BUILDER_DIR)

# Relative to this script's filesystem path.
THIS_DIR = path.dirname(path.abspath(__file__))
# Locate the buildbot 'scripts' dir; falls back to this script's parent when
# no checkout layout matches (e.g. when run from a recipe dependency).
SCRIPTS_DIR = check_dir(
    'scripts', [
        path.dirname(THIS_DIR),
        path.join(SLAVE_DIR, '..', 'scripts'),
    ], default=path.dirname(THIS_DIR))
BUILD_DIR = path.dirname(SCRIPTS_DIR)
ROOT_DIR = path.dirname(BUILD_DIR)

# depot_tools root, three levels above recipe_modules/bot_update/resources/.
DEPOT_TOOLS_DIR = path.abspath(path.join(THIS_DIR, '..', '..', '..'))

# May be None when no internal checkout is present (see check_dir default).
BUILD_INTERNAL_DIR = check_dir(
    'build_internal', [
        path.join(ROOT_DIR, 'build_internal'),
        path.join(ROOT_DIR,      # .recipe_deps
                  path.pardir,   # slave
                  path.pardir,   # scripts
                  path.pardir),  # build_internal
    ])


CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com'
CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git'

# Official builds use buildspecs, so this is a special case.
BUILDSPEC_TYPE = collections.namedtuple('buildspec',
                                        ('container', 'version'))
# Matches internal SVN buildspec paths; group 1 = container, group 2 = version.
BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/'
                '(build|branches|releases)/(.+)$')
GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/'
                      'buildspec')
BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*'

# Commit-subject patterns that identify buildspec commits; each captures the
# four-part version number.
BUILDSPEC_COMMIT_RE = (
    re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'),
    re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'),
    re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'),
)
91
# Regular expression that matches a single commit footer line.
COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)')

# Footer metadata keys for regular and gsubtreed mirrored commit positions.
COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position'
COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position'
# Regular expression to parse a commit position, e.g. "refs/heads/master@{#1}".
COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')

# Regular expression to parse gclient's revinfo entries:
# "<path>: <repository>@<revision>".
REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')

# Used by 'ResolveSvnRevisionFromGitiles'. Maps a project to its SVN url and
# the (svn branch path -> git ref) translation rules.
GIT_SVN_PROJECT_MAP = {
    'webkit': {
        'svn_url': 'svn://svn.chromium.org/blink',
        'branch_map': [
            (r'trunk', r'refs/heads/master'),
            (r'branches/([^/]+)', r'refs/branch-heads/\1'),
        ],
    },
    'v8': {
        'svn_url': 'https://v8.googlecode.com/svn',
        'branch_map': [
            (r'trunk', r'refs/heads/candidates'),
            (r'branches/bleeding_edge', r'refs/heads/master'),
            (r'branches/([^/]+)', r'refs/branch-heads/\1'),
        ],
    },
    'nacl': {
        'svn_url': 'svn://svn.chromium.org/native_client',
        'branch_map': [
            (r'trunk/src/native_client', r'refs/heads/master'),
        ],
    },
}

# Key for the 'git-svn' ID metadata commit footer entry.
GIT_SVN_ID_FOOTER_KEY = 'git-svn-id'
# e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117
#     ce2b1a6d-e550-0410-aec6-3dcde31c8c00
# Group 1 = repository URL (with branch path), group 2 = SVN revision number.
GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
134
135
# This is the git mirror of the buildspecs repository. We could rely on the svn
# checkout, now that the git buildspecs are checked in alongside the svn
# buildspecs, but we're going to want to pull all the buildspecs from here
# eventually anyhow, and there's already some logic to pull from git (for the
# old git_buildspecs.git repo), so just stick with that.
GIT_BUILDSPEC_REPO = (
    'https://chrome-internal.googlesource.com/chrome/tools/buildspec')

# Copied from scripts/recipes/chromium.py.
# Maps an SVN solution root to {checkout path: build property name} so that
# revisions of key sub-checkouts are surfaced as got_*_revision properties.
GOT_REVISION_MAPPINGS = {
    '/chrome/trunk/src': {
        'src/': 'got_revision',
        'src/native_client/': 'got_nacl_revision',
        'src/tools/swarm_client/': 'got_swarm_client_revision',
        'src/tools/swarming_client/': 'got_swarming_client_revision',
        'src/third_party/WebKit/': 'got_webkit_revision',
        'src/third_party/webrtc/': 'got_webrtc_revision',
        'src/v8/': 'got_v8_revision',
    }
}
156
157
# Explanatory text printed on every run; %-formatted with the debugging
# values listed at the bottom (master/builder/slave, directory constants).
BOT_UPDATE_MESSAGE = """
What is the "Bot Update" step?
==============================

This step ensures that the source checkout on the bot (e.g. Chromium's src/ and
its dependencies) is checked out in a consistent state. This means that all of
the necessary repositories are checked out, no extra repositories are checked
out, and no locally modified files are present.

These actions used to be taken care of by the "gclient revert" and "update"
steps. However, those steps are known to be buggy and occasionally flaky. This
step has two main advantages over them:
 * it only operates in Git, so the logic can be clearer and cleaner; and
 * it is a slave-side script, so its behavior can be modified without
 restarting the master.

Why Git, you ask? Because that is the direction that the Chromium project is
heading. This step is an integral part of the transition from using the SVN repo
at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while
we fully convert everything to Git. This message will get out of your way
eventually, and the waterfall will be a happier place because of it.

This step can be activated or deactivated independently on every builder on
every master. When it is active, the "gclient revert" and "update" steps become
no-ops. When it is inactive, it prints this message, cleans up after itself, and
lets everything else continue as though nothing has changed. Eventually, when
everything is stable enough, this step will replace them entirely.

Debugging information:
(master/builder/slave may be unspecified on recipes)
master: %(master)s
builder: %(builder)s
slave: %(slave)s
forced by recipes: %(recipe)s
CURRENT_DIR: %(CURRENT_DIR)s
BUILDER_DIR: %(BUILDER_DIR)s
SLAVE_DIR: %(SLAVE_DIR)s
THIS_DIR: %(THIS_DIR)s
SCRIPTS_DIR: %(SCRIPTS_DIR)s
BUILD_DIR: %(BUILD_DIR)s
ROOT_DIR: %(ROOT_DIR)s
DEPOT_TOOLS_DIR: %(DEPOT_TOOLS_DIR)s
bot_update.py is:"""

# Appended to BOT_UPDATE_MESSAGE when this step is active for the host.
ACTIVATED_MESSAGE = """ACTIVE.
The bot will perform a Git checkout in this step.
The "gclient revert" and "update" steps are no-ops.

"""

# Appended to BOT_UPDATE_MESSAGE when this step is inactive for the host.
NOT_ACTIVATED_MESSAGE = """INACTIVE.
This step does nothing. You actually want to look at the "update" step.

"""


# Template for the generated .gclient file; cache_dir is emitted as a raw
# string literal (r"...") because the file is later eval()'d by gclient.
GCLIENT_TEMPLATE = """solutions = %(solutions)s

cache_dir = r%(cache_dir)s
%(target_os)s
%(target_os_only)s
"""
220
221
# Optional overrides loaded from the internal checkout, when one is present.
# Keys mirror the *_MASTERS / *_BUILDERS / *_SLAVES constants below.
internal_data = {}
if BUILD_INTERNAL_DIR:
  local_vars = {}
  try:
    execfile(os.path.join(
        BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'),
        local_vars)
  except Exception:
    # Same as if BUILD_INTERNAL_DIR didn't exist in the first place.
    print 'Warning: unable to read internal configuration file.'
    print 'If this is an internal bot, this step may be erroneously inactive.'
  internal_data = local_vars

RECOGNIZED_PATHS = {
    # If SVN path matches key, the entire URL is rewritten to the Git url.
    '/chrome/trunk/src':
        CHROMIUM_SRC_URL,
    '/chrome/trunk/src/tools/cros.DEPS':
        CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git',
}
RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {}))
243
# Masters on which bot_update is active for every builder and slave.
ENABLED_MASTERS = [
    'bot_update.always_on',
    'chromium.android',
    'chromium.angle',
    'chromium.chrome',
    'chromium.chromedriver',
    'chromium.chromiumos',
    'chromium',
    'chromium.fyi',
    'chromium.goma',
    'chromium.gpu',
    'chromium.gpu.fyi',
    'chromium.infra',
    'chromium.infra.cron',
    'chromium.linux',
    'chromium.lkgr',
    'chromium.mac',
    'chromium.memory',
    'chromium.memory.fyi',
    'chromium.perf',
    'chromium.perf.fyi',
    'chromium.swarm',
    'chromium.webkit',
    'chromium.webrtc',
    'chromium.webrtc.fyi',
    'chromium.win',
    'client.catapult',
    'client.drmemory',
    'client.mojo',
    'client.nacl',
    'client.nacl.ports',
    'client.nacl.sdk',
    'client.nacl.toolchain',
    'client.pdfium',
    'client.skia',
    'client.skia.fyi',
    'client.v8',
    'client.v8.branches',
    'client.v8.fyi',
    'client.webrtc',
    'client.webrtc.fyi',
    'tryserver.blink',
    'tryserver.client.catapult',
    'tryserver.client.mojo',
    'tryserver.chromium.android',
    'tryserver.chromium.angle',
    'tryserver.chromium.linux',
    'tryserver.chromium.mac',
    'tryserver.chromium.perf',
    'tryserver.chromium.win',
    'tryserver.infra',
    'tryserver.nacl',
    'tryserver.v8',
    'tryserver.webrtc',
]
ENABLED_MASTERS += internal_data.get('ENABLED_MASTERS', [])

# Per-master lists of builders on which bot_update is active even though the
# master as a whole is not in ENABLED_MASTERS.
ENABLED_BUILDERS = {
    'client.dart.fyi': [
        'v8-linux-release',
        'v8-mac-release',
        'v8-win-release',
    ],
    'client.dynamorio': [
        'linux-v8-dr',
    ],
}
ENABLED_BUILDERS.update(internal_data.get('ENABLED_BUILDERS', {}))

# Per-master lists of individual slaves on which bot_update is active.
ENABLED_SLAVES = {}
ENABLED_SLAVES.update(internal_data.get('ENABLED_SLAVES', {}))

# Disabled filters get run AFTER enabled filters, so for example if a builder
# config is enabled, but a bot on that builder is disabled, that bot will
# be disabled.
DISABLED_BUILDERS = {}
DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {}))

DISABLED_SLAVES = {}
DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {}))

# These masters work only in Git, meaning for got_revision, always output
# a git hash rather than a SVN rev.
GIT_MASTERS = [
    'client.v8',
    'client.v8.branches',
    'tryserver.v8',
]
GIT_MASTERS += internal_data.get('GIT_MASTERS', [])
333
334
# How many times to try before giving up. Default retry budget for call().
ATTEMPTS = 5

# Find deps2git (DEPS -> .DEPS.git converter) and its internal rules file.
DEPS2GIT_DIR_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git')
DEPS2GIT_PATH = path.join(DEPS2GIT_DIR_PATH, 'deps2git.py')
S2G_INTERNAL_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git_internal',
                              'svn_to_git_internal.py')

# ../../cache_dir aka /b/build/slave/cache_dir
GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py')
CACHE_DIR = path.join(SLAVE_DIR, 'cache_dir')
# Because we print CACHE_DIR out into a .gclient file, and then later run
# eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets
# parsed as "E:[\x08][\x08]uild".
if sys.platform.startswith('win'):
  CACHE_DIR = CACHE_DIR.replace('\\', '\\\\')

# Find the patch tool. On Windows it ships in build_internal; elsewhere the
# system patch binary is used. PATCH_TOOL may end up None on Windows.
if sys.platform.startswith('win'):
  if not BUILD_INTERNAL_DIR:
    print 'Warning: could not find patch tool because there is no '
    print 'build_internal present.'
    PATCH_TOOL = None
  else:
    PATCH_TOOL = path.join(BUILD_INTERNAL_DIR, 'tools', 'patch.EXE')
else:
  PATCH_TOOL = '/usr/bin/patch'

# If there is less than 100GB of disk space on the system, then we do
# a shallow checkout.
SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024
367
368
class SubprocessFailed(Exception):
  """A subprocess exited with a failing result.

  Carries the exit code and the captured combined stdout/stderr so callers
  can inspect or re-wrap the failure.
  """

  def __init__(self, message, code, output):
    super(SubprocessFailed, self).__init__(message)
    self.code = code
    self.output = output
374
375
class PatchFailed(SubprocessFailed):
  """SubprocessFailed variant so patch-step failures can be caught separately."""
  pass
378
379
class GclientSyncFailed(SubprocessFailed):
  """SubprocessFailed variant raised by gclient_sync() so sync failures can be
  caught independently of other subprocess failures."""
  pass
382
383
class SVNRevisionNotFound(Exception):
  """An SVN revision could not be resolved to a git hash (see get_git_hash);
  callers retry on this while the svn->git replicator catches up."""
  pass
386
387
class InvalidDiff(Exception):
  """A fetched diff/patch was not in the expected format.

  NOTE(review): no raise site is visible in this chunk — presumably used by
  the patch-application code later in the file.
  """
  pass
390
391
class Inactive(Exception):
  """Not really an exception, just used to exit early cleanly when bot_update
  is inactive for this host (see check_valid_host)."""
  pass
395
396
# Sentinel results for call()'s result_fn, compared by identity
# (`result is FAIL`): OK = success, RETRY = back off and try again,
# FAIL = give up immediately.
RETRY = object()
OK = object()
FAIL = object()
400
401
class PsPrinter(object):
  """Watchdog that dumps the process tree while a subprocess appears stuck.

  call() pokes this each time it reads output; if no poke happens within
  `interval` seconds, the timer fires and prints "ps auxwwf" for debugging.
  Only active on Linux, where that ps invocation is available.
  """

  def __init__(self, interval=300):
    self.interval = interval
    # BUG FIX: was startswith('linux2'), which is only the legacy Python 2
    # value of sys.platform and misses 'linux3' (old Pythons on 3.x kernels)
    # and 'linux' (Python 3). Matching the 'linux' prefix covers all of them.
    self.active = sys.platform.startswith('linux')
    self.thread = None

  @staticmethod
  def print_pstree():
    """Debugging function used to print "ps auxwwf" for stuck processes."""
    subprocess.call(['ps', 'auxwwf'])

  def poke(self):
    """Resets the watchdog timer; called whenever the subprocess shows life."""
    if self.active:
      self.cancel()
      self.thread = threading.Timer(self.interval, self.print_pstree)
      # Daemonize so a timer leaked by a missing cancel() cannot keep the
      # interpreter alive at exit.
      self.thread.daemon = True
      self.thread.start()

  def cancel(self):
    """Stops the pending watchdog timer, if any."""
    if self.active and self.thread is not None:
      self.thread.cancel()
      self.thread = None
423
424
def call(*args, **kwargs):  # pragma: no cover
  """Interactive subprocess call.

  Runs `args` as a subprocess, streaming its combined stdout/stderr to our
  stdout while also capturing it. Retries with exponential backoff.

  Special kwargs (popped before reaching Popen):
    result_fn: callable (exit_code, output) -> OK/RETRY/FAIL sentinel;
        default retries on any nonzero exit code.
    stdin_data: bytes written to the child's stdin, then closed.
    tries: maximum attempts (default ATTEMPTS).
    env: extra environment variables overlaid on os.environ.

  Returns the captured output on success; raises SubprocessFailed after the
  last attempt fails (or immediately on a FAIL result).
  """
  kwargs['stdout'] = subprocess.PIPE
  kwargs['stderr'] = subprocess.STDOUT
  kwargs.setdefault('bufsize', BUF_SIZE)
  cwd = kwargs.get('cwd', os.getcwd())
  result_fn = kwargs.pop('result_fn', lambda code, out: RETRY if code else OK)
  stdin_data = kwargs.pop('stdin_data', None)
  tries = kwargs.pop('tries', ATTEMPTS)
  if stdin_data:
    kwargs['stdin'] = subprocess.PIPE
  out = cStringIO.StringIO()
  # Overlay caller-supplied env vars on a copy of the real environment.
  new_env = kwargs.get('env', {})
  env = copy.copy(os.environ)
  env.update(new_env)
  kwargs['env'] = env
  attempt = 0
  for attempt in range(1, tries + 1):
    attempt_msg = ' (attempt #%d)' % attempt if attempt else ''
    if new_env:
      print '===Injecting Environment Variables==='
      for k, v in sorted(new_env.items()):
        print '%s: %s' % (k, v)
    print '===Running %s%s===' % (' '.join(args), attempt_msg)
    print 'In directory: %s' % cwd
    start_time = time.time()
    proc = subprocess.Popen(args, **kwargs)
    if stdin_data:
      proc.stdin.write(stdin_data)
      proc.stdin.close()
    psprinter = PsPrinter()
    # This is here because passing 'sys.stdout' into stdout for proc will
    # produce out of order output.
    hanging_cr = False
    while True:
      psprinter.poke()
      buf = proc.stdout.read(BUF_SIZE)
      if not buf:
        break
      # Normalize \r\n and bare \r to \n. A chunk ending in '\r' might be the
      # first half of a '\r\n' pair split across reads, so hold it back
      # ("hanging_cr") and prepend it to the next chunk before normalizing.
      if hanging_cr:
        buf = '\r' + buf
      hanging_cr = buf.endswith('\r')
      if hanging_cr:
        buf = buf[:-1]
      buf = buf.replace('\r\n', '\n').replace('\r', '\n')
      sys.stdout.write(buf)
      out.write(buf)
    if hanging_cr:
      # Output ended on a lone '\r'; flush it as a newline.
      sys.stdout.write('\n')
      out.write('\n')
    psprinter.cancel()

    code = proc.wait()
    elapsed_time = ((time.time() - start_time) / 60.0)
    outval = out.getvalue()
    result = result_fn(code, outval)
    if result in (FAIL, RETRY):
      print '===Failed in %.1f mins===' % elapsed_time
      print
    else:
      print '===Succeeded in %.1f mins===' % elapsed_time
      print
      return outval
    if result is FAIL:
      break
    if result is RETRY and attempt < tries:
      # Exponential backoff with up to 20% jitter.
      sleep_backoff = 4 ** attempt
      sleep_time = random.randint(sleep_backoff, int(sleep_backoff * 1.2))
      print '===backing off, sleeping for %d secs===' % sleep_time
      time.sleep(sleep_time)

  raise SubprocessFailed('%s failed with code %d in %s after %d attempts.' %
                         (' '.join(args), code, cwd, attempt),
                         code, outval)
499
500
def git(*args, **kwargs):  # pragma: no cover
  """Wrapper around call specifically for Git commands."""
  if args and args[0] == 'cache':
    # Rewrite "git cache" calls into "python git_cache.py".
    rewritten = (sys.executable, '-u', GIT_CACHE_PATH) + args[1:]
    return call(*rewritten, **kwargs)
  # On windows, subprocess doesn't fuzzy-match 'git' to 'git.bat', so we
  # have to do it explicitly. This is better than passing shell=True.
  executable = 'git.bat' if sys.platform.startswith('win') else 'git'
  return call(*((executable,) + args), **kwargs)
514
515
def get_gclient_spec(solutions, target_os, target_os_only):
  """Renders the .gclient file contents for the given solutions list."""
  fields = {
      'solutions': pprint.pformat(solutions, indent=4),
      # Quoted here; the template prefixes it with 'r' to make a raw literal.
      'cache_dir': '"%s"' % CACHE_DIR,
      'target_os': ('\ntarget_os=%s' % target_os) if target_os else '',
      'target_os_only': '\ntarget_os_only=%s' % target_os_only,
  }
  return GCLIENT_TEMPLATE % fields
523
524
def check_enabled(master, builder, slave):
  """True if bot_update is switched on for this master/builder/slave."""
  if master in ENABLED_MASTERS:
    return True
  if builder in (ENABLED_BUILDERS.get(master) or []):
    return True
  return slave in (ENABLED_SLAVES.get(master) or [])
535
536
def check_disabled(master, builder, slave):
  """Returns True if disabled, False if not disabled."""
  if builder in (DISABLED_BUILDERS.get(master) or []):
    return True
  return slave in (DISABLED_SLAVES.get(master) or [])
546
547
def check_valid_host(master, builder, slave):
  """True iff this host is enabled and not explicitly disabled."""
  if check_disabled(master, builder, slave):
    return False
  return check_enabled(master, builder, slave)
551
552
def maybe_ignore_revision(revision, buildspec):
  """Handle builders that don't care what buildbot tells them to build.

  This is especially the case with branch builders that build from buildspecs
  and/or trigger off multiple repositories, where the --revision passed in has
  nothing to do with the solution being built. Clearing the revision in this
  case causes bot_update to use HEAD rather that trying to checkout an
  inappropriate version of the solution.
  """
  is_branch_buildspec = buildspec and buildspec.container == 'branches'
  return [] if is_branch_buildspec else revision
565
566
567 def solutions_printer(solutions):
568 """Prints gclient solution to stdout."""
569 print 'Gclient Solutions'
570 print '================='
571 for solution in solutions:
572 name = solution.get('name')
573 url = solution.get('url')
574 print '%s (%s)' % (name, url)
575 if solution.get('deps_file'):
576 print ' Dependencies file is %s' % solution['deps_file']
577 if 'managed' in solution:
578 print ' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF')
579 custom_vars = solution.get('custom_vars')
580 if custom_vars:
581 print ' Custom Variables:'
582 for var_name, var_value in sorted(custom_vars.iteritems()):
583 print ' %s = %s' % (var_name, var_value)
584 custom_deps = solution.get('custom_deps')
585 if 'custom_deps' in solution:
586 print ' Custom Dependencies:'
587 for deps_name, deps_value in sorted(custom_deps.iteritems()):
588 if deps_value:
589 print ' %s -> %s' % (deps_name, deps_value)
590 else:
591 print ' %s: Ignore' % deps_name
592 for k, v in solution.iteritems():
593 # Print out all the keys we don't know about.
594 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps',
595 'managed']:
596 continue
597 print ' %s is %s' % (k, v)
598 print
599
600
def solutions_to_git(input_solutions):
  """Modifies urls in solutions to point at Git repos.

  returns: (git solutions, svn root of first solution, buildspec or None)
      tuple. Note the input list is deep-copied, not mutated.
  """
  assert input_solutions
  solutions = copy.deepcopy(input_solutions)
  first_solution = True
  buildspec = None
  for solution in solutions:
    original_url = solution['url']
    parsed_url = urlparse.urlparse(original_url)
    parsed_path = parsed_url.path

    # Rewrite SVN urls into Git urls.
    buildspec_m = re.match(BUILDSPEC_RE, parsed_path)
    if first_solution and buildspec_m:
      # Official buildspec checkout: point at the git buildspec repo and the
      # versioned DEPS file inside it.
      solution['url'] = GIT_BUILDSPEC_PATH
      buildspec = BUILDSPEC_TYPE(
          container=buildspec_m.group(1),
          version=buildspec_m.group(2),
      )
      solution['deps_file'] = path.join(buildspec.container, buildspec.version,
                                        'DEPS')
    elif parsed_path in RECOGNIZED_PATHS:
      solution['url'] = RECOGNIZED_PATHS[parsed_path]
      solution['deps_file'] = '.DEPS.git'
    elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc:
      # Already a googlesource git URL; leave it untouched.
      pass
    else:
      print 'Warning: %s' % ('path %r not recognized' % parsed_path,)

    # Strip out deps containing $$V8_REV$$, etc.
    if 'custom_deps' in solution:
      new_custom_deps = {}
      for deps_name, deps_value in solution['custom_deps'].iteritems():
        if deps_value and '$$' in deps_value:
          print 'Dropping %s:%s from custom deps' % (deps_name, deps_value)
        else:
          new_custom_deps[deps_name] = deps_value
      solution['custom_deps'] = new_custom_deps

    if first_solution:
      # The first solution's SVN path is reported back as the checkout root.
      root = parsed_path
      first_solution = False

    solution['managed'] = False
    # We don't want gclient to be using a safesync URL. Instead it should
    # using the lkgr/lkcr branch/tags.
    if 'safesync_url' in solution:
      print 'Removing safesync url %s from %s' % (solution['safesync_url'],
                                                  parsed_path)
      del solution['safesync_url']
  return solutions, root, buildspec
655
656
def remove(target):
  """Remove a target by moving it into build.dead."""
  graveyard = path.join(BUILDER_DIR, 'build.dead')
  if not path.exists(graveyard):
    os.makedirs(graveyard)
  # A random name avoids collisions with previously "removed" targets.
  tombstone = path.join(graveyard, uuid.uuid4().hex)
  os.rename(target, tombstone)
663
664
def ensure_no_checkout(dir_names, scm_dirname):
  """Ensure that there is no undesired checkout under build/.

  If there is an incorrect checkout under build/, then
  move build/ to build.dead/
  This function will check each directory in dir_names.

  scm_dirname is expected to be '.svn', '.git', or '*' ('*' means wipe
  unconditionally without probing for an SCM directory).
  """
  assert scm_dirname in ['.svn', '.git', '*']
  # True if any of the named directories contains the given SCM metadata dir.
  has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname))
                     for dir_name in dir_names)

  if has_checkout or scm_dirname == '*':
    build_dir = os.getcwd()
    prefix = ''
    if scm_dirname != '*':
      prefix = '%s detected in checkout, ' % scm_dirname

    # "Delete" everything under cwd by moving it into build.dead (see remove).
    for filename in os.listdir(build_dir):
      deletion_target = path.join(build_dir, filename)
      # Trailing comma: suppress newline so 'done' lands on the same line.
      print '%sdeleting %s...' % (prefix, deletion_target),
      remove(deletion_target)
      print 'done'
689
690
def gclient_configure(solutions, target_os, target_os_only):
  """Should do the same thing as gclient --spec='...'."""
  spec = get_gclient_spec(solutions, target_os, target_os_only)
  with codecs.open('.gclient', mode='w', encoding='utf-8') as f:
    f.write(spec)
695
696
def gclient_sync(with_branch_heads, shallow):
  """Runs `gclient sync` and returns its parsed --output-json dict.

  Raises GclientSyncFailed (wrapping the underlying SubprocessFailed) so
  callers can distinguish sync failures from other subprocess failures.
  """
  # We just need to allocate a filename.
  fd, gclient_output_file = tempfile.mkstemp(suffix='.json')
  os.close(fd)
  gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
  cmd = [gclient_bin, 'sync', '--verbose', '--reset', '--force',
         '--ignore_locks', '--output-json', gclient_output_file,
         '--nohooks', '--noprehooks', '--delete_unversioned_trees']
  if with_branch_heads:
    cmd += ['--with_branch_heads']
  if shallow:
    cmd += ['--shallow']

  try:
    # tries=1: retrying a half-finished sync is handled at a higher level.
    call(*cmd, tries=1)
  except SubprocessFailed as e:
    # Throw a GclientSyncFailed exception so we can catch this independently.
    raise GclientSyncFailed(e.message, e.code, e.output)
  else:
    with open(gclient_output_file) as f:
      return json.load(f)
  finally:
    # Always clean up the temp file, success or failure.
    os.remove(gclient_output_file)
720
721
def gclient_runhooks(gyp_envs):
  """Runs `gclient runhooks` with the given 'NAME=value' env vars applied."""
  is_win = sys.platform.startswith('win')
  gclient_bin = 'gclient.bat' if is_win else 'gclient'
  env = dict(pair.split('=', 1) for pair in gyp_envs)
  call(gclient_bin, 'runhooks', env=env)
726
727
def gclient_revinfo():
  """Returns raw `gclient revinfo -a` output (empty string if call yields
  nothing)."""
  if sys.platform.startswith('win'):
    gclient_bin = 'gclient.bat'
  else:
    gclient_bin = 'gclient'
  return call(gclient_bin, 'revinfo', '-a') or ''
731
732
733 def create_manifest():
734 manifest = {}
735 output = gclient_revinfo()
736 for line in output.strip().splitlines():
737 match = REVINFO_RE.match(line.strip())
738 if match:
739 manifest[match.group(1)] = {
740 'repository': match.group(2),
741 'revision': match.group(3),
742 }
743 else:
744 print "WARNING: Couldn't match revinfo line:\n%s" % line
745 return manifest
746
747
def get_commit_message_footer_map(message):
  """Returns: (dict) A dictionary of commit message footer entries.

  The footer is the last run of non-blank lines in the message; an invalid
  line anywhere in that run invalidates the entire footer.
  """
  # Keep only the final block of consecutive non-blank lines.
  block = []
  for raw_line in message.strip().splitlines():
    stripped = raw_line.strip()
    if not stripped:
      block = []
    else:
      block.append(stripped)

  # Parse the footer block into key/value pairs.
  footers = {}
  for entry in block:
    m = COMMIT_FOOTER_ENTRY_RE.match(entry)
    if not m:
      # If any single line isn't valid, the entire footer is invalid.
      return {}
    footers[m.group(1)] = m.group(2).strip()
  return footers
771
772
def get_commit_message_footer(message, key):
  """Returns: (str/None) The footer value for 'key', or None if none was found.
  """
  footers = get_commit_message_footer_map(message)
  return footers.get(key)
777
778
def get_svn_rev(git_hash, dir_name):
  """Extracts the SVN revision from a commit's git-svn-id footer.

  Returns the integer revision, or None when the commit has no parseable
  git-svn-id footer.
  """
  log_output = git('log', '-1', git_hash, cwd=dir_name)
  footer = get_commit_message_footer(log_output, GIT_SVN_ID_FOOTER_KEY)
  if not footer:
    return None
  match = GIT_SVN_ID_RE.match(footer)
  return int(match.group(2)) if match else None
788
789
def get_git_hash(revision, branch, sln_dir):
  """We want to search for the SVN revision on the git-svn branch.

  Note that git will search backwards from origin/master.
  Raises SVNRevisionNotFound when no commit carries the revision.
  """
  grep_pattern = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision)
  if branch.startswith('refs/'):
    ref = branch
  else:
    ref = 'origin/%s' % branch
  found = git('log', '-E', '--grep', grep_pattern, '--format=%H',
              '--max-count=1', ref, cwd=sln_dir).strip()
  if not found:
    raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s'
                              % (revision, sln_dir))
  return found
803
804
def _last_commit_for_file(filename, repo_base):
  """Returns the hash of the newest commit that touched *filename*."""
  log_args = ('log', '--format=%H', '--max-count=1', '--', filename)
  return git(*log_args, cwd=repo_base).strip()
808
809
def need_to_run_deps2git(repo_base, deps_file, deps_git_file):
  """Checks to see if we need to run deps2git.

  Returns True if there was a DEPS change after the last .DEPS.git update
  or if DEPS has local modifications.
  """
  # A locally modified DEPS ('M ' porcelain status) always needs conversion.
  deps_status = git('status', '--porcelain', deps_file, cwd=repo_base).strip()
  if deps_status.startswith('M '):
    return True

  deps_ref = _last_commit_for_file(deps_file, repo_base)
  deps_git_ref = _last_commit_for_file(deps_git_file, repo_base)
  merge_base = git(
      'merge-base', deps_ref, deps_git_ref, cwd=repo_base).strip()

  # If the merge base of the last DEPS and last .DEPS.git commits is not the
  # DEPS commit itself, DEPS was committed after the last .DEPS.git update.
  return deps_ref != merge_base
831
832
def ensure_deps2git(solution, shallow):
  """Regenerates .DEPS.git from DEPS (via deps2git) when DEPS is newer.

  No-op when the solution does not track both DEPS and .DEPS.git, or when
  .DEPS.git is already up to date.
  """
  repo_base = path.join(os.getcwd(), solution['name'])
  deps_file = path.join(repo_base, 'DEPS')
  deps_git_file = path.join(repo_base, '.DEPS.git')
  # Only applicable when both files are tracked by git.
  if (not git('ls-files', 'DEPS', cwd=repo_base).strip() or
      not git('ls-files', '.DEPS.git', cwd=repo_base).strip()):
    return

  print 'Checking if %s is newer than %s' % (deps_file, deps_git_file)
  if not need_to_run_deps2git(repo_base, deps_file, deps_git_file):
    return

  print '===DEPS file modified, need to run deps2git==='
  cmd = [sys.executable, DEPS2GIT_PATH,
         '--workspace', os.getcwd(),
         '--cache_dir', CACHE_DIR,
         '--deps', deps_file,
         '--out', deps_git_file]
  # Internal solutions need the extra svn->git translation rules.
  if 'chrome-internal.googlesource' in solution['url']:
    cmd.extend(['--extra-rules', S2G_INTERNAL_PATH])
  if shallow:
    cmd.append('--shallow')
  call(*cmd)
856
857
def emit_log_lines(name, lines):
  """Emits buildbot annotator STEP_LOG_LINE/STEP_LOG_END directives so the
  given text appears as a named log on the build step."""
  for line in lines.splitlines():
    print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line)
  print '@@@STEP_LOG_END@%s@@@' % name
862
863
def emit_properties(properties):
  """Emits buildbot annotator SET_BUILD_PROPERTY directives, one per entry,
  in sorted key order. Values are emitted JSON-style (quoted)."""
  for property_name, property_value in sorted(properties.items()):
    print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value)
867
868
# Derived from:
# http://code.activestate.com/recipes/577972-disk-usage/?in=user-4178764
def get_total_disk_space():
  """Returns (total, free) bytes for the filesystem containing the cwd."""
  cwd = os.getcwd()
  # Windows is the only platform that doesn't support os.statvfs, so
  # we need to special case this.
  if sys.platform.startswith('win'):
    # Three out-params for GetDiskFreeSpaceEx; the first (free bytes
    # available to the caller) is ignored, hence the '_' name.
    _, total, free = (ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
                      ctypes.c_ulonglong())
    # Pick the wide-char API for unicode paths (always on Python 3, where
    # 'unicode' doesn't exist — the version check short-circuits first).
    if sys.version_info >= (3,) or isinstance(cwd, unicode):
      fn = ctypes.windll.kernel32.GetDiskFreeSpaceExW
    else:
      fn = ctypes.windll.kernel32.GetDiskFreeSpaceExA
    ret = fn(cwd, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
    if ret == 0:
      # WinError() will fetch the last error code.
      raise ctypes.WinError()
    return (total.value, free.value)

  else:
    st = os.statvfs(cwd)
    # f_bavail: free blocks available to non-root; f_frsize: fragment size.
    free = st.f_bavail * st.f_frsize
    total = st.f_blocks * st.f_frsize
    return (total, free)
893
894
def get_target_revision(folder_name, git_url, revisions):
  """Looks up the pinned revision for a solution.

  Tries the solution name (with surrounding slashes stripped) first, then
  the git URL; returns None when neither is pinned.
  """
  for key in (folder_name.strip('/'), git_url):
    if key in revisions:
      return revisions[key]
  return None
902
903
def force_revision(folder_name, revision):
  """Force-checks-out *revision* in the checkout at *folder_name*.

  Accepts 'branch:revision' syntax; a bare revision implies branch 'master'.
  A short all-digit revision is treated as an SVN revision and resolved to a
  git hash first. 'HEAD' (or empty) checks out the branch tip instead.
  """
  head, sep, tail = revision.partition(':')
  if sep:
    # Support for "branch:revision" syntax.
    branch, revision = head, tail
  else:
    branch = 'master'

  if revision and revision.upper() != 'HEAD':
    if revision.isdigit() and len(revision) < 40:
      # rev_num is really a svn revision number, convert it into a git hash.
      git_ref = get_git_hash(int(revision), branch, folder_name)
    else:
      # rev_num is actually a git hash or ref, we can just use it.
      git_ref = revision
  else:
    git_ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
  git('checkout', '--force', git_ref, cwd=folder_name)
922
def git_checkout(solutions, revisions, shallow, refs):
  """Check out every solution at its requested revision via the git cache.

  Each solution is populated into the shared git cache, cloned (or fetched)
  from the local mirror, then force-checked-out at its target revision.
  Failures are retried up to 60 times with a 5 second pause, because the
  svn-to-git replicator may simply be lagging behind.

  Returns the HEAD hash of the first solution (the primary checkout).
  """
  build_dir = os.getcwd()
  # Before we do anything, break all git_cache locks.
  if path.isdir(CACHE_DIR):
    git('cache', 'unlock', '-vv', '--force', '--all', '--cache-dir', CACHE_DIR)
    for item in os.listdir(CACHE_DIR):
      filename = os.path.join(CACHE_DIR, item)
      if item.endswith('.lock'):
        raise Exception('%s exists after cache unlock' % filename)
  first_solution = True
  for sln in solutions:
    # This is so we can loop back and try again if we need to wait for the
    # git mirrors to update from SVN.
    done = False
    tries_left = 60
    while not done:
      name = sln['name']
      url = sln['url']
      if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
        # Experiments show there's little to be gained from
        # a shallow clone of src.
        shallow = False
      sln_dir = path.join(build_dir, name)
      s = ['--shallow'] if shallow else []
      populate_cmd = (['cache', 'populate', '--ignore_locks', '-v',
                       '--cache-dir', CACHE_DIR] + s + [url])
      for ref in refs:
        populate_cmd.extend(['--ref', ref])
      git(*populate_cmd)
      mirror_dir = git(
          'cache', 'exists', '--quiet', '--cache-dir', CACHE_DIR, url).strip()
      clone_cmd = (
          'clone', '--no-checkout', '--local', '--shared', mirror_dir, sln_dir)

      try:
        if not path.isdir(sln_dir):
          git(*clone_cmd)
        else:
          # Reuse the existing checkout: repoint origin at the mirror and
          # fetch, including any extra refspecs requested via |refs|.
          git('remote', 'set-url', 'origin', mirror_dir, cwd=sln_dir)
          git('fetch', 'origin', cwd=sln_dir)
          for ref in refs:
            refspec = '%s:%s' % (ref, ref.lstrip('+'))
            git('fetch', 'origin', refspec, cwd=sln_dir)

        revision = get_target_revision(name, url, revisions) or 'HEAD'
        force_revision(sln_dir, revision)
        done = True
      except SubprocessFailed as e:
        # Exited abnormally, theres probably something wrong.
        # Lets wipe the checkout and try again.
        tries_left -= 1
        if tries_left > 0:
          print 'Something failed: %s.' % str(e)
          print 'waiting 5 seconds and trying again...'
          time.sleep(5)
        else:
          raise
        # Wipe the broken checkout so the next loop iteration re-clones.
        remove(sln_dir)
      except SVNRevisionNotFound:
        tries_left -= 1
        if tries_left > 0:
          # If we don't have the correct revision, wait and try again.
          print 'We can\'t find revision %s.' % revision
          print 'The svn to git replicator is probably falling behind.'
          print 'waiting 5 seconds and trying again...'
          time.sleep(5)
        else:
          raise

    # Drop any untracked/ignored files left over from previous runs.
    git('clean', '-dff', cwd=sln_dir)

    if first_solution:
      # Remember the primary solution's HEAD hash; it is the return value.
      git_ref = git('log', '--format=%H', '--max-count=1',
                    cwd=sln_dir).strip()
      first_solution = False
  return git_ref
999
1000
def _download(url):
  """Fetch |url| and return its content, retrying transient failures.

  Up to ATTEMPTS tries are made; the exception raised by the final
  attempt is propagated to the caller.
  """
  last_attempt = ATTEMPTS - 1
  for attempt in xrange(ATTEMPTS):
    try:
      return urllib2.urlopen(url).read()
    except Exception:
      if attempt >= last_attempt:
        raise
1009
1010
def parse_diff(diff):
  """Takes a unified diff and returns a list of diffed files and their diffs.

  The return format is a list of pairs of:
    (<filename>, <diff contents>)
  <diff contents> is inclusive of the diff line.
  """
  result = []
  pending_lines = []
  pending_header = None
  for line in diff.splitlines():
    # "diff" marks a git-style header, "Index: " an SVN-style one.
    if line.startswith('diff') or line.startswith('Index: '):
      if pending_header:
        # Close out the previous file's section (the format keeps a
        # trailing blank line between sections).
        result.append((pending_header, '%s\n\n' % '\n'.join(pending_lines)))

      git_m = re.match(r'diff (?:--git )?(\S+) (\S+)', line)
      svn_m = re.match(r'Index: (.*)', line)
      if git_m:
        src, dst = git_m.groups()
        if src != dst and src.startswith('a/'):
          # Sometimes git prepends 'a/' and 'b/' in front of file paths.
          src = src[2:]
        pending_header = src
      elif svn_m:
        pending_header = svn_m.group(1)
      else:
        raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' %
                          (line, diff))
      pending_lines = []
    pending_lines.append(line)
  if pending_header:
    # We hit EOF; save the last section (no trailing blank line here).
    result.append((pending_header, '%s\n' % '\n'.join(pending_lines)))
  return result
1054
1055
def get_svn_patch(patch_url):
  """Fetch patch from patch_url, return list of (filename, diff)"""
  if sys.platform.startswith('win'):
    svn_exe = 'svn.bat'
  else:
    svn_exe = 'svn'
  return parse_diff(call(svn_exe, 'cat', patch_url))
1061
1062
1063 def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None):
1064 """Expects a list of (filename, diff), applies it on top of patch_root."""
1065 if whitelist:
1066 patches = [(name, diff) for name, diff in patches if name in whitelist]
1067 elif blacklist:
1068 patches = [(name, diff) for name, diff in patches if name not in blacklist]
1069 diffs = [diff for _, diff in patches]
1070 patch = ''.join(diffs)
1071
1072 if patch:
1073 print '===Patching files==='
1074 for filename, _ in patches:
1075 print 'Patching %s' % filename
1076 try:
1077 call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward',
1078 stdin_data=patch, cwd=patch_root, tries=1)
1079 for filename, _ in patches:
1080 full_filename = path.abspath(path.join(patch_root, filename))
1081 git('add', full_filename, cwd=path.dirname(full_filename))
1082 except SubprocessFailed as e:
1083 raise PatchFailed(e.message, e.code, e.output)
1084
def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision,
                         email_file, key_file, whitelist=None, blacklist=None):
  """Downloads and applies a Rietveld issue/patchset on top of |root|.

  Only one attempt is made, since subsequent failures would hide the real
  failure.  Raises PatchFailed when apply_issue exits abnormally.
  """
  if sys.platform.startswith('win'):
    apply_issue_bin = 'apply_issue.bat'
  else:
    apply_issue_bin = 'apply_issue'
  cmd = [
      apply_issue_bin,
      # The patch will be applied on top of this directory.
      '--root_dir', root,
      # Tell apply_issue how to fetch the patch.
      '--issue', issue,
      '--server', server,
      # Always run apply_issue.py, otherwise it would see update.flag
      # and then bail out.
      '--force',
      # Don't run gclient sync when it sees a DEPS change.
      '--ignore_deps',
      # TODO(tandrii): remove after http://crbug.com/537417 is resolved.
      # Temporary enable verbosity to see if Rietveld requests are actually
      # retried.
      '-v', '-v',  # = logging.DEBUG level.
  ]
  # Use an oauth key file if specified.
  if email_file and key_file:
    cmd += ['--email-file', email_file, '--private-key-file', key_file]
  else:
    cmd += ['--no-auth']

  if patchset:
    cmd += ['--patchset', patchset]
  if whitelist:
    for item in whitelist:
      cmd += ['--whitelist', item]
  elif blacklist:
    for item in blacklist:
      cmd += ['--blacklist', item]

  # Only try once, since subsequent failures hide the real failure.
  try:
    call(*cmd, tries=1)
  except SubprocessFailed as e:
    raise PatchFailed(e.message, e.code, e.output)
1125
def apply_gerrit_ref(gerrit_repo, gerrit_ref, root):
  """Fetches |gerrit_ref| from |gerrit_repo| and soft-applies it onto |root|.

  After this, the patch's changes sit on top of the original HEAD (checkout
  of FETCH_HEAD followed by a soft reset back to the base revision).
  Raises PatchFailed when any git step fails.
  """
  remote = gerrit_repo or 'origin'
  assert gerrit_ref
  try:
    base_rev = git('rev-parse', 'HEAD', cwd=root).strip()
    git('retry', 'fetch', remote, gerrit_ref, cwd=root, tries=1)
    git('checkout', 'FETCH_HEAD', cwd=root)
    git('reset', '--soft', base_rev, cwd=root)
  except SubprocessFailed as e:
    raise PatchFailed(e.message, e.code, e.output)
1136
def check_flag(flag_file):
  """Returns True if the bot_update flag file is present on disk."""
  flag_present = os.path.isfile(flag_file)
  return flag_present
1140
1141
def delete_flag(flag_file):
  """Remove the bot update flag file if it exists; no-op otherwise."""
  if not os.path.isfile(flag_file):
    return
  os.remove(flag_file)
1146
1147
1148 def emit_flag(flag_file):
1149 """Deposit a bot update flag on the system to tell gclient not to run."""
1150 print 'Emitting flag file at %s' % flag_file
1151 with open(flag_file, 'wb') as f:
1152 f.write('Success!')
1153
1154
def get_commit_position_for_git_svn(url, revision):
  """Generates a commit position string for a 'git-svn' URL/revision.

  If the 'git-svn' URL maps to a known project, we will construct a commit
  position branch value by applying substitution on the SVN URL.

  Returns a '<branch>@{#<revision>}' string; |branch| falls back to the
  generic 'svn' when the URL matches no configured project.
  """
  # Identify the base URL so we can strip off trunk/branch name
  project_config = branch = None
  # NOTE: |project_config| deliberately survives this loop -- the
  # branch_map lookup below relies on the value left bound by the
  # iteration that matched (Python leaks for-loop variables).
  for _, project_config in GIT_SVN_PROJECT_MAP.iteritems():
    if url.startswith(project_config['svn_url']):
      branch = url[len(project_config['svn_url']):]
      break

  if branch:
    # Strip any leading slashes
    branch = branch.lstrip('/')

    # Try and map the branch
    for pattern, repl in project_config.get('branch_map', ()):
      nbranch, subn = re.subn(pattern, repl, branch, count=1)
      if subn:
        print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % (
            branch, nbranch)
        branch = nbranch
        break
  else:
    # Use generic 'svn' branch
    print 'INFO: Could not resolve project for SVN URL %r' % (url,)
    branch = 'svn'
  return '%s@{#%s}' % (branch, revision)
1185
1186
def get_commit_position(git_path, revision='HEAD'):
  """Returns the commit position string for |revision| in |git_path|.

  Looks for an explicit commit-position footer in the commit message
  first; failing that, derives one from a 'git-svn' footer (using its SVN
  revision number).  Returns None when neither footer is present.

  Raises ValueError when a 'git-svn' footer is present but malformed.
  """
  commit_msg = git('log', '--format=%B', '-n1', revision, cwd=git_path)
  footers = get_commit_message_footer_map(commit_msg)

  # Prefer explicit commit position metadata.
  for key in (COMMIT_POSITION_FOOTER_KEY, COMMIT_ORIGINAL_POSITION_FOOTER_KEY):
    if footers.get(key):
      return footers[key]

  # Fall back to composing a position from 'git-svn' metadata.
  git_svn_value = footers.get(GIT_SVN_ID_FOOTER_KEY)
  if not git_svn_value:
    return None
  m = GIT_SVN_ID_RE.match(git_svn_value)
  if not m:
    raise ValueError("Invalid 'git-svn' value: [%s]" % (git_svn_value,))
  return get_commit_position_for_git_svn(m.group(1), m.group(2))
1218
1219
def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs):
  """Translate git gclient revision mapping to build properties.

  Args:
    gclient_output: dict emitted by gclient sync; its 'solutions' entry
        maps checkout path -> solution info dict.
    got_revision_mapping: dict of checkout directory -> property name.
    use_svn_revs: if True, translate git hashes in the revision mapping
        to svn revision numbers.

  Returns:
    dict of property name -> revision.  When the reported revision differs
    from the git hash, '<name>_git' carries the hash; '<name>_cp' carries
    the commit position when one can be derived.
  """
  properties = {}
  # The comprehension variable is deliberately NOT named 'path': that would
  # shadow the module-level 'os.path' alias used throughout this file.
  solutions_output = {
      # Make sure path always ends with a single slash.
      '%s/' % sln_path.rstrip('/'): solution_output
      for sln_path, solution_output
      in gclient_output['solutions'].iteritems()
  }
  for dir_name, property_name in got_revision_mapping.iteritems():
    # Make sure dir_name always ends with a single slash.
    dir_name = '%s/' % dir_name.rstrip('/')
    if dir_name not in solutions_output:
      continue
    solution_output = solutions_output[dir_name]
    if solution_output.get('scm') is None:
      # This is an ignored DEPS, so the output got_revision should be 'None'.
      git_revision = revision = commit_position = None
    else:
      # Since we are using .DEPS.git, everything had better be git.
      assert solution_output.get('scm') == 'git'
      git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip()
      if use_svn_revs:
        # Fall back to the git hash when no svn revision maps to it.
        revision = get_svn_rev(git_revision, dir_name)
        if not revision:
          revision = git_revision
      else:
        revision = git_revision
      commit_position = get_commit_position(dir_name)

    properties[property_name] = revision
    if revision != git_revision:
      properties['%s_git' % property_name] = git_revision
    if commit_position:
      properties['%s_cp' % property_name] = commit_position

  return properties
1260
1261
def emit_json(out_file, did_run, gclient_output=None, **kwargs):
  """Serialize run information into |out_file| as sorted JSON.

  Starts from |gclient_output| (if any), records whether the script ran,
  and folds in any extra keyword arguments.
  """
  output = dict(gclient_output or {})
  output['did_run'] = did_run
  output.update(kwargs)
  with open(out_file, 'wb') as f:
    f.write(json.dumps(output, sort_keys=True))
1270
1271
def ensure_deps_revisions(deps_url_mapping, solutions, revisions):
  """Pin each DEPS checkout (excluding solutions) to its requested revision."""
  for deps_name, deps_data in sorted(deps_url_mapping.items()):
    if deps_name.strip('/') in solutions:
      # Solutions were already forced to the right revision by git_checkout().
      continue
    target = get_target_revision(deps_name, deps_data.get('url', None),
                                 revisions)
    if not target:
      continue
    # TODO(hinoka): Catch SVNRevisionNotFound error maybe?
    git('fetch', 'origin', cwd=deps_name)
    force_revision(deps_name, target)
1285
1286
def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only,
                    patch_root, issue, patchset, patch_url, rietveld_server,
                    gerrit_repo, gerrit_ref, revision_mapping,
                    apply_issue_email_file, apply_issue_key_file, buildspec,
                    gyp_env, shallow, runhooks, refs):
  """Bring the whole checkout (solutions + DEPS) to the requested state.

  Checks out the solutions with git, applies the DEPS portion of any patch
  first (so gclient sees dependency changes), runs gclient sync, pins DEPS
  checkouts to their requested revisions, and finally applies the rest of
  the patch.  At most one patch source is expected: an SVN patch URL, a
  Rietveld issue, or a Gerrit ref.

  Returns the gclient sync output dict.
  """
  # Get a checkout of each solution, without DEPS or hooks.
  # Calling git directly because there is no way to run Gclient without
  # invoking DEPS.
  print 'Fetching Git checkout'

  git_ref = git_checkout(solutions, revisions, shallow, refs)

  patches = None
  if patch_url:
    patches = get_svn_patch(patch_url)

  # Apply any DEPS-file changes from the patch before syncing, and remember
  # which targets were handled so they are skipped in the final pass below.
  already_patched = []
  patch_root = patch_root or ''
  for solution in solutions:
    if (patch_root == solution['name'] or
        solution['name'].startswith(patch_root + '/')):
      relative_root = solution['name'][len(patch_root) + 1:]
      target = '/'.join([relative_root, 'DEPS']).lstrip('/')
      if patches:
        apply_svn_patch(patch_root, patches, whitelist=[target])
        already_patched.append(target)
      elif issue:
        apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
                             revision_mapping, git_ref, apply_issue_email_file,
                             apply_issue_key_file, whitelist=[target])
        already_patched.append(target)

  if not buildspec:
    # Run deps2git if there is a DEPS change after the last .DEPS.git commit.
    for solution in solutions:
      ensure_deps2git(solution, shallow)

  # Ensure our build/ directory is set up with the correct .gclient file.
  gclient_configure(solutions, target_os, target_os_only)

  # Let gclient do the DEPS syncing.
  # The branch-head refspec is a special case because its possible Chrome
  # src, which contains the branch-head refspecs, is DEPSed in.
  gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs,
                                shallow)

  # Now that gclient_sync has finished, we should revert any .DEPS.git so that
  # presubmit doesn't complain about it being modified.
  if (not buildspec and
      git('ls-files', '.DEPS.git', cwd=first_sln).strip()):
    git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln)

  if buildspec and runhooks:
    # Run gclient runhooks if we're on an official builder.
    # TODO(hinoka): Remove this when the official builders run their own
    # runhooks step.
    gclient_runhooks(gyp_env)

  # Finally, ensure that all DEPS are pinned to the correct revision.
  dir_names = [sln['name'] for sln in solutions]
  ensure_deps_revisions(gclient_output.get('solutions', {}),
                        dir_names, revisions)
  # Apply the rest of the patch here (sans DEPS)
  if patches:
    apply_svn_patch(patch_root, patches, blacklist=already_patched)
  elif issue:
    apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
                         revision_mapping, git_ref, apply_issue_email_file,
                         apply_issue_key_file, blacklist=already_patched)
  elif gerrit_ref:
    apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root)

  # Reset the deps_file point in the solutions so that hooks get run properly.
  for sln in solutions:
    sln['deps_file'] = sln.get('deps_file', 'DEPS').replace('.DEPS.git', 'DEPS')
  gclient_configure(solutions, target_os, target_os_only)

  return gclient_output
1365
1366
def parse_revisions(revisions, root):
  """Turn a list of revision specs into a nice dictionary.

  We will always return a dict with {root: something}. By default if root
  is unspecified, or if revisions is [], then revision will be assigned 'HEAD'

  Each entry of |revisions| is either a bare revision (applied to |root|)
  or an '<alt_root>@<revision>' pair.  svn:// and http(s):// roots are
  normalized to https '.git' URLs; unrecognized schemes or SVN paths are
  skipped with a warning.
  """
  results = {root.strip('/'): 'HEAD'}
  expanded_revisions = []
  for revision in revisions:
    # Allow rev1,rev2,rev3 format.
    # TODO(hinoka): Delete this when webkit switches to recipes.
    expanded_revisions.extend(revision.split(','))
  for revision in expanded_revisions:
    split_revision = revision.split('@')
    if len(split_revision) == 1:
      # This is just a plain revision, set it as the revision for root.
      # NOTE(review): this writes results[root], while the default entry
      # above used root.strip('/'); if |root| ever carries surrounding
      # slashes these are two different keys -- confirm callers always
      # pass a bare directory name.
      results[root] = split_revision[0]
    elif len(split_revision) == 2:
      # This is an alt_root@revision argument.
      current_root, current_rev = split_revision

      # We want to normalize svn/git urls into .git urls.
      parsed_root = urlparse.urlparse(current_root)
      if parsed_root.scheme == 'svn':
        if parsed_root.path in RECOGNIZED_PATHS:
          normalized_root = RECOGNIZED_PATHS[parsed_root.path]
        else:
          print 'WARNING: SVN path %s not recognized, ignoring' % current_root
          continue
      elif parsed_root.scheme in ['http', 'https']:
        normalized_root = 'https://%s/%s' % (parsed_root.netloc,
                                             parsed_root.path)
        if not normalized_root.endswith('.git'):
          normalized_root = '%s.git' % normalized_root
      elif parsed_root.scheme:
        print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme
        continue
      else:
        # This is probably a local path.
        normalized_root = current_root.strip('/')

      results[normalized_root] = current_rev
    else:
      print ('WARNING: %r is not recognized as a valid revision specification,'
             'skipping' % revision)
  return results
1413
1414
def parse_args():
  """Build the option parser and parse/normalize the command line.

  Returns an (options, args) pair.  --with_branch_heads is folded into
  options.refs, and revision_mapping / revision_mapping_file are decoded
  into a dict (decode errors are reported as warnings, not fatal).
  """
  parse = optparse.OptionParser()

  parse.add_option('--issue', help='Issue number to patch from.')
  parse.add_option('--patchset',
                   help='Patchset from issue to patch from, if applicable.')
  parse.add_option('--apply_issue_email_file',
                   help='--email-file option passthrough for apply_patch.py.')
  parse.add_option('--apply_issue_key_file',
                   help='--private-key-file option passthrough for '
                        'apply_patch.py.')
  parse.add_option('--patch_url', help='Optional URL to SVN patch.')
  parse.add_option('--root', dest='patch_root',
                   help='DEPRECATED: Use --patch_root.')
  parse.add_option('--patch_root', help='Directory to patch on top of.')
  parse.add_option('--rietveld_server',
                   default='codereview.chromium.org',
                   help='Rietveld server.')
  parse.add_option('--gerrit_repo',
                   help='Gerrit repository to pull the ref from.')
  parse.add_option('--gerrit_ref', help='Gerrit ref to apply.')
  # Typo fix: help text previously read 'Gcilent spec.'.
  parse.add_option('--specs', help='Gclient spec.')
  parse.add_option('--master', help='Master name.')
  parse.add_option('-f', '--force', action='store_true',
                   help='Bypass check to see if we want to be run. '
                        'Should ONLY be used locally or by smart recipes.')
  parse.add_option('--revision_mapping',
                   help='{"path/to/repo/": "property_name"}')
  parse.add_option('--revision_mapping_file',
                   help=('Same as revision_mapping, except its a path to a json'
                         ' file containing that format.'))
  parse.add_option('--revision', action='append', default=[],
                   help='Revision to check out. Can be an SVN revision number, '
                        'git hash, or any form of git ref. Can prepend '
                        'root@<rev> to specify which repository, where root '
                        'is either a filesystem path, git https url, or '
                        'svn url. To specify Tip of Tree, set rev to HEAD.'
                        'To specify a git branch and an SVN rev, <rev> can be '
                        'set to <branch>:<revision>.')
  parse.add_option('--output_manifest', action='store_true',
                   help=('Add manifest json to the json output.'))
  parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0],
                   help='Hostname of the current machine, '
                        'used for determining whether or not to activate.')
  parse.add_option('--builder_name', help='Name of the builder, '
                   'used for determining whether or not to activate.')
  parse.add_option('--build_dir', default=os.getcwd())
  parse.add_option('--flag_file', default=path.join(os.getcwd(),
                                                    'update.flag'))
  parse.add_option('--shallow', action='store_true',
                   help='Use shallow clones for cache repositories.')
  parse.add_option('--gyp_env', action='append', default=[],
                   help='Environment variables to pass into gclient runhooks.')
  parse.add_option('--clobber', action='store_true',
                   help='Delete checkout first, always')
  parse.add_option('--bot_update_clobber', action='store_true', dest='clobber',
                   help='(synonym for --clobber)')
  parse.add_option('-o', '--output_json',
                   help='Output JSON information into a specified file')
  parse.add_option('--no_shallow', action='store_true',
                   help='Bypass disk detection and never shallow clone. '
                        'Does not override the --shallow flag')
  parse.add_option('--no_runhooks', action='store_true',
                   help='Do not run hooks on official builder.')
  parse.add_option('--refs', action='append',
                   help='Also fetch this refspec for the main solution(s). '
                        'Eg. +refs/branch-heads/*')
  parse.add_option('--with_branch_heads', action='store_true',
                   help='Always pass --with_branch_heads to gclient. This '
                        'does the same thing as --refs +refs/branch-heads/*')


  options, args = parse.parse_args()

  if not options.refs:
    options.refs = []

  # Fold the convenience flag into the refs list and drop it so callers
  # only ever consult options.refs.
  if options.with_branch_heads:
    options.refs.append(BRANCH_HEADS_REFSPEC)
    del options.with_branch_heads

  try:
    if options.revision_mapping_file:
      if options.revision_mapping:
        print ('WARNING: Ignoring --revision_mapping: --revision_mapping_file '
               'was set at the same time as --revision_mapping?')
      with open(options.revision_mapping_file, 'r') as f:
        options.revision_mapping = json.load(f)
    elif options.revision_mapping:
      options.revision_mapping = json.loads(options.revision_mapping)
  except Exception as e:
    # Typo fix: message previously read 'execption'.
    print (
        'WARNING: Caught exception while parsing revision_mapping*: %s'
        % (str(e),)
    )

  return options, args
1512
1513
def prepare(options, git_slns, active):
  """Prepares the target folder before we checkout.

  Wipes stale/clobbered checkouts, records activation state via the flag
  file, decides whether to use shallow clones based on disk size, and
  parses the requested revisions.

  Returns (revisions dict, step_text).  Raises Inactive when bot_update
  should not run (caught in main() for a clean exit).
  """
  dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
  # If we're active now, but the flag file doesn't exist (we weren't active
  # last run) or vice versa, blow away all checkouts.
  if bool(active) != bool(check_flag(options.flag_file)):
    ensure_no_checkout(dir_names, '*')
  if options.output_json:
    # Make sure we tell recipes that we didn't run if the script exits here.
    emit_json(options.output_json, did_run=active)
  if active:
    if options.clobber:
      ensure_no_checkout(dir_names, '*')
    else:
      # Only remove leftover SVN metadata, keep the git checkouts.
      ensure_no_checkout(dir_names, '.svn')
    emit_flag(options.flag_file)
  else:
    delete_flag(options.flag_file)
    raise Inactive # This is caught in main() and we exit cleanly.

  # Do a shallow checkout if the disk is less than 100GB.
  total_disk_space, free_disk_space = get_total_disk_space()
  total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024))
  used_disk_space_gb = int((total_disk_space - free_disk_space)
                           / (1024 * 1024 * 1024))
  # NOTE(review): assumes total_disk_space_gb > 0; a sub-1GB (or
  # misreported) disk would raise ZeroDivisionError here -- confirm.
  percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb)
  step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb,
                                           total_disk_space_gb,
                                           percent_used)
  if not options.output_json:
    print '@@@STEP_TEXT@%s@@@' % step_text
  if not options.shallow:
    options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD
                       and not options.no_shallow)

  # The first solution is where the primary DEPS file resides.
  first_sln = dir_names[0]

  # Split all the revision specifications into a nice dict.
  print 'Revisions: %s' % options.revision
  revisions = parse_revisions(options.revision, first_sln)
  print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln])
  return revisions, step_text
1557
1558
def checkout(options, git_slns, specs, buildspec, master,
             svn_root, revisions, step_text):
  """Run the checkout and report results.

  Drives ensure_checkout() (retrying once after a full wipe on gclient
  failure), then computes the got_revision properties and reports them
  either to the recipe via --output_json or to the annotator via
  build-step annotations.  PatchFailed is reported and re-raised so
  main() can translate it into a distinct exit code.
  """
  first_sln = git_slns[0]['name']
  dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
  try:
    # Outer try is for catching patch failures and exiting gracefully.
    # Inner try is for catching gclient failures and retrying gracefully.
    try:
      checkout_parameters = dict(
          # First, pass in the base of what we want to check out.
          solutions=git_slns,
          revisions=revisions,
          first_sln=first_sln,

          # Also, target os variables for gclient.
          target_os=specs.get('target_os', []),
          target_os_only=specs.get('target_os_only', False),

          # Then, pass in information about how to patch.
          patch_root=options.patch_root,
          issue=options.issue,
          patchset=options.patchset,
          patch_url=options.patch_url,
          rietveld_server=options.rietveld_server,
          gerrit_repo=options.gerrit_repo,
          gerrit_ref=options.gerrit_ref,
          revision_mapping=options.revision_mapping,
          apply_issue_email_file=options.apply_issue_email_file,
          apply_issue_key_file=options.apply_issue_key_file,

          # For official builders.
          buildspec=buildspec,
          gyp_env=options.gyp_env,
          runhooks=not options.no_runhooks,

          # Finally, extra configurations such as shallowness of the clone.
          shallow=options.shallow,
          refs=options.refs)
      gclient_output = ensure_checkout(**checkout_parameters)
    except GclientSyncFailed:
      print 'We failed gclient sync, lets delete the checkout and retry.'
      ensure_no_checkout(dir_names, '*')
      gclient_output = ensure_checkout(**checkout_parameters)
  except PatchFailed as e:
    if options.output_json:
      # Tell recipes information such as root, got_revision, etc.
      emit_json(options.output_json,
                did_run=True,
                root=first_sln,
                log_lines=[('patch error', e.output),],
                patch_apply_return_code=e.code,
                patch_root=options.patch_root,
                patch_failure=True,
                step_text='%s PATCH FAILED' % step_text,
                fixed_revisions=revisions)
    else:
      # If we're not on recipes, tell annotator about our got_revisions.
      emit_log_lines('patch error', e.output)
      print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text
    raise

  # Revision is an svn revision, unless it's a git master.
  use_svn_rev = master not in GIT_MASTERS

  # Take care of got_revisions outputs.
  revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {}))
  if options.revision_mapping:
    revision_mapping.update(options.revision_mapping)

  # If the repo is not in the default GOT_REVISION_MAPPINGS and no
  # revision_mapping were specified on the command line then
  # default to setting 'got_revision' based on the first solution.
  if not revision_mapping:
    revision_mapping[first_sln] = 'got_revision'

  got_revisions = parse_got_revision(gclient_output, revision_mapping,
                                     use_svn_rev)

  if not got_revisions:
    # TODO(hinoka): We should probably bail out here, but in the interest
    # of giving mis-configured bots some time to get fixed use a dummy
    # revision here.
    got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' }
    #raise Exception('No got_revision(s) found in gclient output')

  if options.output_json:
    manifest = create_manifest() if options.output_manifest else None
    # Tell recipes information such as root, got_revision, etc.
    emit_json(options.output_json,
              did_run=True,
              root=first_sln,
              patch_root=options.patch_root,
              step_text=step_text,
              fixed_revisions=revisions,
              properties=got_revisions,
              manifest=manifest)
  else:
    # If we're not on recipes, tell annotator about our got_revisions.
    emit_properties(got_revisions)
1658
1659
def print_help_text(force, output_json, active, master, builder, slave):
  """Print helpful messages to tell devs whats going on.

  Summarizes why bot_update is (or is not) active for this
  master/builder/slave combination, and whether recipes forced the
  decision.
  """
  if force and output_json:
    recipe_force = 'Forced on by recipes'
  elif active and output_json:
    recipe_force = 'Off by recipes, but forced on by bot update'
  elif not active and output_json:
    recipe_force = 'Forced off by recipes'
  else:
    recipe_force = 'N/A. Was not called by recipes'

  # NOTE: the trailing comma after the closing brace is deliberate -- in
  # Python 2 it suppresses print's newline so the activation message below
  # follows directly.
  print BOT_UPDATE_MESSAGE % {
    'master': master or 'Not specified',
    'builder': builder or 'Not specified',
    'slave': slave or 'Not specified',
    'recipe': recipe_force,
    'CURRENT_DIR': CURRENT_DIR,
    'BUILDER_DIR': BUILDER_DIR,
    'SLAVE_DIR': SLAVE_DIR,
    'THIS_DIR': THIS_DIR,
    'SCRIPTS_DIR': SCRIPTS_DIR,
    'BUILD_DIR': BUILD_DIR,
    'ROOT_DIR': ROOT_DIR,
    'DEPOT_TOOLS_DIR': DEPOT_TOOLS_DIR,
  },
  print ACTIVATED_MESSAGE if active else NOT_ACTIVATED_MESSAGE
1686
1687
def main():
  """Entry point: decide whether to run, then prepare and check out.

  Returns None on success/inactive, 87 for a patch-download failure,
  88 for a genuine patch-apply failure; other failures propagate.
  """
  # Get inputs.
  options, _ = parse_args()
  builder = options.builder_name
  slave = options.slave_name
  master = options.master

  # Check if this script should activate or not.
  active = check_valid_host(master, builder, slave) or options.force or False

  # Print a helpful message to tell developers whats going on with this step.
  print_help_text(
      options.force, options.output_json, active, master, builder, slave)

  # Parse, munipulate, and print the gclient solutions.
  # NOTE(review): exec of --specs runs arbitrary Python; presumably the
  # spec always comes from trusted recipe/buildbot config -- confirm.
  specs = {}
  exec(options.specs, specs)
  svn_solutions = specs.get('solutions', [])
  git_slns, svn_root, buildspec = solutions_to_git(svn_solutions)
  options.revision = maybe_ignore_revision(options.revision, buildspec)

  solutions_printer(git_slns)

  try:
    # Dun dun dun, the main part of bot_update.
    revisions, step_text = prepare(options, git_slns, active)
    checkout(options, git_slns, specs, buildspec, master, svn_root, revisions,
             step_text)

  except Inactive:
    # Not active, should count as passing.
    pass
  except PatchFailed as e:
    emit_flag(options.flag_file)
    # Return a specific non-zero exit code for patch failure (because it is
    # a failure), but make it different than other failures to distinguish
    # between infra failures (independent from patch author), and patch
    # failures (that patch author can fix). However, PatchFailure due to
    # download patch failure is still an infra problem.
    if e.code == 3:
      # Patch download problem.
      return 87
    # Genuine patch problem.
    return 88
  except Exception:
    # Unexpected failure.
    emit_flag(options.flag_file)
    raise
  else:
    emit_flag(options.flag_file)
1738
1739
# Script entry point; exit with main()'s return code so callers can
# distinguish patch failures (87/88) from other errors.
if __name__ == '__main__':
  sys.exit(main())
OLDNEW
« no previous file with comments | « recipe_modules/bot_update/resources/__init__.py ('k') | recipe_modules/bot_update/test_api.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698