Chromium Code Reviews

Side by Side Diff: recipe_modules/bot_update/resources/bot_update.py

Issue 1641363002: Adds bot_update to depot_tools. (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git@master
Patch Set: More fixes. Created 4 years, 10 months ago
1 #!/usr/bin/env python
iannucci 2016/01/30 00:40:17 I'm not sure that the __init__ file is needed? I
2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # TODO(hinoka): Use logging.
7
8 import cStringIO
9 import codecs
10 import collections
11 import copy
12 import ctypes
13 import json
14 import optparse
15 import os
16 import pprint
17 import random
18 import re
19 import socket
20 import subprocess
21 import sys
22 import tempfile
23 import threading
24 import time
25 import urllib2
26 import urlparse
27 import uuid
28
29 import os.path as path
30
31 # How many bytes at a time to read from pipes.
32 BUF_SIZE = 256
33
34
35 # TODO(luqui): This is a horrible hack to identify build_internal when build
36 # is a recipe dependency. bot_update should not be depending on internal,
37 # rather the arrow should go the other way (or just be destroyed).
38 def check_dir(name, dirs, default=None):
39 for d in dirs:
40 d = path.abspath(d)
41 if path.basename(d) == name and path.isdir(d):
42 return d
43 return default
44
45
46 # Define a bunch of directory paths.
47 # Relative to the current working directory.
48 CURRENT_DIR = path.abspath(os.getcwd())
49 BUILDER_DIR = path.dirname(CURRENT_DIR)
50 SLAVE_DIR = path.dirname(BUILDER_DIR)
51
52 # Relative to this script's filesystem path.
53 THIS_DIR = path.dirname(path.abspath(__file__))
54 SCRIPTS_DIR = check_dir(
55 'scripts', [
56 path.dirname(THIS_DIR),
57 path.join(SLAVE_DIR, '..', 'scripts'),
58 ], default=path.dirname(THIS_DIR))
59 BUILD_DIR = path.dirname(SCRIPTS_DIR)
60 ROOT_DIR = path.dirname(BUILD_DIR)
61
62 DEPOT_TOOLS_DIR = path.abspath(path.join(THIS_DIR, '..', '..', '..'))
63
64 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com'
65 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git'
66
67 # Official builds use buildspecs, so this is a special case.
68 BUILDSPEC_TYPE = collections.namedtuple('buildspec',
69 ('container', 'version'))
70 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/'
71 '(build|branches|releases)/(.+)$')
72 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/'
73 'buildspec')
74 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*'
75
76 BUILDSPEC_COMMIT_RE = (
77 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'),
78 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'),
79 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'),
80 )
81
82 # Regular expression that matches a single commit footer line.
83 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)')
84
85 # Footer metadata keys for regular and gsubtreed mirrored commit positions.
86 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position'
87 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position'
88 # Regular expression to parse a commit position
89 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')
90
91 # Regular expression to parse gclient's revinfo entries.
92 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')
93
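A minimal standalone sketch of how the two regexes above behave on typical inputs (the sample strings below are illustrative, not taken from a real build):

import re

_COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')
_REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')

# A typical Cr-Commit-Position value parses into (ref, sequence number).
m = _COMMIT_POSITION_RE.match('refs/heads/master@{#12345}')
assert m.groups() == ('refs/heads/master', '12345')

# A typical `gclient revinfo -a` line parses into (path, repo URL, revision).
m = _REVINFO_RE.match(
    'src: https://chromium.googlesource.com/chromium/src.git@deadbeef')
assert m.groups() == ('src',
                      'https://chromium.googlesource.com/chromium/src.git',
                      'deadbeef')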
94 # Used by 'ResolveSvnRevisionFromGitiles'
95 GIT_SVN_PROJECT_MAP = {
96 'webkit': {
97 'svn_url': 'svn://svn.chromium.org/blink',
98 'branch_map': [
99 (r'trunk', r'refs/heads/master'),
100 (r'branches/([^/]+)', r'refs/branch-heads/\1'),
101 ],
102 },
103 'v8': {
104 'svn_url': 'https://v8.googlecode.com/svn',
105 'branch_map': [
106 (r'trunk', r'refs/heads/candidates'),
107 (r'branches/bleeding_edge', r'refs/heads/master'),
108 (r'branches/([^/]+)', r'refs/branch-heads/\1'),
109 ],
110 },
111 'nacl': {
112 'svn_url': 'svn://svn.chromium.org/native_client',
113 'branch_map': [
114 (r'trunk/src/native_client', r'refs/heads/master'),
115 ],
116 },
117 }
118
119 # Key for the 'git-svn' ID metadata commit footer entry.
120 GIT_SVN_ID_FOOTER_KEY = 'git-svn-id'
121 # e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117
122 # ce2b1a6d-e550-0410-aec6-3dcde31c8c00
123 GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
124
125
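A minimal standalone check of GIT_SVN_ID_RE against the example footer quoted in the comment above (sketch only):

import re

_GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
m = _GIT_SVN_ID_RE.match(
    'https://v8.googlecode.com/svn/trunk@23117 '
    'ce2b1a6d-e550-0410-aec6-3dcde31c8c00')
# Group 1 is the SVN URL, group 2 the SVN revision.
assert m.group(1) == 'https://v8.googlecode.com/svn/trunk'
assert m.group(2) == '23117'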
126 # This is the git mirror of the buildspecs repository. We could rely on the svn
127 # checkout, now that the git buildspecs are checked in alongside the svn
128 # buildspecs, but we're going to want to pull all the buildspecs from here
129 # eventually anyhow, and there's already some logic to pull from git (for the
130 # old git_buildspecs.git repo), so just stick with that.
131 GIT_BUILDSPEC_REPO = (
132 'https://chrome-internal.googlesource.com/chrome/tools/buildspec')
133
134 # Copied from scripts/recipes/chromium.py.
135 GOT_REVISION_MAPPINGS = {
136 '/chrome/trunk/src': {
137 'src/': 'got_revision',
138 'src/native_client/': 'got_nacl_revision',
139 'src/tools/swarm_client/': 'got_swarm_client_revision',
140 'src/tools/swarming_client/': 'got_swarming_client_revision',
141 'src/third_party/WebKit/': 'got_webkit_revision',
142 'src/third_party/webrtc/': 'got_webrtc_revision',
143 'src/v8/': 'got_v8_revision',
144 }
145 }
146
147
148 BOT_UPDATE_MESSAGE = """
149 What is the "Bot Update" step?
150 ==============================
151
152 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and
153 its dependencies) is checked out in a consistent state. This means that all of
154 the necessary repositories are checked out, no extra repositories are checked
155 out, and no locally modified files are present.
156
157 These actions used to be taken care of by the "gclient revert" and "update"
158 steps. However, those steps are known to be buggy and occasionally flaky. This
159 step has two main advantages over them:
160 * it only operates in Git, so the logic can be clearer and cleaner; and
161 * it is a slave-side script, so its behavior can be modified without
162 restarting the master.
163
164 Why Git, you ask? Because that is the direction that the Chromium project is
165 heading. This step is an integral part of the transition from using the SVN repo
166 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while
167 we fully convert everything to Git. This message will get out of your way
168 eventually, and the waterfall will be a happier place because of it.
169
170 This step can be activated or deactivated independently on every builder on
171 every master. When it is active, the "gclient revert" and "update" steps become
172 no-ops. When it is inactive, it prints this message, cleans up after itself, and
173 lets everything else continue as though nothing has changed. Eventually, when
174 everything is stable enough, this step will replace them entirely.
175
176 Debugging information:
177 (master/builder/slave may be unspecified on recipes)
178 master: %(master)s
179 builder: %(builder)s
180 slave: %(slave)s
181 forced by recipes: %(recipe)s
182 CURRENT_DIR: %(CURRENT_DIR)s
183 BUILDER_DIR: %(BUILDER_DIR)s
184 SLAVE_DIR: %(SLAVE_DIR)s
185 THIS_DIR: %(THIS_DIR)s
186 SCRIPTS_DIR: %(SCRIPTS_DIR)s
187 BUILD_DIR: %(BUILD_DIR)s
188 ROOT_DIR: %(ROOT_DIR)s
189 DEPOT_TOOLS_DIR: %(DEPOT_TOOLS_DIR)s
190 bot_update.py is:"""
191
192 ACTIVATED_MESSAGE = """ACTIVE.
193 The bot will perform a Git checkout in this step.
194 The "gclient revert" and "update" steps are no-ops.
195
196 """
197
198 NOT_ACTIVATED_MESSAGE = """INACTIVE.
199 This step does nothing. You actually want to look at the "update" step.
200
201 """
202
203
204 GCLIENT_TEMPLATE = """solutions = %(solutions)s
205
206 cache_dir = r%(cache_dir)s
207 %(target_os)s
208 %(target_os_only)s
209 """
210
211 CHROME_INTERNAL_GIT_HOST = 'https://chrome-internal.googlesource.com'
212
213 RECOGNIZED_PATHS = {
214 # If SVN path matches key, the entire URL is rewritten to the Git url.
215 '/chrome/trunk/src':
216 CHROMIUM_SRC_URL,
217 '/chrome/trunk/src/tools/cros.DEPS':
218 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git',
219 '/chrome-internal/trunk/src-internal':
220 CHROME_INTERNAL_GIT_HOST + '/chrome/src-internal.git',
221 }
222
223 # How many times to try before giving up.
224 ATTEMPTS = 5
225
226 # TODO(martiniss) change to be less hard coded.
227 GIT_CACHE_PATH = path.join(
228 THIS_DIR, os.pardir, os.pardir, os.pardir, 'git_cache.py')
229
230 #talk to maruel
231 CACHE_DIR = path.join(CURRENT_DIR, 'cache_dir')
232
233 # Because we print CACHE_DIR out into a .gclient file, and then later run
234 # eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets
235 # parsed as "E:[\x08][\x08]uild".
236 if sys.platform.startswith('win'):
237 CACHE_DIR = CACHE_DIR.replace('\\', '\\\\')
238
239 # Find the patch tool.
240 # if sys.platform.startswith('win'):
241 # if not BUILD_INTERNAL_DIR:
242 # print 'Warning: could not find patch tool because there is no '
243 # print 'build_internal present.'
244 # PATCH_TOOL = None
245 # else:
246 # PATCH_TOOL = path.join(BUILD_INTERNAL_DIR, 'tools', 'patch.EXE')
247 # else:
248
249 # Ignoring windows for now, will add support later.
250 PATCH_TOOL = '/usr/bin/patch'
251
252 # If there is less than 100GB of disk space on the system, then we do
253 # a shallow checkout.
254 SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024
255
256
257 class SubprocessFailed(Exception):
258 def __init__(self, message, code, output):
259 Exception.__init__(self, message)
260 self.code = code
261 self.output = output
262
263
264 class PatchFailed(SubprocessFailed):
265 pass
266
267
268 class GclientSyncFailed(SubprocessFailed):
269 pass
270
271
272 class SVNRevisionNotFound(Exception):
273 pass
274
275
276 class InvalidDiff(Exception):
277 pass
278
279
280 class Inactive(Exception):
281 """Not really an exception, just used to exit early cleanly."""
282 pass
283
284
285 RETRY = object()
286 OK = object()
287 FAIL = object()
288
289
290 class PsPrinter(object):
291 def __init__(self, interval=300):
292 self.interval = interval
293 self.active = sys.platform.startswith('linux2')
294 self.thread = None
295
296 @staticmethod
297 def print_pstree():
298 """Debugging function used to print "ps auxwwf" for stuck processes."""
299 subprocess.call(['ps', 'auxwwf'])
300
301 def poke(self):
302 if self.active:
303 self.cancel()
304 self.thread = threading.Timer(self.interval, self.print_pstree)
305 self.thread.start()
306
307 def cancel(self):
308 if self.active and self.thread is not None:
309 self.thread.cancel()
310 self.thread = None
311
312
313 def call(*args, **kwargs): # pragma: no cover
314 """Interactive subprocess call."""
315 kwargs['stdout'] = subprocess.PIPE
316 kwargs['stderr'] = subprocess.STDOUT
317 kwargs.setdefault('bufsize', BUF_SIZE)
318 cwd = kwargs.get('cwd', os.getcwd())
319 result_fn = kwargs.pop('result_fn', lambda code, out: RETRY if code else OK)
320 stdin_data = kwargs.pop('stdin_data', None)
321 tries = kwargs.pop('tries', ATTEMPTS)
322 if stdin_data:
323 kwargs['stdin'] = subprocess.PIPE
324 out = cStringIO.StringIO()
325 new_env = kwargs.get('env', {})
326 env = copy.copy(os.environ)
327 env.update(new_env)
328 kwargs['env'] = env
329 attempt = 0
330 for attempt in range(1, tries + 1):
331 attempt_msg = ' (attempt #%d)' % attempt if attempt else ''
332 if new_env:
333 print '===Injecting Environment Variables==='
334 for k, v in sorted(new_env.items()):
335 print '%s: %s' % (k, v)
336 print '===Running %s%s===' % (' '.join(args), attempt_msg)
337 print 'In directory: %s' % cwd
338 start_time = time.time()
339 proc = subprocess.Popen(args, **kwargs)
340 if stdin_data:
341 proc.stdin.write(stdin_data)
342 proc.stdin.close()
343 psprinter = PsPrinter()
344 # This is here because passing 'sys.stdout' into stdout for proc will
345 # produce out of order output.
346 hanging_cr = False
347 while True:
348 psprinter.poke()
349 buf = proc.stdout.read(BUF_SIZE)
350 if not buf:
351 break
352 if hanging_cr:
353 buf = '\r' + buf
354 hanging_cr = buf.endswith('\r')
355 if hanging_cr:
356 buf = buf[:-1]
357 buf = buf.replace('\r\n', '\n').replace('\r', '\n')
358 sys.stdout.write(buf)
359 out.write(buf)
360 if hanging_cr:
361 sys.stdout.write('\n')
362 out.write('\n')
363 psprinter.cancel()
364
365 code = proc.wait()
366 elapsed_time = ((time.time() - start_time) / 60.0)
367 outval = out.getvalue()
368 result = result_fn(code, outval)
369 if result in (FAIL, RETRY):
370 print '===Failed in %.1f mins===' % elapsed_time
371 print
372 else:
373 print '===Succeeded in %.1f mins===' % elapsed_time
374 print
375 return outval
376 if result is FAIL:
377 break
378 if result is RETRY and attempt < tries:
379 sleep_backoff = 4 ** attempt
380 sleep_time = random.randint(sleep_backoff, int(sleep_backoff * 1.2))
381 print '===backing off, sleeping for %d secs===' % sleep_time
382 time.sleep(sleep_time)
383
384 raise SubprocessFailed('%s failed with code %d in %s after %d attempts.' %
385 (' '.join(args), code, cwd, attempt),
386 code, outval)
387
388
389 def git(*args, **kwargs): # pragma: no cover
390 """Wrapper around call specifically for Git commands."""
391 if args and args[0] == 'cache':
392 # Rewrite "git cache" calls into "python git_cache.py".
393 cmd = (sys.executable, '-u', GIT_CACHE_PATH) + args[1:]
394 else:
395 git_executable = 'git'
396 # On windows, subprocess doesn't fuzzy-match 'git' to 'git.bat', so we
397 # have to do it explicitly. This is better than passing shell=True.
398 if sys.platform.startswith('win'):
399 git_executable += '.bat'
400 cmd = (git_executable,) + args
401 return call(*cmd, **kwargs)
402
403
404 def get_gclient_spec(solutions, target_os, target_os_only):
405 return GCLIENT_TEMPLATE % {
406 'solutions': pprint.pformat(solutions, indent=4),
407 'cache_dir': '"%s"' % CACHE_DIR,
408 'target_os': ('\ntarget_os=%s' % target_os) if target_os else '',
409 'target_os_only': '\ntarget_os_only=%s' % target_os_only
410 }
411
412
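For reference, a sketch of the .gclient text this template produces for a single solution. The solution values and the cache path below are hypothetical; the real file is written later by gclient_configure() using get_gclient_spec():

import pprint

_GCLIENT_TEMPLATE = """solutions = %(solutions)s

cache_dir = r%(cache_dir)s
%(target_os)s
%(target_os_only)s
"""

solutions = [{
    'name': 'src',
    'url': 'https://chromium.googlesource.com/chromium/src.git',
    'deps_file': '.DEPS.git',
    'managed': False,
}]
# Mirrors get_gclient_spec() for a spec with no target_os; the cache path is
# a made-up example.
print(_GCLIENT_TEMPLATE % {
    'solutions': pprint.pformat(solutions, indent=4),
    'cache_dir': '"%s"' % '/b/build/slave/cache_dir',
    'target_os': '',
    'target_os_only': '\ntarget_os_only=False',
})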
413 def maybe_ignore_revision(revision, buildspec):
414 """Handle builders that don't care what buildbot tells them to build.
415
416 This is especially the case with branch builders that build from buildspecs
417 and/or trigger off multiple repositories, where the --revision passed in has
418 nothing to do with the solution being built. Clearing the revision in this
419 case causes bot_update to use HEAD rather than trying to check out an
420 inappropriate version of the solution.
421 """
422 if buildspec and buildspec.container == 'branches':
423 return []
424 return revision
425
426
427 def solutions_printer(solutions):
428 """Prints gclient solution to stdout."""
429 print 'Gclient Solutions'
430 print '================='
431 for solution in solutions:
432 name = solution.get('name')
433 url = solution.get('url')
434 print '%s (%s)' % (name, url)
435 if solution.get('deps_file'):
436 print ' Dependencies file is %s' % solution['deps_file']
437 if 'managed' in solution:
438 print ' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF')
439 custom_vars = solution.get('custom_vars')
440 if custom_vars:
441 print ' Custom Variables:'
442 for var_name, var_value in sorted(custom_vars.iteritems()):
443 print ' %s = %s' % (var_name, var_value)
444 custom_deps = solution.get('custom_deps')
445 if 'custom_deps' in solution:
446 print ' Custom Dependencies:'
447 for deps_name, deps_value in sorted(custom_deps.iteritems()):
448 if deps_value:
449 print ' %s -> %s' % (deps_name, deps_value)
450 else:
451 print ' %s: Ignore' % deps_name
452 for k, v in solution.iteritems():
453 # Print out all the keys we don't know about.
454 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps',
455 'managed']:
456 continue
457 print ' %s is %s' % (k, v)
458 print
459
460
461 def solutions_to_git(input_solutions):
462 """Modifies urls in solutions to point at Git repos.
463
464 returns: (git solutions, svn root of first solution, buildspec) tuple.
465 """
466 assert input_solutions
467 solutions = copy.deepcopy(input_solutions)
468 first_solution = True
469 buildspec = None
470 for solution in solutions:
471 original_url = solution['url']
472 parsed_url = urlparse.urlparse(original_url)
473 parsed_path = parsed_url.path
474
475 # Rewrite SVN urls into Git urls.
476 buildspec_m = re.match(BUILDSPEC_RE, parsed_path)
477 if first_solution and buildspec_m:
478 solution['url'] = GIT_BUILDSPEC_PATH
479 buildspec = BUILDSPEC_TYPE(
480 container=buildspec_m.group(1),
481 version=buildspec_m.group(2),
482 )
483 solution['deps_file'] = path.join(buildspec.container, buildspec.version,
484 'DEPS')
485 elif parsed_path in RECOGNIZED_PATHS:
486 solution['url'] = RECOGNIZED_PATHS[parsed_path]
487 solution['deps_file'] = '.DEPS.git'
488 elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc:
489 pass
490 else:
491 print 'Warning: %s' % ('path %r not recognized' % parsed_path,)
492
493 # Strip out deps containing $$V8_REV$$, etc.
494 if 'custom_deps' in solution:
495 new_custom_deps = {}
496 for deps_name, deps_value in solution['custom_deps'].iteritems():
497 if deps_value and '$$' in deps_value:
498 print 'Dropping %s:%s from custom deps' % (deps_name, deps_value)
499 else:
500 new_custom_deps[deps_name] = deps_value
501 solution['custom_deps'] = new_custom_deps
502
503 if first_solution:
504 root = parsed_path
505 first_solution = False
506
507 solution['managed'] = False
508 # We don't want gclient to be using a safesync URL. Instead it should
509 # be using the lkgr/lkcr branch/tags.
510 if 'safesync_url' in solution:
511 print 'Removing safesync url %s from %s' % (solution['safesync_url'],
512 parsed_path)
513 del solution['safesync_url']
514 return solutions, root, buildspec
515
516
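A usage sketch for solutions_to_git, assuming this script is importable as a module named bot_update (the SVN-style solution below is illustrative):

import bot_update

svn_slns = [{'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]
git_slns, svn_root, buildspec = bot_update.solutions_to_git(svn_slns)
# The path '/chrome/trunk/src' is in RECOGNIZED_PATHS, so the solution's url
# is rewritten to CHROMIUM_SRC_URL, deps_file becomes '.DEPS.git', and
# managed is forced to False. svn_root is '/chrome/trunk/src'; buildspec is
# None because the URL is not a buildspec path.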
517 def remove(target):
518 """Remove a target by moving it into build.dead."""
519 dead_folder = path.join(BUILDER_DIR, 'build.dead')
520 if not path.exists(dead_folder):
521 os.makedirs(dead_folder)
522 os.rename(target, path.join(dead_folder, uuid.uuid4().hex))
523
524
525 def ensure_no_checkout(dir_names, scm_dirname):
526 """Ensure that there is no undesired checkout under build/.
527
528 If there is an incorrect checkout under build/, then
529 move build/ to build.dead/
530 This function will check each directory in dir_names.
531
532 scm_dirname is expected to be one of '.svn', '.git', or '*'.
533 """
534 assert scm_dirname in ['.svn', '.git', '*']
535 has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname))
536 for dir_name in dir_names)
537
538 if has_checkout or scm_dirname == '*':
539 build_dir = os.getcwd()
540 prefix = ''
541 if scm_dirname != '*':
542 prefix = '%s detected in checkout, ' % scm_dirname
543
544 for filename in os.listdir(build_dir):
545 deletion_target = path.join(build_dir, filename)
546 print '%sdeleting %s...' % (prefix, deletion_target),
547 remove(deletion_target)
548 print 'done'
549
550
551 def gclient_configure(solutions, target_os, target_os_only):
552 """Should do the same thing as gclient --spec='...'."""
553 with codecs.open('.gclient', mode='w', encoding='utf-8') as f:
554 f.write(get_gclient_spec(solutions, target_os, target_os_only))
555
556
557 def gclient_sync(with_branch_heads, shallow):
558 # We just need to allocate a filename.
559 fd, gclient_output_file = tempfile.mkstemp(suffix='.json')
560 os.close(fd)
561 gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
562 cmd = [gclient_bin, 'sync', '--verbose', '--reset', '--force',
563 '--ignore_locks', '--output-json', gclient_output_file,
564 '--nohooks', '--noprehooks', '--delete_unversioned_trees']
565 if with_branch_heads:
566 cmd += ['--with_branch_heads']
567 if shallow:
568 cmd += ['--shallow']
569
570 try:
571 call(*cmd, tries=1)
572 except SubprocessFailed as e:
573 # Throw a GclientSyncFailed exception so we can catch this independently.
574 raise GclientSyncFailed(e.message, e.code, e.output)
575 else:
576 with open(gclient_output_file) as f:
577 return json.load(f)
578 finally:
579 os.remove(gclient_output_file)
580
581
582 def gclient_runhooks(gyp_envs):
583 gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
584 env = dict([env_var.split('=', 1) for env_var in gyp_envs])
585 call(gclient_bin, 'runhooks', env=env)
586
587
588 def gclient_revinfo():
589 gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
590 return call(gclient_bin, 'revinfo', '-a') or ''
591
592
593 def create_manifest():
594 manifest = {}
595 output = gclient_revinfo()
596 for line in output.strip().splitlines():
597 match = REVINFO_RE.match(line.strip())
598 if match:
599 manifest[match.group(1)] = {
600 'repository': match.group(2),
601 'revision': match.group(3),
602 }
603 else:
604 print "WARNING: Couldn't match revinfo line:\n%s" % line
605 return manifest
606
607
608 def get_commit_message_footer_map(message):
609 """Returns: (dict) A dictionary of commit message footer entries.
610 """
611 footers = {}
612
613 # Extract the lines in the footer block.
614 lines = []
615 for line in message.strip().splitlines():
616 line = line.strip()
617 if len(line) == 0:
618 del lines[:]
619 continue
620 lines.append(line)
621
622 # Parse the footer
623 for line in lines:
624 m = COMMIT_FOOTER_ENTRY_RE.match(line)
625 if not m:
626 # If any single line isn't valid, the entire footer is invalid.
627 footers.clear()
628 return footers
629 footers[m.group(1)] = m.group(2).strip()
630 return footers
631
632
633 def get_commit_message_footer(message, key):
634 """Returns: (str/None) The footer value for 'key', or None if none was found.
635 """
636 return get_commit_message_footer_map(message).get(key)
637
638
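A usage sketch for the two footer helpers above, assuming this script is importable as bot_update (the commit message is illustrative):

import bot_update

msg = ('Fix the frobnicator.\n'
       '\n'
       'Cr-Commit-Position: refs/heads/master@{#12345}\n'
       'git-svn-id: https://v8.googlecode.com/svn/trunk@23117 '
       'ce2b1a6d-e550-0410-aec6-3dcde31c8c00\n')
footers = bot_update.get_commit_message_footer_map(msg)
# footers == {'Cr-Commit-Position': 'refs/heads/master@{#12345}',
#             'git-svn-id': 'https://v8.googlecode.com/svn/trunk@23117 '
#                           'ce2b1a6d-e550-0410-aec6-3dcde31c8c00'}
assert bot_update.get_commit_message_footer(
    msg, bot_update.COMMIT_POSITION_FOOTER_KEY) == 'refs/heads/master@{#12345}'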
639 def get_svn_rev(git_hash, dir_name):
640 log = git('log', '-1', git_hash, cwd=dir_name)
641 git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY)
642 if not git_svn_id:
643 return None
644 m = GIT_SVN_ID_RE.match(git_svn_id)
645 if not m:
646 return None
647 return int(m.group(2))
648
649
650 def get_git_hash(revision, branch, sln_dir):
651 """We want to search for the SVN revision on the git-svn branch.
652
653 Note that git will search backwards from origin/master.
654 """
655 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision)
656 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
657 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref]
658 result = git(*cmd, cwd=sln_dir).strip()
659 if result:
660 return result
661 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' %
662 (revision, sln_dir))
663
664
665 def _last_commit_for_file(filename, repo_base):
666 cmd = ['log', '--format=%H', '--max-count=1', '--', filename]
667 return git(*cmd, cwd=repo_base).strip()
668
669
670 def need_to_run_deps2git(repo_base, deps_file, deps_git_file):
671 """Checks to see if we need to run deps2git.
672
673 Returns True if there was a DEPS change after the last .DEPS.git update
674 or if DEPS has local modifications.
675 """
676 # See if DEPS is dirty
677 deps_file_status = git(
678 'status', '--porcelain', deps_file, cwd=repo_base).strip()
679 if deps_file_status and deps_file_status.startswith('M '):
680 return True
681
682 last_known_deps_ref = _last_commit_for_file(deps_file, repo_base)
683 last_known_deps_git_ref = _last_commit_for_file(deps_git_file, repo_base)
684 merge_base_ref = git('merge-base', last_known_deps_ref,
685 last_known_deps_git_ref, cwd=repo_base).strip()
686
687 # If the merge base of the last DEPS and last .DEPS.git file is not
688 # equivalent to the hash of the last DEPS file, that means the DEPS file
689 # was committed after the last .DEPS.git file.
690 return last_known_deps_ref != merge_base_ref
691
692
693 def ensure_deps2git(solution, shallow):
694 repo_base = path.join(os.getcwd(), solution['name'])
695 deps_file = path.join(repo_base, 'DEPS')
696 deps_git_file = path.join(repo_base, '.DEPS.git')
697 if (not git('ls-files', 'DEPS', cwd=repo_base).strip() or
698 not git('ls-files', '.DEPS.git', cwd=repo_base).strip()):
699 return
700
701 print 'Checking if %s is newer than %s' % (deps_file, deps_git_file)
702 if not need_to_run_deps2git(repo_base, deps_file, deps_git_file):
703 return
704
705 print "HEY YOU"
706 print "="*80
707 print "deps2git is gone now bro :("
708 print "="*80
709 sys.exit(1)
710
711
712 def emit_log_lines(name, lines):
713 for line in lines.splitlines():
714 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line)
715 print '@@@STEP_LOG_END@%s@@@' % name
716
717
718 def emit_properties(properties):
719 for property_name, property_value in sorted(properties.items()):
720 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value)
721
722
723 # Derived from:
724 # http://code.activestate.com/recipes/577972-disk-usage/?in=user-4178764
725 def get_total_disk_space():
726 cwd = os.getcwd()
727 # Windows is the only platform that doesn't support os.statvfs, so
728 # we need to special case this.
729 if sys.platform.startswith('win'):
730 _, total, free = (ctypes.c_ulonglong(), ctypes.c_ulonglong(), \
731 ctypes.c_ulonglong())
732 if sys.version_info >= (3,) or isinstance(cwd, unicode):
733 fn = ctypes.windll.kernel32.GetDiskFreeSpaceExW
734 else:
735 fn = ctypes.windll.kernel32.GetDiskFreeSpaceExA
736 ret = fn(cwd, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
737 if ret == 0:
738 # WinError() will fetch the last error code.
739 raise ctypes.WinError()
740 return (total.value, free.value)
741
742 else:
743 st = os.statvfs(cwd)
744 free = st.f_bavail * st.f_frsize
745 total = st.f_blocks * st.f_frsize
746 return (total, free)
747
748
749 def get_target_revision(folder_name, git_url, revisions):
750 normalized_name = folder_name.strip('/')
751 if normalized_name in revisions:
752 return revisions[normalized_name]
753 if git_url in revisions:
754 return revisions[git_url]
755 return None
756
757
758 def force_revision(folder_name, revision):
759 split_revision = revision.split(':', 1)
760 branch = 'master'
761 if len(split_revision) == 2:
762 # Support for "branch:revision" syntax.
763 branch, revision = split_revision
764
765 if revision and revision.upper() != 'HEAD':
766 if revision and revision.isdigit() and len(revision) < 40:
767 # rev_num is really a svn revision number, convert it into a git hash.
768 git_ref = get_git_hash(int(revision), branch, folder_name)
769 else:
770 # rev_num is actually a git hash or ref, we can just use it.
771 git_ref = revision
772 git('checkout', '--force', git_ref, cwd=folder_name)
773 else:
774 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
775 git('checkout', '--force', ref, cwd=folder_name)
776
777 def git_checkout(solutions, revisions, shallow, refs):
778 build_dir = os.getcwd()
779 # Before we do anything, break all git_cache locks.
780 if path.isdir(CACHE_DIR):
781 git('cache', 'unlock', '-vv', '--force', '--all', '--cache-dir', CACHE_DIR)
782 for item in os.listdir(CACHE_DIR):
783 filename = os.path.join(CACHE_DIR, item)
784 if item.endswith('.lock'):
785 raise Exception('%s exists after cache unlock' % filename)
786 first_solution = True
787 for sln in solutions:
788 # This is so we can loop back and try again if we need to wait for the
789 # git mirrors to update from SVN.
790 done = False
791 tries_left = 60
792 while not done:
793 name = sln['name']
794 url = sln['url']
795 if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
796 # Experiments show there's little to be gained from
797 # a shallow clone of src.
798 shallow = False
799 sln_dir = path.join(build_dir, name)
800 s = ['--shallow'] if shallow else []
801 populate_cmd = (['cache', 'populate', '--ignore_locks', '-v',
802 '--cache-dir', CACHE_DIR] + s + [url])
803 for ref in refs:
804 populate_cmd.extend(['--ref', ref])
805 git(*populate_cmd)
806 mirror_dir = git(
807 'cache', 'exists', '--quiet', '--cache-dir', CACHE_DIR, url).strip()
808 clone_cmd = (
809 'clone', '--no-checkout', '--local', '--shared', mirror_dir, sln_dir)
810
811 try:
812 if not path.isdir(sln_dir):
813 git(*clone_cmd)
814 else:
815 git('remote', 'set-url', 'origin', mirror_dir, cwd=sln_dir)
816 git('fetch', 'origin', cwd=sln_dir)
817 for ref in refs:
818 refspec = '%s:%s' % (ref, ref.lstrip('+'))
819 git('fetch', 'origin', refspec, cwd=sln_dir)
820
821 revision = get_target_revision(name, url, revisions) or 'HEAD'
822 force_revision(sln_dir, revision)
823 done = True
824 except SubprocessFailed as e:
825 # Exited abnormally, there's probably something wrong.
826 # Let's wipe the checkout and try again.
827 tries_left -= 1
828 if tries_left > 0:
829 print 'Something failed: %s.' % str(e)
830 print 'waiting 5 seconds and trying again...'
831 time.sleep(5)
832 else:
833 raise
834 remove(sln_dir)
835 except SVNRevisionNotFound:
836 tries_left -= 1
837 if tries_left > 0:
838 # If we don't have the correct revision, wait and try again.
839 print 'We can\'t find revision %s.' % revision
840 print 'The svn to git replicator is probably falling behind.'
841 print 'waiting 5 seconds and trying again...'
842 time.sleep(5)
843 else:
844 raise
845
846 git('clean', '-dff', cwd=sln_dir)
847
848 if first_solution:
849 git_ref = git('log', '--format=%H', '--max-count=1',
850 cwd=sln_dir).strip()
851 first_solution = False
852 return git_ref
853
854
855 def _download(url):
856 """Fetch url and return content, with retries for flake."""
857 for attempt in xrange(ATTEMPTS):
858 try:
859 return urllib2.urlopen(url).read()
860 except Exception:
861 if attempt == ATTEMPTS - 1:
862 raise
863
864
865 def parse_diff(diff):
866 """Takes a unified diff and returns a list of diffed files and their diffs.
867
868 The return format is a list of pairs of:
869 (<filename>, <diff contents>)
870 <diff contents> is inclusive of the diff line.
871 """
872 result = []
873 current_diff = ''
874 current_header = None
875 for line in diff.splitlines():
876 # "diff" is for git style patches, and "Index: " is for SVN style patches.
877 if line.startswith('diff') or line.startswith('Index: '):
878 if current_header:
879 # If we are in a diff portion, then save the diff.
880 result.append((current_header, '%s\n' % current_diff))
881 git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line)
882 svn_header_match = re.match(r'Index: (.*)', line)
883
884 if git_header_match:
885 # First, see if it's a git style header.
886 from_file = git_header_match.group(1)
887 to_file = git_header_match.group(2)
888 if from_file != to_file and from_file.startswith('a/'):
889 # Sometimes git prepends 'a/' and 'b/' in front of file paths.
890 from_file = from_file[2:]
891 current_header = from_file
892
893 elif svn_header_match:
894 # Otherwise, check if it's an SVN style header.
895 current_header = svn_header_match.group(1)
896
897 else:
898 # Otherwise... I'm not really sure what to do with this.
899 raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' %
900 (line, diff))
901
902 current_diff = ''
903 current_diff += '%s\n' % line
904 if current_header:
905 # We hit EOF, so save the last diff.
906 result.append((current_header, current_diff))
907 return result
908
909
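A usage sketch for parse_diff, assuming this script is importable as bot_update (the diff text is illustrative):

import bot_update

sample = ('diff --git a/foo/bar.cc b/foo/bar.cc\n'
          '--- a/foo/bar.cc\n'
          '+++ b/foo/bar.cc\n'
          '@@ -1 +1 @@\n'
          '-old\n'
          '+new\n')
files = bot_update.parse_diff(sample)
# files == [('foo/bar.cc', sample)]: one (filename, diff) pair, with the
# "a/" prefix stripped from the filename and the diff contents kept intact,
# including the leading "diff --git" line.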
910 def get_svn_patch(patch_url):
911 """Fetch patch from patch_url, return list of (filename, diff)"""
912 svn_exe = 'svn.bat' if sys.platform.startswith('win') else 'svn'
913 patch_data = call(svn_exe, 'cat', patch_url)
914 return parse_diff(patch_data)
915
916 # REMOVE
917 def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None):
918 """Expects a list of (filename, diff), applies it on top of patch_root."""
919 if whitelist:
920 patches = [(name, diff) for name, diff in patches if name in whitelist]
921 elif blacklist:
922 patches = [(name, diff) for name, diff in patches if name not in blacklist]
923 diffs = [diff for _, diff in patches]
924 patch = ''.join(diffs)
925
926 if patch:
927 print '===Patching files==='
928 for filename, _ in patches:
929 print 'Patching %s' % filename
930 try:
931 call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward',
932 stdin_data=patch, cwd=patch_root, tries=1)
933 for filename, _ in patches:
934 full_filename = path.abspath(path.join(patch_root, filename))
935 git('add', full_filename, cwd=path.dirname(full_filename))
936 except SubprocessFailed as e:
937 raise PatchFailed(e.message, e.code, e.output)
938
939 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision,
940 email_file, key_file, whitelist=None, blacklist=None):
941 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win')
942 else 'apply_issue')
943 cmd = [apply_issue_bin,
944 # The patch will be applied on top of this directory.
945 '--root_dir', root,
946 # Tell apply_issue how to fetch the patch.
947 '--issue', issue,
948 '--server', server,
949 # Always run apply_issue.py, otherwise it would see update.flag
950 # and then bail out.
951 '--force',
952 # Don't run gclient sync when it sees a DEPS change.
953 '--ignore_deps',
954 # TODO(tandrii): remove after http://crbug.com/537417 is resolved.
955 # Temporarily enable verbosity to see if Rietveld requests are actually
956 # retried.
957 '-v', '-v', # = logging.DEBUG level.
958 ]
959 # Use an oauth key file if specified.
960 if email_file and key_file:
961 cmd.extend(['--email-file', email_file, '--private-key-file', key_file])
962 else:
963 cmd.append('--no-auth')
964
965 if patchset:
966 cmd.extend(['--patchset', patchset])
967 if whitelist:
968 for item in whitelist:
969 cmd.extend(['--whitelist', item])
970 elif blacklist:
971 for item in blacklist:
972 cmd.extend(['--blacklist', item])
973
974 # Only try once, since subsequent failures hide the real failure.
975 try:
976 call(*cmd, tries=1)
977 except SubprocessFailed as e:
978 raise PatchFailed(e.message, e.code, e.output)
979
980 def apply_gerrit_ref(gerrit_repo, gerrit_ref, root):
981 gerrit_repo = gerrit_repo or 'origin'
982 assert gerrit_ref
983 try:
984 base_rev = git('rev-parse', 'HEAD', cwd=root).strip()
985 git('retry', 'fetch', gerrit_repo, gerrit_ref, cwd=root, tries=1)
986 git('checkout', 'FETCH_HEAD', cwd=root)
987 git('reset', '--soft', base_rev, cwd=root)
988 except SubprocessFailed as e:
989 raise PatchFailed(e.message, e.code, e.output)
990
991 def check_flag(flag_file):
992 """Returns True if the flag file is present."""
993 return os.path.isfile(flag_file)
994
995
996 def delete_flag(flag_file):
997 """Remove bot update flag."""
998 if os.path.isfile(flag_file):
999 os.remove(flag_file)
1000
1001
1002 def emit_flag(flag_file):
1003 """Deposit a bot update flag on the system to tell gclient not to run."""
1004 print 'Emitting flag file at %s' % flag_file
1005 with open(flag_file, 'wb') as f:
1006 f.write('Success!')
1007
1008
1009 def get_commit_position_for_git_svn(url, revision):
1010 """Generates a commit position string for a 'git-svn' URL/revision.
1011
1012 If the 'git-svn' URL maps to a known project, we will construct a commit
1013 position branch value by applying substitution on the SVN URL.
1014 """
1015 # Identify the base URL so we can strip off trunk/branch name
1016 project_config = branch = None
1017 for _, project_config in GIT_SVN_PROJECT_MAP.iteritems():
1018 if url.startswith(project_config['svn_url']):
1019 branch = url[len(project_config['svn_url']):]
1020 break
1021
1022 if branch:
1023 # Strip any leading slashes
1024 branch = branch.lstrip('/')
1025
1026 # Try and map the branch
1027 for pattern, repl in project_config.get('branch_map', ()):
1028 nbranch, subn = re.subn(pattern, repl, branch, count=1)
1029 if subn:
1030 print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % (
1031 branch, nbranch)
1032 branch = nbranch
1033 break
1034 else:
1035 # Use generic 'svn' branch
1036 print 'INFO: Could not resolve project for SVN URL %r' % (url,)
1037 branch = 'svn'
1038 return '%s@{#%s}' % (branch, revision)
1039
1040
1041 def get_commit_position(git_path, revision='HEAD'):
1042 """Dumps the 'git' log for a specific revision and parses out the commit
1043 position.
1044
1045 If a commit position metadata key is found, its value will be returned.
1046
1047 Otherwise, we will search for a 'git-svn' metadata entry. If one is found,
1048 we will compose a commit position from it, using its SVN revision value as
1049 the revision.
1050
1051 If the 'git-svn' URL maps to a known project, we will construct a commit
1052 position branch value by truncating the URL, mapping 'trunk' to
1053 "refs/heads/master". Otherwise, we will return the generic branch, 'svn'.
1054 """
1055 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path)
1056 footer_map = get_commit_message_footer_map(git_log)
1057
1058 # Search for commit position metadata
1059 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or
1060 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY))
1061 if value:
1062 return value
1063
1064 # Compose a commit position from 'git-svn' metadata
1065 value = footer_map.get(GIT_SVN_ID_FOOTER_KEY)
1066 if value:
1067 m = GIT_SVN_ID_RE.match(value)
1068 if not m:
1069 raise ValueError("Invalid 'git-svn' value: [%s]" % (value,))
1070 return get_commit_position_for_git_svn(m.group(1), m.group(2))
1071 return None
1072
1073
1074 def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs):
1075 """Translate git gclient revision mapping to build properties.
1076
1077 If use_svn_revs is True, then translate git hashes in the revision mapping
1078 to svn revision numbers.
1079 """
1080 properties = {}
1081 solutions_output = {
1082 # Make sure path always ends with a single slash.
1083 '%s/' % path.rstrip('/') : solution_output for path, solution_output
1084 in gclient_output['solutions'].iteritems()
1085 }
1086 for dir_name, property_name in got_revision_mapping.iteritems():
1087 # Make sure dir_name always ends with a single slash.
1088 dir_name = '%s/' % dir_name.rstrip('/')
1089 if dir_name not in solutions_output:
1090 continue
1091 solution_output = solutions_output[dir_name]
1092 if solution_output.get('scm') is None:
1093 # This is an ignored DEPS, so the output got_revision should be 'None'.
1094 git_revision = revision = commit_position = None
1095 else:
1096 # Since we are using .DEPS.git, everything had better be git.
1097 assert solution_output.get('scm') == 'git'
1098 git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip()
1099 if use_svn_revs:
1100 revision = get_svn_rev(git_revision, dir_name)
1101 if not revision:
1102 revision = git_revision
1103 else:
1104 revision = git_revision
1105 commit_position = get_commit_position(dir_name)
1106
1107 properties[property_name] = revision
1108 if revision != git_revision:
1109 properties['%s_git' % property_name] = git_revision
1110 if commit_position:
1111 properties['%s_cp' % property_name] = commit_position
1112
1113 return properties
1114
1115
1116 def emit_json(out_file, did_run, gclient_output=None, **kwargs):
1117 """Write run information into a JSON file."""
1118 output = {}
1119 output.update(gclient_output if gclient_output else {})
1120 output.update({'did_run': did_run})
1121 output.update(kwargs)
1122 with open(out_file, 'wb') as f:
1123 f.write(json.dumps(output, sort_keys=True))
1124
1125
1126 def ensure_deps_revisions(deps_url_mapping, solutions, revisions):
1127 """Ensure correct DEPS revisions, ignores solutions."""
1128 for deps_name, deps_data in sorted(deps_url_mapping.items()):
1129 if deps_name.strip('/') in solutions:
1130 # This has already been forced to the correct solution by git_checkout().
1131 continue
1132 revision = get_target_revision(deps_name, deps_data.get('url', None),
1133 revisions)
1134 if not revision:
1135 continue
1136 # TODO(hinoka): Catch SVNRevisionNotFound error maybe?
1137 git('fetch', 'origin', cwd=deps_name)
1138 force_revision(deps_name, revision)
1139
1140
1141 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only,
1142 patch_root, issue, patchset, patch_url, rietveld_server,
1143 gerrit_repo, gerrit_ref, revision_mapping,
1144 apply_issue_email_file, apply_issue_key_file, buildspec,
1145 gyp_env, shallow, runhooks, refs):
1146 # Get a checkout of each solution, without DEPS or hooks.
1147 # Calling git directly because there is no way to run Gclient without
1148 # invoking DEPS.
1149 print 'Fetching Git checkout'
1150
1151 git_ref = git_checkout(solutions, revisions, shallow, refs)
1152
1153 patches = None
1154 if patch_url:
1155 patches = get_svn_patch(patch_url)
1156
1157 already_patched = []
1158 patch_root = patch_root or ''
1159 for solution in solutions:
1160 if (patch_root == solution['name'] or
1161 solution['name'].startswith(patch_root + '/')):
1162 relative_root = solution['name'][len(patch_root) + 1:]
1163 target = '/'.join([relative_root, 'DEPS']).lstrip('/')
1164 if patches:
1165 print "HEY YOU"
1166 print "="*80
1167 print "You should not be trying to patch in a svn change."
1168 print "="*80
1169 sys.exit(1)
1170 elif issue:
1171 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
1172 revision_mapping, git_ref, apply_issue_email_file,
1173 apply_issue_key_file, whitelist=[target])
1174 already_patched.append(target)
1175
1176 if not buildspec:
1177 # Run deps2git if there is a DEPS change after the last .DEPS.git commit.
1178 for solution in solutions:
1179 ensure_deps2git(solution, shallow)
1180
1181 # Ensure our build/ directory is set up with the correct .gclient file.
1182 gclient_configure(solutions, target_os, target_os_only)
1183
1184 # Let gclient do the DEPS syncing.
1185 # The branch-head refspec is a special case because it's possible Chrome
1186 # src, which contains the branch-head refspecs, is DEPSed in.
1187 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs,
1188 shallow)
1189
1190 # Now that gclient_sync has finished, we should revert any .DEPS.git so that
1191 # presubmit doesn't complain about it being modified.
1192 if (not buildspec and
1193 git('ls-files', '.DEPS.git', cwd=first_sln).strip()):
1194 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln)
1195
1196 if buildspec and runhooks:
1197 # Run gclient runhooks if we're on an official builder.
1198 # TODO(hinoka): Remove this when the official builders run their own
1199 # runhooks step.
1200 gclient_runhooks(gyp_env)
1201
1202 # Finally, ensure that all DEPS are pinned to the correct revision.
1203 dir_names = [sln['name'] for sln in solutions]
1204 ensure_deps_revisions(gclient_output.get('solutions', {}),
1205 dir_names, revisions)
1206 # Apply the rest of the patch here (sans DEPS)
1207 if patches:
1208 apply_svn_patch(patch_root, patches, blacklist=already_patched)
1209 elif issue:
1210 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
1211 revision_mapping, git_ref, apply_issue_email_file,
1212 apply_issue_key_file, blacklist=already_patched)
1213 elif gerrit_ref:
1214 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root)
1215
1216 # Reset the deps_file setting in the solutions so that hooks get run properly.
1217 for sln in solutions:
1218 sln['deps_file'] = sln.get('deps_file', 'DEPS').replace('.DEPS.git', 'DEPS')
1219 gclient_configure(solutions, target_os, target_os_only)
1220
1221 return gclient_output
1222
1223
1224 def parse_revisions(revisions, root):
1225 """Turn a list of revision specs into a nice dictionary.
1226
1227 We will always return a dict with {root: something}. By default, if root
1228 is unspecified or revisions is [], then the root's revision will be 'HEAD'.
1229 """
1230 results = {root.strip('/'): 'HEAD'}
1231 expanded_revisions = []
1232 for revision in revisions:
1233 # Allow rev1,rev2,rev3 format.
1234 # TODO(hinoka): Delete this when webkit switches to recipes.
1235 expanded_revisions.extend(revision.split(','))
1236 for revision in expanded_revisions:
1237 split_revision = revision.split('@')
1238 if len(split_revision) == 1:
1239 # This is just a plain revision, set it as the revision for root.
1240 results[root] = split_revision[0]
1241 elif len(split_revision) == 2:
1242 # This is an alt_root@revision argument.
1243 current_root, current_rev = split_revision
1244
1245 # We want to normalize svn/git urls into .git urls.
1246 parsed_root = urlparse.urlparse(current_root)
1247 if parsed_root.scheme == 'svn':
1248 if parsed_root.path in RECOGNIZED_PATHS:
1249 normalized_root = RECOGNIZED_PATHS[parsed_root.path]
1250 else:
1251 print 'WARNING: SVN path %s not recognized, ignoring' % current_root
1252 continue
1253 elif parsed_root.scheme in ['http', 'https']:
1254 normalized_root = 'https://%s/%s' % (parsed_root.netloc,
1255 parsed_root.path)
1256 if not normalized_root.endswith('.git'):
1257 normalized_root = '%s.git' % normalized_root
1258 elif parsed_root.scheme:
1259 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme
1260 continue
1261 else:
1262 # This is probably a local path.
1263 normalized_root = current_root.strip('/')
1264
1265 results[normalized_root] = current_rev
1266 else:
1267 print ('WARNING: %r is not recognized as a valid revision specification,'
1268 ' skipping' % revision)
1269 return results
1270
1271
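A usage sketch for parse_revisions, assuming this script is importable as bot_update (revision values are illustrative):

import bot_update

revs = bot_update.parse_revisions(
    ['HEAD', 'src/third_party/WebKit@deadbeef'], 'src')
# revs == {'src': 'HEAD', 'src/third_party/WebKit': 'deadbeef'}: the bare
# revision applies to the root solution, and the alt_root@revision form pins
# another directory.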
1272 def parse_args():
1273 parse = optparse.OptionParser()
1274
1275 parse.add_option('--issue', help='Issue number to patch from.')
1276 parse.add_option('--patchset',
1277 help='Patchset from issue to patch from, if applicable.')
1278 parse.add_option('--apply_issue_email_file',
1279 help='--email-file option passthrough for apply_issue.py.')
1280 parse.add_option('--apply_issue_key_file',
1281 help='--private-key-file option passthrough for '
1282 'apply_issue.py.')
1283 parse.add_option('--patch_url', help='Optional URL to SVN patch.')
1284 parse.add_option('--root', dest='patch_root',
1285 help='DEPRECATED: Use --patch_root.')
1286 parse.add_option('--patch_root', help='Directory to patch on top of.')
1287 parse.add_option('--rietveld_server',
1288 default='codereview.chromium.org',
1289 help='Rietveld server.')
1290 parse.add_option('--gerrit_repo',
1291 help='Gerrit repository to pull the ref from.')
1292 parse.add_option('--gerrit_ref', help='Gerrit ref to apply.')
1293 parse.add_option('--specs', help='Gclient spec.')
1294 parse.add_option('--master', help='Master name.')
1295 parse.add_option('-f', '--force', action='store_true',
1296 help='Bypass check to see if we want to be run. '
1297 'Should ONLY be used locally or by smart recipes.')
1298 parse.add_option('--revision_mapping',
1299 help='{"path/to/repo/": "property_name"}')
1300 parse.add_option('--revision_mapping_file',
1301 help=('Same as revision_mapping, except it is a path to a json'
1302 ' file containing that format.'))
1303 parse.add_option('--revision', action='append', default=[],
1304 help='Revision to check out. Can be an SVN revision number, '
1305 'git hash, or any form of git ref. Can prepend '
1306 'root@<rev> to specify which repository, where root '
1307 'is either a filesystem path, git https url, or '
1308 'svn url. To specify Tip of Tree, set rev to HEAD. '
1309 'To specify a git branch and an SVN rev, <rev> can be '
1310 'set to <branch>:<revision>.')
1311 parse.add_option('--output_manifest', action='store_true',
1312 help=('Add manifest json to the json output.'))
1313 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0],
1314 help='Hostname of the current machine, '
1315 'used for determining whether or not to activate.')
1316 parse.add_option('--builder_name', help='Name of the builder, '
1317 'used for determining whether or not to activate.')
1318 parse.add_option('--build_dir', default=os.getcwd())
1319 parse.add_option('--flag_file', default=path.join(os.getcwd(),
1320 'update.flag'))
1321 parse.add_option('--shallow', action='store_true',
1322 help='Use shallow clones for cache repositories.')
1323 parse.add_option('--gyp_env', action='append', default=[],
1324 help='Environment variables to pass into gclient runhooks.')
1325 parse.add_option('--clobber', action='store_true',
1326 help='Delete checkout first, always')
1327 parse.add_option('--bot_update_clobber', action='store_true', dest='clobber',
1328 help='(synonym for --clobber)')
1329 parse.add_option('-o', '--output_json',
1330 help='Output JSON information into a specified file')
1331 parse.add_option('--no_shallow', action='store_true',
1332 help='Bypass disk detection and never shallow clone. '
1333 'Does not override the --shallow flag')
1334 parse.add_option('--no_runhooks', action='store_true',
1335 help='Do not run hooks on official builder.')
1336 parse.add_option('--refs', action='append',
1337 help='Also fetch this refspec for the main solution(s). '
1338 'Eg. +refs/branch-heads/*')
1339 parse.add_option('--with_branch_heads', action='store_true',
1340 help='Always pass --with_branch_heads to gclient. This '
1341 'does the same thing as --refs +refs/branch-heads/*')
1342
1343
1344 options, args = parse.parse_args()
1345
1346 if not options.refs:
1347 options.refs = []
1348
1349 if options.with_branch_heads:
1350 options.refs.append(BRANCH_HEADS_REFSPEC)
1351 del options.with_branch_heads
1352
1353 try:
1354 if options.revision_mapping_file:
1355 if options.revision_mapping:
1356 print ('WARNING: Ignoring --revision_mapping: --revision_mapping_file '
1357 'was set at the same time as --revision_mapping?')
1358 with open(options.revision_mapping_file, 'r') as f:
1359 options.revision_mapping = json.load(f)
1360 elif options.revision_mapping:
1361 options.revision_mapping = json.loads(options.revision_mapping)
1362 except Exception as e:
1363 print (
1364 'WARNING: Caught exception while parsing revision_mapping*: %s'
1365 % (str(e),)
1366 )
1367
1368 return options, args
1369
1370
1371 def prepare(options, git_slns, active):
1372 """Prepares the target folder before we checkout."""
1373 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
1374 # If we're active now, but the flag file doesn't exist (we weren't active
1375 # last run) or vice versa, blow away all checkouts.
1376 if bool(active) != bool(check_flag(options.flag_file)):
1377 ensure_no_checkout(dir_names, '*')
1378 if options.output_json:
1379 # Make sure we tell recipes that we didn't run if the script exits here.
1380 emit_json(options.output_json, did_run=active)
1381 if active:
1382 if options.clobber:
1383 ensure_no_checkout(dir_names, '*')
1384 else:
1385 ensure_no_checkout(dir_names, '.svn')
1386 emit_flag(options.flag_file)
1387 else:
1388 delete_flag(options.flag_file)
1389 raise Inactive # This is caught in main() and we exit cleanly.
1390
1391 # Do a shallow checkout if the disk is less than 100GB.
1392 total_disk_space, free_disk_space = get_total_disk_space()
1393 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024))
1394 used_disk_space_gb = int((total_disk_space - free_disk_space)
1395 / (1024 * 1024 * 1024))
1396 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb)
1397 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb,
1398 total_disk_space_gb,
1399 percent_used)
1400 if not options.output_json:
1401 print '@@@STEP_TEXT@%s@@@' % step_text
1402 if not options.shallow:
1403 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD
1404 and not options.no_shallow)
1405
1406 # The first solution is where the primary DEPS file resides.
1407 first_sln = dir_names[0]
1408
1409 # Split all the revision specifications into a nice dict.
1410 print 'Revisions: %s' % options.revision
1411 revisions = parse_revisions(options.revision, first_sln)
1412 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln])
1413 return revisions, step_text
1414
1415
1416 def checkout(options, git_slns, specs, buildspec, master,
1417 svn_root, revisions, step_text):
1418 first_sln = git_slns[0]['name']
1419 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
1420 try:
1421 # Outer try is for catching patch failures and exiting gracefully.
1422 # Inner try is for catching gclient failures and retrying gracefully.
1423 try:
1424 checkout_parameters = dict(
1425 # First, pass in the base of what we want to check out.
1426 solutions=git_slns,
1427 revisions=revisions,
1428 first_sln=first_sln,
1429
1430 # Also, target os variables for gclient.
1431 target_os=specs.get('target_os', []),
1432 target_os_only=specs.get('target_os_only', False),
1433
1434 # Then, pass in information about how to patch.
1435 patch_root=options.patch_root,
1436 issue=options.issue,
1437 patchset=options.patchset,
1438 patch_url=options.patch_url,
1439 rietveld_server=options.rietveld_server,
1440 gerrit_repo=options.gerrit_repo,
1441 gerrit_ref=options.gerrit_ref,
1442 revision_mapping=options.revision_mapping,
1443 apply_issue_email_file=options.apply_issue_email_file,
1444 apply_issue_key_file=options.apply_issue_key_file,
1445
1446 # For official builders.
1447 buildspec=buildspec,
1448 gyp_env=options.gyp_env,
1449 runhooks=not options.no_runhooks,
1450
1451 # Finally, extra configurations such as shallowness of the clone.
1452 shallow=options.shallow,
1453 refs=options.refs)
1454 gclient_output = ensure_checkout(**checkout_parameters)
1455 except GclientSyncFailed:
1456 print 'We failed gclient sync, let\'s delete the checkout and retry.'
1457 ensure_no_checkout(dir_names, '*')
1458 gclient_output = ensure_checkout(**checkout_parameters)
1459 except PatchFailed as e:
1460 if options.output_json:
1461 # Tell recipes information such as root, got_revision, etc.
1462 emit_json(options.output_json,
1463 did_run=True,
1464 root=first_sln,
1465 log_lines=[('patch error', e.output),],
1466 patch_apply_return_code=e.code,
1467 patch_root=options.patch_root,
1468 patch_failure=True,
1469 step_text='%s PATCH FAILED' % step_text,
1470 fixed_revisions=revisions)
1471 else:
1472 # If we're not on recipes, tell annotator about our got_revisions.
1473 emit_log_lines('patch error', e.output)
1474 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text
1475 raise
1476
1477 # Revision is an svn revision, unless it's a git master.
1478 use_svn_rev = False
1479
1480 # Take care of got_revisions outputs.
1481 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {}))
1482 if options.revision_mapping:
1483 revision_mapping.update(options.revision_mapping)
1484
1485 # If the repo is not in the default GOT_REVISION_MAPPINGS and no
1486 # revision_mapping was specified on the command line, then
1487 # default to setting 'got_revision' based on the first solution.
1488 if not revision_mapping:
1489 revision_mapping[first_sln] = 'got_revision'
1490
1491 got_revisions = parse_got_revision(gclient_output, revision_mapping,
1492 use_svn_rev)
1493
1494 if not got_revisions:
1495 # TODO(hinoka): We should probably bail out here, but in the interest
1496 # of giving mis-configured bots some time to get fixed, use a dummy
1497 # revision here.
1498 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' }
1499 #raise Exception('No got_revision(s) found in gclient output')
1500
1501 if options.output_json:
1502 manifest = create_manifest() if options.output_manifest else None
1503 # Tell recipes information such as root, got_revision, etc.
1504 emit_json(options.output_json,
1505 did_run=True,
1506 root=first_sln,
1507 patch_root=options.patch_root,
1508 step_text=step_text,
1509 fixed_revisions=revisions,
1510 properties=got_revisions,
1511 manifest=manifest)
1512 else:
1513 # If we're not on recipes, tell annotator about our got_revisions.
1514 emit_properties(got_revisions)
1515
1516
1517 def print_help_text(force, output_json, active, master, builder, slave):
1518 """Print helpful messages to tell devs whats going on."""
1519 if force and output_json:
1520 recipe_force = 'Forced on by recipes'
1521 elif active and output_json:
1522 recipe_force = 'Off by recipes, but forced on by bot update'
1523 elif not active and output_json:
1524 recipe_force = 'Forced off by recipes'
1525 else:
1526 recipe_force = 'N/A. Was not called by recipes'
1527
1528 print BOT_UPDATE_MESSAGE % {
1529 'master': master or 'Not specified',
1530 'builder': builder or 'Not specified',
1531 'slave': slave or 'Not specified',
1532 'recipe': recipe_force,
1533 'CURRENT_DIR': CURRENT_DIR,
1534 'BUILDER_DIR': BUILDER_DIR,
1535 'SLAVE_DIR': SLAVE_DIR,
1536 'THIS_DIR': THIS_DIR,
1537 'SCRIPTS_DIR': SCRIPTS_DIR,
1538 'BUILD_DIR': BUILD_DIR,
1539 'ROOT_DIR': ROOT_DIR,
1540 'DEPOT_TOOLS_DIR': DEPOT_TOOLS_DIR,
1541 },
1542 print ACTIVATED_MESSAGE if active else NOT_ACTIVATED_MESSAGE
1543
1544
1545 def main():
1546 # Get inputs.
1547 options, _ = parse_args()
1548 builder = options.builder_name
1549 slave = options.slave_name
1550 master = options.master
1551
1552 # Check if this script should activate or not.
1553 active = True
1554
1555 # Print a helpful message to tell developers what's going on with this step.
1556 print_help_text(
1557 options.force, options.output_json, active, master, builder, slave)
1558
1559 # Parse, manipulate, and print the gclient solutions.
1560 specs = {}
1561 exec(options.specs, specs)
1562 svn_solutions = specs.get('solutions', [])
1563 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions)
1564 options.revision = maybe_ignore_revision(options.revision, buildspec)
1565
1566 solutions_printer(git_slns)
1567
1568 try:
1569 # Dun dun dun, the main part of bot_update.
1570 revisions, step_text = prepare(options, git_slns, active)
1571 checkout(options, git_slns, specs, buildspec, master, svn_root, revisions,
1572 step_text)
1573
1574 except Inactive:
1575 # Not active, should count as passing.
1576 pass
1577 except PatchFailed as e:
1578 emit_flag(options.flag_file)
1579 # Return a specific non-zero exit code for patch failure (because it is
1580 # a failure), but make it different from other failures to distinguish
1581 # between infra failures (independent of the patch author), and patch
1582 # failures (that patch author can fix). However, PatchFailure due to
1583 # download patch failure is still an infra problem.
1584 if e.code == 3:
1585 # Patch download problem.
1586 return 87
1587 # Genuine patch problem.
1588 return 88
1589 except Exception:
1590 # Unexpected failure.
1591 emit_flag(options.flag_file)
1592 raise
1593 else:
1594 emit_flag(options.flag_file)
1595
1596
1597 if __name__ == '__main__':
1598 sys.exit(main())
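For context, a hedged sketch of how a recipe-side wrapper might invoke this script, using only flags defined in parse_args() above; the master/builder/slave names, the spec, and the output path are hypothetical:

import json
import subprocess
import sys

cmd = [
    sys.executable, 'bot_update.py',
    '--master', 'chromium.example',
    '--builder_name', 'Example Builder',
    '--slave_name', 'example-slave',
    '--specs', "solutions = [{'name': 'src', 'url': "
               "'https://chromium.googlesource.com/chromium/src.git'}]",
    '--revision', 'src@HEAD',
    '--output_json', '/tmp/bot_update.json',
    '--force',
]
subprocess.check_call(cmd)
# On success, the output JSON carries the got_revision properties emitted by
# emit_json() above.
with open('/tmp/bot_update.json') as f:
    print(json.load(f).get('properties', {}))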