Chromium Code Reviews

Side by Side Diff: infra/recipe_modules/bot_update/resources/bot_update.py

Issue 1651323002: Revert of Adds bot_update to depot_tools. (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git@master
Patch Set: Update .gitignore for safety | Created 4 years, 10 months ago
1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # TODO(hinoka): Use logging.
7
8 import cStringIO
9 import codecs
10 import collections
11 import copy
12 import ctypes
13 import json
14 import optparse
15 import os
16 import pprint
17 import random
18 import re
19 import socket
20 import subprocess
21 import sys
22 import tempfile
23 import threading
24 import time
25 import urllib2
26 import urlparse
27 import uuid
28
29 import os.path as path
30
31 # How many bytes at a time to read from pipes.
32 BUF_SIZE = 256
33
34 # TODO(luqui): This is a horrible hack to identify build_internal when build
35 # is a recipe dependency. bot_update should not be depending on internal,
36 # rather the arrow should go the other way (or just be destroyed).
37 def check_dir(name, dirs, default=None):
38 for d in dirs:
39 d = path.abspath(d)
40 if path.basename(d) == name and path.isdir(d):
41 return d
42 return default
43
44
45 # Define a bunch of directory paths.
46 # Relative to the current working directory.
47 CURRENT_DIR = path.abspath(os.getcwd())
48 BUILDER_DIR = path.dirname(CURRENT_DIR)
49
50 # Relative to this script's filesystem path.
51 THIS_DIR = path.dirname(path.abspath(__file__))
52
53 CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com'
54 CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git'
55
56 # Official builds use buildspecs, so this is a special case.
57 BUILDSPEC_TYPE = collections.namedtuple('buildspec',
58 ('container', 'version'))
59 BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/'
60 '(build|branches|releases)/(.+)$')
61 GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/'
62 'buildspec')
63 BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*'
64
65 BUILDSPEC_COMMIT_RE = (
66 re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'),
67 re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'),
68 re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'),
69 )
70
71 # Regular expression that matches a single commit footer line.
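# For illustration, this matches footer lines such as
# "Cr-Commit-Position: refs/heads/master@{#306831}".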
72 COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)')
73
74 # Footer metadata keys for regular and gsubtreed mirrored commit positions.
75 COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position'
76 COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position'
77 # Regular expression to parse a commit position
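# e.g., "refs/heads/master@{#306831}" parses into a ref and a sequence number.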
78 COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')
79
80 # Regular expression to parse gclient's revinfo entries.
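# For illustration (entry values are hypothetical), a revinfo line looks like:
#   "src/v8: https://chromium.googlesource.com/v8/v8.git@1234abcd"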
81 REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')
82
83 # Used by 'ResolveSvnRevisionFromGitiles'
84 GIT_SVN_PROJECT_MAP = {
85 'webkit': {
86 'svn_url': 'svn://svn.chromium.org/blink',
87 'branch_map': [
88 (r'trunk', r'refs/heads/master'),
89 (r'branches/([^/]+)', r'refs/branch-heads/\1'),
90 ],
91 },
92 'v8': {
93 'svn_url': 'https://v8.googlecode.com/svn',
94 'branch_map': [
95 (r'trunk', r'refs/heads/candidates'),
96 (r'branches/bleeding_edge', r'refs/heads/master'),
97 (r'branches/([^/]+)', r'refs/branch-heads/\1'),
98 ],
99 },
100 'nacl': {
101 'svn_url': 'svn://svn.chromium.org/native_client',
102 'branch_map': [
103 (r'trunk/src/native_client', r'refs/heads/master'),
104 ],
105 },
106 }
107
108 # Key for the 'git-svn' ID metadata commit footer entry.
109 GIT_SVN_ID_FOOTER_KEY = 'git-svn-id'
110 # e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117
111 # ce2b1a6d-e550-0410-aec6-3dcde31c8c00
112 GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
113
114
115 # This is the git mirror of the buildspecs repository. We could rely on the svn
116 # checkout, now that the git buildspecs are checked in alongside the svn
117 # buildspecs, but we're going to want to pull all the buildspecs from here
118 # eventually anyhow, and there's already some logic to pull from git (for the
119 # old git_buildspecs.git repo), so just stick with that.
120 GIT_BUILDSPEC_REPO = (
121 'https://chrome-internal.googlesource.com/chrome/tools/buildspec')
122
123 # Copied from scripts/recipes/chromium.py.
124 GOT_REVISION_MAPPINGS = {
125 '/chrome/trunk/src': {
126 'src/': 'got_revision',
127 'src/native_client/': 'got_nacl_revision',
128 'src/tools/swarm_client/': 'got_swarm_client_revision',
129 'src/tools/swarming_client/': 'got_swarming_client_revision',
130 'src/third_party/WebKit/': 'got_webkit_revision',
131 'src/third_party/webrtc/': 'got_webrtc_revision',
132 'src/v8/': 'got_v8_revision',
133 }
134 }
135
136
137 BOT_UPDATE_MESSAGE = """
138 What is the "Bot Update" step?
139 ==============================
140
141 This step ensures that the source checkout on the bot (e.g. Chromium's src/ and
142 its dependencies) is checked out in a consistent state. This means that all of
143 the necessary repositories are checked out, no extra repositories are checked
144 out, and no locally modified files are present.
145
146 These actions used to be taken care of by the "gclient revert" and "update"
147 steps. However, those steps are known to be buggy and occasionally flaky. This
148 step has two main advantages over them:
149 * it only operates in Git, so the logic can be clearer and cleaner; and
150 * it is a slave-side script, so its behavior can be modified without
151 restarting the master.
152
153 Why Git, you ask? Because that is the direction that the Chromium project is
154 heading. This step is an integral part of the transition from using the SVN repo
155 at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while
156 we fully convert everything to Git. This message will get out of your way
157 eventually, and the waterfall will be a happier place because of it.
158
159 This step can be activated or deactivated independently on every builder on
160 every master. When it is active, the "gclient revert" and "update" steps become
161 no-ops. When it is inactive, it prints this message, cleans up after itself, and
162 lets everything else continue as though nothing has changed. Eventually, when
163 everything is stable enough, this step will replace them entirely.
164
165 Debugging information:
166 (master/builder/slave may be unspecified on recipes)
167 master: %(master)s
168 builder: %(builder)s
169 slave: %(slave)s
170 forced by recipes: %(recipe)s
171 CURRENT_DIR: %(CURRENT_DIR)s
172 BUILDER_DIR: %(BUILDER_DIR)s
173 THIS_DIR: %(THIS_DIR)s
174 bot_update.py is:"""
175
176 ACTIVATED_MESSAGE = """ACTIVE.
177 The bot will perform a Git checkout in this step.
178 The "gclient revert" and "update" steps are no-ops.
179
180 """
181
182 NOT_ACTIVATED_MESSAGE = """INACTIVE.
183 This step does nothing. You actually want to look at the "update" step.
184
185 """
186
187
188 GCLIENT_TEMPLATE = """solutions = %(solutions)s
189
190 cache_dir = r%(cache_dir)s
191 %(target_os)s
192 %(target_os_only)s
193 """
194
195 CHROME_INTERNAL_GIT_HOST = 'https://chrome-internal.googlesource.com'
196
197 RECOGNIZED_PATHS = {
198 # If SVN path matches key, the entire URL is rewritten to the Git url.
199 '/chrome/trunk/src':
200 CHROMIUM_SRC_URL,
201 '/chrome/trunk/src/tools/cros.DEPS':
202 CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git',
203 '/chrome-internal/trunk/src-internal':
204 CHROME_INTERNAL_GIT_HOST + '/chrome/src-internal.git',
205 }
206
207 # How many times to try before giving up.
208 ATTEMPTS = 5
209
210 # TODO(martiniss) change to be less hard coded.
211 GIT_CACHE_PATH = path.join(
212 THIS_DIR, os.pardir, os.pardir, os.pardir, os.pardir, 'git_cache.py')
213
214 # TODO(martiniss) change to be useable on swarming
215 CACHE_DIR = path.join(CURRENT_DIR, 'cache_dir')
216
217 # Because we print CACHE_DIR out into a .gclient file, and then later run
218 # eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets
219 # parsed as "E:[\x08][\x08]uild".
220 if sys.platform.startswith('win'):
221 CACHE_DIR = CACHE_DIR.replace('\\', '\\\\')
222
223 # TODO(martiniss) Ignoring windows for now, will add support later.
224 PATCH_TOOL = '/usr/bin/patch'
225
226 # If there is less than 100GB of disk space on the system, then we do
227 # a shallow checkout.
228 SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024
229
230
231 class SubprocessFailed(Exception):
232 def __init__(self, message, code, output):
233 Exception.__init__(self, message)
234 self.code = code
235 self.output = output
236
237
238 class PatchFailed(SubprocessFailed):
239 pass
240
241
242 class GclientSyncFailed(SubprocessFailed):
243 pass
244
245
246 class SVNRevisionNotFound(Exception):
247 pass
248
249
250 class InvalidDiff(Exception):
251 pass
252
253
254 class Inactive(Exception):
255 """Not really an exception, just used to exit early cleanly."""
256 pass
257
258
259 RETRY = object()
260 OK = object()
261 FAIL = object()
262
263
264 class PsPrinter(object):
265 def __init__(self, interval=300):
266 self.interval = interval
267 self.active = sys.platform.startswith('linux2')
268 self.thread = None
269
270 @staticmethod
271 def print_pstree():
272 """Debugging function used to print "ps auxwwf" for stuck processes."""
273 subprocess.call(['ps', 'auxwwf'])
274
275 def poke(self):
276 if self.active:
277 self.cancel()
278 self.thread = threading.Timer(self.interval, self.print_pstree)
279 self.thread.start()
280
281 def cancel(self):
282 if self.active and self.thread is not None:
283 self.thread.cancel()
284 self.thread = None
285
286
287 def call(*args, **kwargs): # pragma: no cover
288 """Interactive subprocess call."""
289 kwargs['stdout'] = subprocess.PIPE
290 kwargs['stderr'] = subprocess.STDOUT
291 kwargs.setdefault('bufsize', BUF_SIZE)
292 cwd = kwargs.get('cwd', os.getcwd())
293 result_fn = kwargs.pop('result_fn', lambda code, out: RETRY if code else OK)
294 stdin_data = kwargs.pop('stdin_data', None)
295 tries = kwargs.pop('tries', ATTEMPTS)
296 if stdin_data:
297 kwargs['stdin'] = subprocess.PIPE
298 out = cStringIO.StringIO()
299 new_env = kwargs.get('env', {})
300 env = copy.copy(os.environ)
301 env.update(new_env)
302 kwargs['env'] = env
303 attempt = 0
304 for attempt in range(1, tries + 1):
305 attempt_msg = ' (attempt #%d)' % attempt if attempt else ''
306 if new_env:
307 print '===Injecting Environment Variables==='
308 for k, v in sorted(new_env.items()):
309 print '%s: %s' % (k, v)
310 print '===Running %s%s===' % (' '.join(args), attempt_msg)
311 print 'In directory: %s' % cwd
312 start_time = time.time()
313 proc = subprocess.Popen(args, **kwargs)
314 if stdin_data:
315 proc.stdin.write(stdin_data)
316 proc.stdin.close()
317 psprinter = PsPrinter()
318 # This is here because passing 'sys.stdout' into stdout for proc will
319 # produce out of order output.
320 hanging_cr = False
321 while True:
322 psprinter.poke()
323 buf = proc.stdout.read(BUF_SIZE)
324 if not buf:
325 break
326 if hanging_cr:
327 buf = '\r' + buf
328 hanging_cr = buf.endswith('\r')
329 if hanging_cr:
330 buf = buf[:-1]
331 buf = buf.replace('\r\n', '\n').replace('\r', '\n')
332 sys.stdout.write(buf)
333 out.write(buf)
334 if hanging_cr:
335 sys.stdout.write('\n')
336 out.write('\n')
337 psprinter.cancel()
338
339 code = proc.wait()
340 elapsed_time = ((time.time() - start_time) / 60.0)
341 outval = out.getvalue()
342 result = result_fn(code, outval)
343 if result in (FAIL, RETRY):
344 print '===Failed in %.1f mins===' % elapsed_time
345 print
346 else:
347 print '===Succeeded in %.1f mins===' % elapsed_time
348 print
349 return outval
350 if result is FAIL:
351 break
352 if result is RETRY and attempt < tries:
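# Exponential backoff with a little jitter: e.g. attempt 1 sleeps ~4s,
# attempt 2 ~16-19s, attempt 3 ~64-76s.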
353 sleep_backoff = 4 ** attempt
354 sleep_time = random.randint(sleep_backoff, int(sleep_backoff * 1.2))
355 print '===backing off, sleeping for %d secs===' % sleep_time
356 time.sleep(sleep_time)
357
358 raise SubprocessFailed('%s failed with code %d in %s after %d attempts.' %
359 (' '.join(args), code, cwd, attempt),
360 code, outval)
361
362
363 def git(*args, **kwargs): # pragma: no cover
364 """Wrapper around call specifically for Git commands."""
365 if args and args[0] == 'cache':
366 # Rewrite "git cache" calls into "python git_cache.py".
367 cmd = (sys.executable, '-u', GIT_CACHE_PATH) + args[1:]
368 else:
369 git_executable = 'git'
370 # On windows, subprocess doesn't fuzzy-match 'git' to 'git.bat', so we
371 # have to do it explicitly. This is better than passing shell=True.
372 if sys.platform.startswith('win'):
373 git_executable += '.bat'
374 cmd = (git_executable,) + args
375 return call(*cmd, **kwargs)
376
377
378 def get_gclient_spec(solutions, target_os, target_os_only):
379 return GCLIENT_TEMPLATE % {
380 'solutions': pprint.pformat(solutions, indent=4),
381 'cache_dir': '"%s"' % CACHE_DIR,
382 'target_os': ('\ntarget_os=%s' % target_os) if target_os else '',
383 'target_os_only': '\ntarget_os_only=%s' % target_os_only
384 }
385
386
387 def maybe_ignore_revision(revision, buildspec):
388 """Handle builders that don't care what buildbot tells them to build.
389
390 This is especially the case with branch builders that build from buildspecs
391 and/or trigger off multiple repositories, where the --revision passed in has
392 nothing to do with the solution being built. Clearing the revision in this
393 case causes bot_update to use HEAD rather than trying to check out an
394 inappropriate version of the solution.
395 """
396 if buildspec and buildspec.container == 'branches':
397 return []
398 return revision
399
400
401 def solutions_printer(solutions):
402 """Prints gclient solution to stdout."""
403 print 'Gclient Solutions'
404 print '================='
405 for solution in solutions:
406 name = solution.get('name')
407 url = solution.get('url')
408 print '%s (%s)' % (name, url)
409 if solution.get('deps_file'):
410 print ' Dependencies file is %s' % solution['deps_file']
411 if 'managed' in solution:
412 print ' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF')
413 custom_vars = solution.get('custom_vars')
414 if custom_vars:
415 print ' Custom Variables:'
416 for var_name, var_value in sorted(custom_vars.iteritems()):
417 print ' %s = %s' % (var_name, var_value)
418 custom_deps = solution.get('custom_deps')
419 if custom_deps:
420 print ' Custom Dependencies:'
421 for deps_name, deps_value in sorted(custom_deps.iteritems()):
422 if deps_value:
423 print ' %s -> %s' % (deps_name, deps_value)
424 else:
425 print ' %s: Ignore' % deps_name
426 for k, v in solution.iteritems():
427 # Print out all the keys we don't know about.
428 if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps',
429 'managed']:
430 continue
431 print ' %s is %s' % (k, v)
432 print
433
434
435 def solutions_to_git(input_solutions):
436 """Modifies urls in solutions to point at Git repos.
437
438 Returns: (git solutions, svn root of first solution, buildspec) tuple.
439 """
440 assert input_solutions
441 solutions = copy.deepcopy(input_solutions)
442 first_solution = True
443 buildspec = None
444 for solution in solutions:
445 original_url = solution['url']
446 parsed_url = urlparse.urlparse(original_url)
447 parsed_path = parsed_url.path
448
449 # Rewrite SVN urls into Git urls.
450 buildspec_m = re.match(BUILDSPEC_RE, parsed_path)
451 if first_solution and buildspec_m:
452 solution['url'] = GIT_BUILDSPEC_PATH
453 buildspec = BUILDSPEC_TYPE(
454 container=buildspec_m.group(1),
455 version=buildspec_m.group(2),
456 )
457 solution['deps_file'] = path.join(buildspec.container, buildspec.version,
458 'DEPS')
459 elif parsed_path in RECOGNIZED_PATHS:
460 solution['url'] = RECOGNIZED_PATHS[parsed_path]
461 solution['deps_file'] = '.DEPS.git'
462 elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc:
463 pass
464 else:
465 print 'Warning: %s' % ('path %r not recognized' % parsed_path,)
466
467 # Strip out deps containing $$V8_REV$$, etc.
468 if 'custom_deps' in solution:
469 new_custom_deps = {}
470 for deps_name, deps_value in solution['custom_deps'].iteritems():
471 if deps_value and '$$' in deps_value:
472 print 'Dropping %s:%s from custom deps' % (deps_name, deps_value)
473 else:
474 new_custom_deps[deps_name] = deps_value
475 solution['custom_deps'] = new_custom_deps
476
477 if first_solution:
478 root = parsed_path
479 first_solution = False
480
481 solution['managed'] = False
482 # We don't want gclient to be using a safesync URL. Instead it should
483 # use the lkgr/lkcr branches/tags.
484 if 'safesync_url' in solution:
485 print 'Removing safesync url %s from %s' % (solution['safesync_url'],
486 parsed_path)
487 del solution['safesync_url']
488 return solutions, root, buildspec
489
490
491 def remove(target):
492 """Remove a target by moving it into build.dead."""
493 dead_folder = path.join(BUILDER_DIR, 'build.dead')
494 if not path.exists(dead_folder):
495 os.makedirs(dead_folder)
496 os.rename(target, path.join(dead_folder, uuid.uuid4().hex))
497
498
499 def ensure_no_checkout(dir_names, scm_dirname):
500 """Ensure that there is no undesired checkout under build/.
501
502 If there is an incorrect checkout under build/, then
503 move build/ to build.dead/.
504 This function will check each directory in dir_names.
505
506 scm_dirname is expected to be one of '.svn', '.git', or '*'.
507 """
508 assert scm_dirname in ['.svn', '.git', '*']
509 has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname))
510 for dir_name in dir_names)
511
512 if has_checkout or scm_dirname == '*':
513 build_dir = os.getcwd()
514 prefix = ''
515 if scm_dirname != '*':
516 prefix = '%s detected in checkout, ' % scm_dirname
517
518 for filename in os.listdir(build_dir):
519 deletion_target = path.join(build_dir, filename)
520 print '%sdeleting %s...' % (prefix, deletion_target),
521 remove(deletion_target)
522 print 'done'
523
524
525 def gclient_configure(solutions, target_os, target_os_only):
526 """Should do the same thing as gclient --spec='...'."""
527 with codecs.open('.gclient', mode='w', encoding='utf-8') as f:
528 f.write(get_gclient_spec(solutions, target_os, target_os_only))
529
530
531 def gclient_sync(with_branch_heads, shallow):
532 # We just need to allocate a filename.
533 fd, gclient_output_file = tempfile.mkstemp(suffix='.json')
534 os.close(fd)
535 gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
536 cmd = [gclient_bin, 'sync', '--verbose', '--reset', '--force',
537 '--ignore_locks', '--output-json', gclient_output_file,
538 '--nohooks', '--noprehooks', '--delete_unversioned_trees']
539 if with_branch_heads:
540 cmd += ['--with_branch_heads']
541 if shallow:
542 cmd += ['--shallow']
543
544 try:
545 call(*cmd, tries=1)
546 except SubprocessFailed as e:
547 # Throw a GclientSyncFailed exception so we can catch this independently.
548 raise GclientSyncFailed(e.message, e.code, e.output)
549 else:
550 with open(gclient_output_file) as f:
551 return json.load(f)
552 finally:
553 os.remove(gclient_output_file)
554
555
556 def gclient_runhooks(gyp_envs):
557 gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
558 env = dict([env_var.split('=', 1) for env_var in gyp_envs])
559 call(gclient_bin, 'runhooks', env=env)
560
561
562 def gclient_revinfo():
563 gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
564 return call(gclient_bin, 'revinfo', '-a') or ''
565
566
567 def create_manifest():
568 manifest = {}
569 output = gclient_revinfo()
570 for line in output.strip().splitlines():
571 match = REVINFO_RE.match(line.strip())
572 if match:
573 manifest[match.group(1)] = {
574 'repository': match.group(2),
575 'revision': match.group(3),
576 }
577 else:
578 print "WARNING: Couldn't match revinfo line:\n%s" % line
579 return manifest
580
581
582 def get_commit_message_footer_map(message):
583 """Returns: (dict) A dictionary of commit message footer entries.
584 """
585 footers = {}
586
587 # Extract the lines in the footer block.
588 lines = []
589 for line in message.strip().splitlines():
590 line = line.strip()
591 if len(line) == 0:
592 del lines[:]
593 continue
594 lines.append(line)
595
596 # Parse the footer
597 for line in lines:
598 m = COMMIT_FOOTER_ENTRY_RE.match(line)
599 if not m:
600 # If any single line isn't valid, the entire footer is invalid.
601 footers.clear()
602 return footers
603 footers[m.group(1)] = m.group(2).strip()
604 return footers
605
606
607 def get_commit_message_footer(message, key):
608 """Returns: (str/None) The footer value for 'key', or None if none was found.
609 """
610 return get_commit_message_footer_map(message).get(key)
611
612
613 def get_svn_rev(git_hash, dir_name):
614 log = git('log', '-1', git_hash, cwd=dir_name)
615 git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY)
616 if not git_svn_id:
617 return None
618 m = GIT_SVN_ID_RE.match(git_svn_id)
619 if not m:
620 return None
621 return int(m.group(2))
622
623
624 def get_git_hash(revision, branch, sln_dir):
625 """We want to search for the SVN revision on the git-svn branch.
626
627 Note that git will search backwards from origin/master.
628 """
629 match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision)
630 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
631 cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref]
632 result = git(*cmd, cwd=sln_dir).strip()
633 if result:
634 return result
635 raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' %
636 (revision, sln_dir))
637
638
639 def _last_commit_for_file(filename, repo_base):
640 cmd = ['log', '--format=%H', '--max-count=1', '--', filename]
641 return git(*cmd, cwd=repo_base).strip()
642
643
644 def need_to_run_deps2git(repo_base, deps_file, deps_git_file):
645 """Checks to see if we need to run deps2git.
646
647 Returns True if there was a DEPS change after the last .DEPS.git update
648 or if DEPS has local modifications.
649 """
650 # See if DEPS is dirty
651 deps_file_status = git(
652 'status', '--porcelain', deps_file, cwd=repo_base).strip()
653 if deps_file_status and deps_file_status.startswith('M '):
654 return True
655
656 last_known_deps_ref = _last_commit_for_file(deps_file, repo_base)
657 last_known_deps_git_ref = _last_commit_for_file(deps_git_file, repo_base)
658 merge_base_ref = git('merge-base', last_known_deps_ref,
659 last_known_deps_git_ref, cwd=repo_base).strip()
660
661 # If the merge base of the last DEPS and last .DEPS.git file is not
662 # equivalent to the hash of the last DEPS file, that means the DEPS file
663 # was committed after the last .DEPS.git file.
664 return last_known_deps_ref != merge_base_ref
665
666
667 def ensure_deps2git(solution, shallow):
668 repo_base = path.join(os.getcwd(), solution['name'])
669 deps_file = path.join(repo_base, 'DEPS')
670 deps_git_file = path.join(repo_base, '.DEPS.git')
671 if (not git('ls-files', 'DEPS', cwd=repo_base).strip() or
672 not git('ls-files', '.DEPS.git', cwd=repo_base).strip()):
673 return
674
675 print 'Checking if %s is newer than %s' % (deps_file, deps_git_file)
676 if not need_to_run_deps2git(repo_base, deps_file, deps_git_file):
677 return
678
679 print "HEY YOU"
680 print "="*80
681 print "deps2git is gone now bro :("
682 print "="*80
683 sys.exit(1)
684
685
686 def emit_log_lines(name, lines):
687 for line in lines.splitlines():
688 print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line)
689 print '@@@STEP_LOG_END@%s@@@' % name
690
691
692 def emit_properties(properties):
693 for property_name, property_value in sorted(properties.items()):
694 print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value)
695
696
697 # Derived from:
698 # http://code.activestate.com/recipes/577972-disk-usage/?in=user-4178764
699 def get_total_disk_space():
700 cwd = os.getcwd()
701 # Windows is the only platform that doesn't support os.statvfs, so
702 # we need to special case this.
703 if sys.platform.startswith('win'):
704 _, total, free = (ctypes.c_ulonglong(), ctypes.c_ulonglong(),
705 ctypes.c_ulonglong())
706 if sys.version_info >= (3,) or isinstance(cwd, unicode):
707 fn = ctypes.windll.kernel32.GetDiskFreeSpaceExW
708 else:
709 fn = ctypes.windll.kernel32.GetDiskFreeSpaceExA
710 ret = fn(cwd, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
711 if ret == 0:
712 # WinError() will fetch the last error code.
713 raise ctypes.WinError()
714 return (total.value, free.value)
715
716 else:
717 st = os.statvfs(cwd)
718 free = st.f_bavail * st.f_frsize
719 total = st.f_blocks * st.f_frsize
720 return (total, free)
721
722
723 def get_target_revision(folder_name, git_url, revisions):
724 normalized_name = folder_name.strip('/')
725 if normalized_name in revisions:
726 return revisions[normalized_name]
727 if git_url in revisions:
728 return revisions[git_url]
729 return None
730
731
732 def force_revision(folder_name, revision):
733 split_revision = revision.split(':', 1)
734 branch = 'master'
735 if len(split_revision) == 2:
736 # Support for "branch:revision" syntax.
737 branch, revision = split_revision
738
739 if revision and revision.upper() != 'HEAD':
740 if revision and revision.isdigit() and len(revision) < 40:
741 # revision is really an svn revision number; convert it into a git hash.
742 git_ref = get_git_hash(int(revision), branch, folder_name)
743 else:
744 # revision is actually a git hash or ref; we can just use it.
745 git_ref = revision
746 git('checkout', '--force', git_ref, cwd=folder_name)
747 else:
748 ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
749 git('checkout', '--force', ref, cwd=folder_name)
750
751 def git_checkout(solutions, revisions, shallow, refs):
752 build_dir = os.getcwd()
753 # Before we do anything, break all git_cache locks.
754 if path.isdir(CACHE_DIR):
755 git('cache', 'unlock', '-vv', '--force', '--all', '--cache-dir', CACHE_DIR)
756 for item in os.listdir(CACHE_DIR):
757 filename = os.path.join(CACHE_DIR, item)
758 if item.endswith('.lock'):
759 raise Exception('%s exists after cache unlock' % filename)
760 first_solution = True
761 for sln in solutions:
762 # This is so we can loop back and try again if we need to wait for the
763 # git mirrors to update from SVN.
764 done = False
765 tries_left = 60
766 while not done:
767 name = sln['name']
768 url = sln['url']
769 if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
770 # Experiments show there's little to be gained from
771 # a shallow clone of src.
772 shallow = False
773 sln_dir = path.join(build_dir, name)
774 s = ['--shallow'] if shallow else []
775 populate_cmd = (['cache', 'populate', '--ignore_locks', '-v',
776 '--cache-dir', CACHE_DIR] + s + [url])
777 for ref in refs:
778 populate_cmd.extend(['--ref', ref])
779 git(*populate_cmd)
780 mirror_dir = git(
781 'cache', 'exists', '--quiet', '--cache-dir', CACHE_DIR, url).strip()
782 clone_cmd = (
783 'clone', '--no-checkout', '--local', '--shared', mirror_dir, sln_dir)
784
785 try:
786 if not path.isdir(sln_dir):
787 git(*clone_cmd)
788 else:
789 git('remote', 'set-url', 'origin', mirror_dir, cwd=sln_dir)
790 git('fetch', 'origin', cwd=sln_dir)
791 for ref in refs:
792 refspec = '%s:%s' % (ref, ref.lstrip('+'))
793 git('fetch', 'origin', refspec, cwd=sln_dir)
794
795 revision = get_target_revision(name, url, revisions) or 'HEAD'
796 force_revision(sln_dir, revision)
797 done = True
798 except SubprocessFailed as e:
799 # Exited abnormally; there's probably something wrong.
800 # Let's wipe the checkout and try again.
801 tries_left -= 1
802 if tries_left > 0:
803 print 'Something failed: %s.' % str(e)
804 print 'waiting 5 seconds and trying again...'
805 time.sleep(5)
806 else:
807 raise
808 remove(sln_dir)
809 except SVNRevisionNotFound:
810 tries_left -= 1
811 if tries_left > 0:
812 # If we don't have the correct revision, wait and try again.
813 print 'We can\'t find revision %s.' % revision
814 print 'The svn to git replicator is probably falling behind.'
815 print 'waiting 5 seconds and trying again...'
816 time.sleep(5)
817 else:
818 raise
819
820 git('clean', '-dff', cwd=sln_dir)
821
822 if first_solution:
823 git_ref = git('log', '--format=%H', '--max-count=1',
824 cwd=sln_dir).strip()
825 first_solution = False
826 return git_ref
827
828
829 def _download(url):
830 """Fetch url and return content, with retries for flake."""
831 for attempt in xrange(ATTEMPTS):
832 try:
833 return urllib2.urlopen(url).read()
834 except Exception:
835 if attempt == ATTEMPTS - 1:
836 raise
837
838
839 def parse_diff(diff):
840 """Takes a unified diff and returns a list of diffed files and their diffs.
841
842 The return format is a list of pairs of:
843 (<filename>, <diff contents>)
844 <diff contents> is inclusive of the diff line.
845 """
846 result = []
847 current_diff = ''
848 current_header = None
849 for line in diff.splitlines():
850 # "diff" is for git style patches, and "Index: " is for SVN style patches.
851 if line.startswith('diff') or line.startswith('Index: '):
852 if current_header:
853 # If we are in a diff portion, then save the diff.
854 result.append((current_header, '%s\n' % current_diff))
855 git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line)
856 svn_header_match = re.match(r'Index: (.*)', line)
857
858 if git_header_match:
859 # First, see if it's a git-style header.
860 from_file = git_header_match.group(1)
861 to_file = git_header_match.group(2)
862 if from_file != to_file and from_file.startswith('a/'):
863 # Sometimes git prepends 'a/' and 'b/' in front of file paths.
864 from_file = from_file[2:]
865 current_header = from_file
866
867 elif svn_header_match:
868 # Otherwise, check if it's an SVN-style header.
869 current_header = svn_header_match.group(1)
870
871 else:
872 # Otherwise... I'm not really sure what to do with this.
873 raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' %
874 (line, diff))
875
876 current_diff = ''
877 current_diff += '%s\n' % line
878 if current_header:
879 # We hit EOF, gotta save the last diff.
880 result.append((current_header, current_diff))
881 return result
882
883
884 def get_svn_patch(patch_url):
885 """Fetch patch from patch_url, return list of (filename, diff)"""
886 svn_exe = 'svn.bat' if sys.platform.startswith('win') else 'svn'
887 patch_data = call(svn_exe, 'cat', patch_url)
888 return parse_diff(patch_data)
889
890 # REMOVE
891 def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None):
892 """Expects a list of (filename, diff), applies it on top of patch_root."""
893 if whitelist:
894 patches = [(name, diff) for name, diff in patches if name in whitelist]
895 elif blacklist:
896 patches = [(name, diff) for name, diff in patches if name not in blacklist]
897 diffs = [diff for _, diff in patches]
898 patch = ''.join(diffs)
899
900 if patch:
901 print '===Patching files==='
902 for filename, _ in patches:
903 print 'Patching %s' % filename
904 try:
905 call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward',
906 stdin_data=patch, cwd=patch_root, tries=1)
907 for filename, _ in patches:
908 full_filename = path.abspath(path.join(patch_root, filename))
909 git('add', full_filename, cwd=path.dirname(full_filename))
910 except SubprocessFailed as e:
911 raise PatchFailed(e.message, e.code, e.output)
912
913 def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision,
914 email_file, key_file, whitelist=None, blacklist=None):
915 apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win')
916 else 'apply_issue')
917 cmd = [apply_issue_bin,
918 # The patch will be applied on top of this directory.
919 '--root_dir', root,
920 # Tell apply_issue how to fetch the patch.
921 '--issue', issue,
922 '--server', server,
923 # Always run apply_issue.py, otherwise it would see update.flag
924 # and then bail out.
925 '--force',
926 # Don't run gclient sync when it sees a DEPS change.
927 '--ignore_deps',
928 # TODO(tandrii): remove after http://crbug.com/537417 is resolved.
929 # Temporarily enable verbosity to see if Rietveld requests are actually
930 # retried.
931 '-v', '-v', # = logging.DEBUG level.
932 ]
933 # Use an oauth key file if specified.
934 if email_file and key_file:
935 cmd.extend(['--email-file', email_file, '--private-key-file', key_file])
936 else:
937 cmd.append('--no-auth')
938
939 if patchset:
940 cmd.extend(['--patchset', patchset])
941 if whitelist:
942 for item in whitelist:
943 cmd.extend(['--whitelist', item])
944 elif blacklist:
945 for item in blacklist:
946 cmd.extend(['--blacklist', item])
947
948 # Only try once, since subsequent failures hide the real failure.
949 try:
950 call(*cmd, tries=1)
951 except SubprocessFailed as e:
952 raise PatchFailed(e.message, e.code, e.output)
953
954 def apply_gerrit_ref(gerrit_repo, gerrit_ref, root):
955 gerrit_repo = gerrit_repo or 'origin'
956 assert gerrit_ref
957 try:
958 base_rev = git('rev-parse', 'HEAD', cwd=root).strip()
959 git('retry', 'fetch', gerrit_repo, gerrit_ref, cwd=root, tries=1)
960 git('checkout', 'FETCH_HEAD', cwd=root)
961 git('reset', '--soft', base_rev, cwd=root)
962 except SubprocessFailed as e:
963 raise PatchFailed(e.message, e.code, e.output)
964
965 def check_flag(flag_file):
966 """Returns True if the flag file is present."""
967 return os.path.isfile(flag_file)
968
969
970 def delete_flag(flag_file):
971 """Remove bot update flag."""
972 if os.path.isfile(flag_file):
973 os.remove(flag_file)
974
975
976 def emit_flag(flag_file):
977 """Deposit a bot update flag on the system to tell gclient not to run."""
978 print 'Emitting flag file at %s' % flag_file
979 with open(flag_file, 'wb') as f:
980 f.write('Success!')
981
982
983 def get_commit_position_for_git_svn(url, revision):
984 """Generates a commit position string for a 'git-svn' URL/revision.
985
986 If the 'git-svn' URL maps to a known project, we will construct a commit
987 position branch value by applying substitution on the SVN URL.
988 """
989 # Identify the base URL so we can strip off trunk/branch name
990 project_config = branch = None
991 for _, project_config in GIT_SVN_PROJECT_MAP.iteritems():
992 if url.startswith(project_config['svn_url']):
993 branch = url[len(project_config['svn_url']):]
994 break
995
996 if branch:
997 # Strip any leading slashes
998 branch = branch.lstrip('/')
999
1000 # Try and map the branch
1001 for pattern, repl in project_config.get('branch_map', ()):
1002 nbranch, subn = re.subn(pattern, repl, branch, count=1)
1003 if subn:
1004 print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % (
1005 branch, nbranch)
1006 branch = nbranch
1007 break
1008 else:
1009 # Use generic 'svn' branch
1010 print 'INFO: Could not resolve project for SVN URL %r' % (url,)
1011 branch = 'svn'
1012 return '%s@{#%s}' % (branch, revision)
1013
1014
1015 def get_commit_position(git_path, revision='HEAD'):
1016 """Dumps the 'git' log for a specific revision and parses out the commit
1017 position.
1018
1019 If a commit position metadata key is found, its value will be returned.
1020
1021 Otherwise, we will search for a 'git-svn' metadata entry. If one is found,
1022 we will compose a commit position from it, using its SVN revision value as
1023 the revision.
1024
1025 If the 'git-svn' URL maps to a known project, we will construct a commit
1026 position branch value by truncating the URL, mapping 'trunk' to
1027 "refs/heads/master". Otherwise, we will return the generic branch, 'svn'.
1028 """
1029 git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path)
1030 footer_map = get_commit_message_footer_map(git_log)
1031
1032 # Search for commit position metadata
1033 value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or
1034 footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY))
1035 if value:
1036 return value
1037
1038 # Compose a commit position from 'git-svn' metadata
1039 value = footer_map.get(GIT_SVN_ID_FOOTER_KEY)
1040 if value:
1041 m = GIT_SVN_ID_RE.match(value)
1042 if not m:
1043 raise ValueError("Invalid 'git-svn' value: [%s]" % (value,))
1044 return get_commit_position_for_git_svn(m.group(1), m.group(2))
1045 return None
1046
1047
1048 def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs):
1049 """Translate git gclient revision mapping to build properties.
1050
1051 If use_svn_revs is True, then translate git hashes in the revision mapping
1052 to svn revision numbers.
1053 """
1054 properties = {}
1055 solutions_output = {
1056 # Make sure path always ends with a single slash.
1057 '%s/' % path.rstrip('/') : solution_output for path, solution_output
1058 in gclient_output['solutions'].iteritems()
1059 }
1060 for dir_name, property_name in got_revision_mapping.iteritems():
1061 # Make sure dir_name always ends with a single slash.
1062 dir_name = '%s/' % dir_name.rstrip('/')
1063 if dir_name not in solutions_output:
1064 continue
1065 solution_output = solutions_output[dir_name]
1066 if solution_output.get('scm') is None:
1067 # This is an ignored DEPS, so the output got_revision should be 'None'.
1068 git_revision = revision = commit_position = None
1069 else:
1070 # Since we are using .DEPS.git, everything had better be git.
1071 assert solution_output.get('scm') == 'git'
1072 git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip()
1073 if use_svn_revs:
1074 revision = get_svn_rev(git_revision, dir_name)
1075 if not revision:
1076 revision = git_revision
1077 else:
1078 revision = git_revision
1079 commit_position = get_commit_position(dir_name)
1080
1081 properties[property_name] = revision
1082 if revision != git_revision:
1083 properties['%s_git' % property_name] = git_revision
1084 if commit_position:
1085 properties['%s_cp' % property_name] = commit_position
1086
1087 return properties
1088
1089
1090 def emit_json(out_file, did_run, gclient_output=None, **kwargs):
1091 """Write run information into a JSON file."""
1092 output = {}
1093 output.update(gclient_output if gclient_output else {})
1094 output.update({'did_run': did_run})
1095 output.update(kwargs)
1096 with open(out_file, 'wb') as f:
1097 f.write(json.dumps(output, sort_keys=True))
1098
1099
1100 def ensure_deps_revisions(deps_url_mapping, solutions, revisions):
1101 """Ensure correct DEPS revisions, ignores solutions."""
1102 for deps_name, deps_data in sorted(deps_url_mapping.items()):
1103 if deps_name.strip('/') in solutions:
1104 # This has already been forced to the correct solution by git_checkout().
1105 continue
1106 revision = get_target_revision(deps_name, deps_data.get('url', None),
1107 revisions)
1108 if not revision:
1109 continue
1110 # TODO(hinoka): Catch SVNRevisionNotFound error maybe?
1111 git('fetch', 'origin', cwd=deps_name)
1112 force_revision(deps_name, revision)
1113
1114
1115 def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only,
1116 patch_root, issue, patchset, patch_url, rietveld_server,
1117 gerrit_repo, gerrit_ref, revision_mapping,
1118 apply_issue_email_file, apply_issue_key_file, buildspec,
1119 gyp_env, shallow, runhooks, refs):
1120 # Get a checkout of each solution, without DEPS or hooks.
1121 # Calling git directly because there is no way to run Gclient without
1122 # invoking DEPS.
1123 print 'Fetching Git checkout'
1124
1125 git_ref = git_checkout(solutions, revisions, shallow, refs)
1126
1127 patches = None
1128 if patch_url:
1129 patches = get_svn_patch(patch_url)
1130
1131 already_patched = []
1132 patch_root = patch_root or ''
1133 for solution in solutions:
1134 if (patch_root == solution['name'] or
1135 solution['name'].startswith(patch_root + '/')):
1136 relative_root = solution['name'][len(patch_root) + 1:]
1137 target = '/'.join([relative_root, 'DEPS']).lstrip('/')
1138 if patches:
1139 print "HEY YOU"
1140 print "="*80
1141 print "You should not be trying to patch in a svn change."
1142 print "="*80
1143 sys.exit(1)
1144 elif issue:
1145 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
1146 revision_mapping, git_ref, apply_issue_email_file,
1147 apply_issue_key_file, whitelist=[target])
1148 already_patched.append(target)
1149
1150 if not buildspec:
1151 # Run deps2git if there is a DEPS change after the last .DEPS.git commit.
1152 for solution in solutions:
1153 ensure_deps2git(solution, shallow)
1154
1155 # Ensure our build/ directory is set up with the correct .gclient file.
1156 gclient_configure(solutions, target_os, target_os_only)
1157
1158 # Let gclient do the DEPS syncing.
1159 # The branch-head refspec is a special case because it's possible that Chrome
1160 # src, which contains the branch-head refspecs, is DEPSed in.
1161 gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs,
1162 shallow)
1163
1164 # Now that gclient_sync has finished, we should revert any .DEPS.git so that
1165 # presubmit doesn't complain about it being modified.
1166 if (not buildspec and
1167 git('ls-files', '.DEPS.git', cwd=first_sln).strip()):
1168 git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln)
1169
1170 if buildspec and runhooks:
1171 # Run gclient runhooks if we're on an official builder.
1172 # TODO(hinoka): Remove this when the official builders run their own
1173 # runhooks step.
1174 gclient_runhooks(gyp_env)
1175
1176 # Finally, ensure that all DEPS are pinned to the correct revision.
1177 dir_names = [sln['name'] for sln in solutions]
1178 ensure_deps_revisions(gclient_output.get('solutions', {}),
1179 dir_names, revisions)
1180 # Apply the rest of the patch here (sans DEPS)
1181 if patches:
1182 apply_svn_patch(patch_root, patches, blacklist=already_patched)
1183 elif issue:
1184 apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
1185 revision_mapping, git_ref, apply_issue_email_file,
1186 apply_issue_key_file, blacklist=already_patched)
1187 elif gerrit_ref:
1188 apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root)
1189
1190 # Reset the deps_file path in the solutions so that hooks get run properly.
1191 for sln in solutions:
1192 sln['deps_file'] = sln.get('deps_file', 'DEPS').replace('.DEPS.git', 'DEPS')
1193 gclient_configure(solutions, target_os, target_os_only)
1194
1195 return gclient_output
1196
1197
1198 def parse_revisions(revisions, root):
1199 """Turn a list of revision specs into a nice dictionary.
1200
1201 We will always return a dict with {root: something}. By default, if no
1202 revision is specified for root, its revision will be 'HEAD'.
1203 """
1204 results = {root.strip('/'): 'HEAD'}
1205 expanded_revisions = []
1206 for revision in revisions:
1207 # Allow rev1,rev2,rev3 format.
1208 # TODO(hinoka): Delete this when webkit switches to recipes.
1209 expanded_revisions.extend(revision.split(','))
1210 for revision in expanded_revisions:
1211 split_revision = revision.split('@')
1212 if len(split_revision) == 1:
1213 # This is just a plain revision, set it as the revision for root.
1214 results[root] = split_revision[0]
1215 elif len(split_revision) == 2:
1216 # This is an alt_root@revision argument.
1217 current_root, current_rev = split_revision
1218
1219 # We want to normalize svn/git urls into .git urls.
1220 parsed_root = urlparse.urlparse(current_root)
1221 if parsed_root.scheme == 'svn':
1222 if parsed_root.path in RECOGNIZED_PATHS:
1223 normalized_root = RECOGNIZED_PATHS[parsed_root.path]
1224 else:
1225 print 'WARNING: SVN path %s not recognized, ignoring' % current_root
1226 continue
1227 elif parsed_root.scheme in ['http', 'https']:
1228 normalized_root = 'https://%s/%s' % (parsed_root.netloc,
1229 parsed_root.path)
1230 if not normalized_root.endswith('.git'):
1231 normalized_root = '%s.git' % normalized_root
1232 elif parsed_root.scheme:
1233 print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme
1234 continue
1235 else:
1236 # This is probably a local path.
1237 normalized_root = current_root.strip('/')
1238
1239 results[normalized_root] = current_rev
1240 else:
1241 print ('WARNING: %r is not recognized as a valid revision specification,'
1242 ' skipping' % revision)
1243 return results
1244
1245
1246 def parse_args():
1247 parse = optparse.OptionParser()
1248
1249 parse.add_option('--issue', help='Issue number to patch from.')
1250 parse.add_option('--patchset',
1251 help='Patchset from issue to patch from, if applicable.')
1252 parse.add_option('--apply_issue_email_file',
1253 help='--email-file option passthrough for apply_patch.py.')
1254 parse.add_option('--apply_issue_key_file',
1255 help='--private-key-file option passthrough for '
1256 'apply_patch.py.')
1257 parse.add_option('--patch_url', help='Optional URL to SVN patch.')
1258 parse.add_option('--root', dest='patch_root',
1259 help='DEPRECATED: Use --patch_root.')
1260 parse.add_option('--patch_root', help='Directory to patch on top of.')
1261 parse.add_option('--rietveld_server',
1262 default='codereview.chromium.org',
1263 help='Rietveld server.')
1264 parse.add_option('--gerrit_repo',
1265 help='Gerrit repository to pull the ref from.')
1266 parse.add_option('--gerrit_ref', help='Gerrit ref to apply.')
1267 parse.add_option('--specs', help='Gclient spec.')
1268 parse.add_option('--master', help='Master name.')
1269 parse.add_option('-f', '--force', action='store_true',
1270 help='Bypass check to see if we want to be run. '
1271 'Should ONLY be used locally or by smart recipes.')
1272 parse.add_option('--revision_mapping',
1273 help='{"path/to/repo/": "property_name"}')
1274 parse.add_option('--revision_mapping_file',
1275 help=('Same as revision_mapping, except it is a path to a json'
1276 ' file containing that format.'))
1277 parse.add_option('--revision', action='append', default=[],
1278 help='Revision to check out. Can be an SVN revision number, '
1279 'git hash, or any form of git ref. Can prepend '
1280 'root@<rev> to specify which repository, where root '
1281 'is either a filesystem path, git https url, or '
1282 'svn url. To specify Tip of Tree, set rev to HEAD. '
1283 'To specify a git branch and an SVN rev, <rev> can be '
1284 'set to <branch>:<revision>.')
1285 parse.add_option('--output_manifest', action='store_true',
1286 help=('Add manifest json to the json output.'))
1287 parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0],
1288 help='Hostname of the current machine, '
1289 'used for determining whether or not to activate.')
1290 parse.add_option('--builder_name', help='Name of the builder, '
1291 'used for determining whether or not to activate.')
1292 parse.add_option('--build_dir', default=os.getcwd())
1293 parse.add_option('--flag_file', default=path.join(os.getcwd(),
1294 'update.flag'))
1295 parse.add_option('--shallow', action='store_true',
1296 help='Use shallow clones for cache repositories.')
1297 parse.add_option('--gyp_env', action='append', default=[],
1298 help='Environment variables to pass into gclient runhooks.')
1299 parse.add_option('--clobber', action='store_true',
1300 help='Delete checkout first, always')
1301 parse.add_option('--bot_update_clobber', action='store_true', dest='clobber',
1302 help='(synonym for --clobber)')
1303 parse.add_option('-o', '--output_json',
1304 help='Output JSON information into a specified file')
1305 parse.add_option('--no_shallow', action='store_true',
1306 help='Bypass disk detection and never shallow clone. '
1307 'Does not override the --shallow flag')
1308 parse.add_option('--no_runhooks', action='store_true',
1309 help='Do not run hooks on official builder.')
1310 parse.add_option('--refs', action='append',
1311 help='Also fetch this refspec for the main solution(s). '
1312 'Eg. +refs/branch-heads/*')
1313 parse.add_option('--with_branch_heads', action='store_true',
1314 help='Always pass --with_branch_heads to gclient. This '
1315 'does the same thing as --refs +refs/branch-heads/*')
1316
1317
1318 options, args = parse.parse_args()
1319
1320 if not options.refs:
1321 options.refs = []
1322
1323 if options.with_branch_heads:
1324 options.refs.append(BRANCH_HEADS_REFSPEC)
1325 del options.with_branch_heads
1326
1327 try:
1328 if options.revision_mapping_file:
1329 if options.revision_mapping:
1330 print ('WARNING: Ignoring --revision_mapping: --revision_mapping_file '
1331 'was set at the same time as --revision_mapping?')
1332 with open(options.revision_mapping_file, 'r') as f:
1333 options.revision_mapping = json.load(f)
1334 elif options.revision_mapping:
1335 options.revision_mapping = json.loads(options.revision_mapping)
1336 except Exception as e:
1337 print (
1338 'WARNING: Caught exception while parsing revision_mapping*: %s'
1339 % (str(e),)
1340 )
1341
1342 return options, args
1343
1344
1345 def prepare(options, git_slns, active):
1346 """Prepares the target folder before we checkout."""
1347 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
1348 # If we're active now, but the flag file doesn't exist (we weren't active
1349 # last run) or vice versa, blow away all checkouts.
1350 if bool(active) != bool(check_flag(options.flag_file)):
1351 ensure_no_checkout(dir_names, '*')
1352 if options.output_json:
1353 # Make sure we tell recipes that we didn't run if the script exits here.
1354 emit_json(options.output_json, did_run=active)
1355 if active:
1356 if options.clobber:
1357 ensure_no_checkout(dir_names, '*')
1358 else:
1359 ensure_no_checkout(dir_names, '.svn')
1360 emit_flag(options.flag_file)
1361 else:
1362 delete_flag(options.flag_file)
1363 raise Inactive # This is caught in main() and we exit cleanly.
1364
1365 # Do a shallow checkout if the disk is less than 100GB.
1366 total_disk_space, free_disk_space = get_total_disk_space()
1367 total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024))
1368 used_disk_space_gb = int((total_disk_space - free_disk_space)
1369 / (1024 * 1024 * 1024))
1370 percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb)
1371 step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb,
1372 total_disk_space_gb,
1373 percent_used)
1374 if not options.output_json:
1375 print '@@@STEP_TEXT@%s@@@' % step_text
1376 if not options.shallow:
1377 options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD
1378 and not options.no_shallow)
1379
1380 # The first solution is where the primary DEPS file resides.
1381 first_sln = dir_names[0]
1382
1383 # Split all the revision specifications into a nice dict.
1384 print 'Revisions: %s' % options.revision
1385 revisions = parse_revisions(options.revision, first_sln)
1386 print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln])
1387 return revisions, step_text
1388
1389
1390 def checkout(options, git_slns, specs, buildspec, master,
1391 svn_root, revisions, step_text):
1392 first_sln = git_slns[0]['name']
1393 dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
1394 try:
1395 # Outer try is for catching patch failures and exiting gracefully.
1396 # Inner try is for catching gclient failures and retrying gracefully.
1397 try:
1398 checkout_parameters = dict(
1399 # First, pass in the base of what we want to check out.
1400 solutions=git_slns,
1401 revisions=revisions,
1402 first_sln=first_sln,
1403
1404 # Also, target os variables for gclient.
1405 target_os=specs.get('target_os', []),
1406 target_os_only=specs.get('target_os_only', False),
1407
1408 # Then, pass in information about how to patch.
1409 patch_root=options.patch_root,
1410 issue=options.issue,
1411 patchset=options.patchset,
1412 patch_url=options.patch_url,
1413 rietveld_server=options.rietveld_server,
1414 gerrit_repo=options.gerrit_repo,
1415 gerrit_ref=options.gerrit_ref,
1416 revision_mapping=options.revision_mapping,
1417 apply_issue_email_file=options.apply_issue_email_file,
1418 apply_issue_key_file=options.apply_issue_key_file,
1419
1420 # For official builders.
1421 buildspec=buildspec,
1422 gyp_env=options.gyp_env,
1423 runhooks=not options.no_runhooks,
1424
1425 # Finally, extra configurations such as shallowness of the clone.
1426 shallow=options.shallow,
1427 refs=options.refs)
1428 gclient_output = ensure_checkout(**checkout_parameters)
1429 except GclientSyncFailed:
1430 print "We failed gclient sync, let's delete the checkout and retry."
1431 ensure_no_checkout(dir_names, '*')
1432 gclient_output = ensure_checkout(**checkout_parameters)
1433 except PatchFailed as e:
1434 if options.output_json:
1435 # Tell recipes information such as root, got_revision, etc.
1436 emit_json(options.output_json,
1437 did_run=True,
1438 root=first_sln,
1439 log_lines=[('patch error', e.output),],
1440 patch_apply_return_code=e.code,
1441 patch_root=options.patch_root,
1442 patch_failure=True,
1443 step_text='%s PATCH FAILED' % step_text,
1444 fixed_revisions=revisions)
1445 else:
1446 # If we're not on recipes, tell annotator about our got_revisions.
1447 emit_log_lines('patch error', e.output)
1448 print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text
1449 raise
1450
1451 # Revision is an svn revision, unless it's a git master.
1452 use_svn_rev = False
1453
1454 # Take care of got_revisions outputs.
1455 revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {}))
1456 if options.revision_mapping:
1457 revision_mapping.update(options.revision_mapping)
1458
1459 # If the repo is not in the default GOT_REVISION_MAPPINGS and no
1460 # revision_mapping was specified on the command line, then
1461 # default to setting 'got_revision' based on the first solution.
1462 if not revision_mapping:
1463 revision_mapping[first_sln] = 'got_revision'
1464
1465 got_revisions = parse_got_revision(gclient_output, revision_mapping,
1466 use_svn_rev)
1467
1468 if not got_revisions:
1469 # TODO(hinoka): We should probably bail out here, but in the interest
1470 # of giving mis-configured bots some time to get fixed use a dummy
1471 # revision here.
1472 got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' }
1473 #raise Exception('No got_revision(s) found in gclient output')
1474
1475 if options.output_json:
1476 manifest = create_manifest() if options.output_manifest else None
1477 # Tell recipes information such as root, got_revision, etc.
1478 emit_json(options.output_json,
1479 did_run=True,
1480 root=first_sln,
1481 patch_root=options.patch_root,
1482 step_text=step_text,
1483 fixed_revisions=revisions,
1484 properties=got_revisions,
1485 manifest=manifest)
1486 else:
1487 # If we're not on recipes, tell annotator about our got_revisions.
1488 emit_properties(got_revisions)
1489
1490
1491 def print_help_text(force, output_json, active, master, builder, slave):
1492 """Print helpful messages to tell devs whats going on."""
1493 if force and output_json:
1494 recipe_force = 'Forced on by recipes'
1495 elif active and output_json:
1496 recipe_force = 'Off by recipes, but forced on by bot update'
1497 elif not active and output_json:
1498 recipe_force = 'Forced off by recipes'
1499 else:
1500 recipe_force = 'N/A. Was not called by recipes'
1501
1502 print BOT_UPDATE_MESSAGE % {
1503 'master': master or 'Not specified',
1504 'builder': builder or 'Not specified',
1505 'slave': slave or 'Not specified',
1506 'recipe': recipe_force,
1507 'CURRENT_DIR': CURRENT_DIR,
1508 'BUILDER_DIR': BUILDER_DIR,
1509 'THIS_DIR': THIS_DIR,
1510 },
1511 print ACTIVATED_MESSAGE if active else NOT_ACTIVATED_MESSAGE
1512
1513
1514 def main():
1515 # Get inputs.
1516 options, _ = parse_args()
1517 builder = options.builder_name
1518 slave = options.slave_name
1519 master = options.master
1520
1521 # Check if this script should activate or not.
1522 active = True
1523
1524 # Print a helpful message to tell developers what's going on with this step.
1525 print_help_text(
1526 options.force, options.output_json, active, master, builder, slave)
1527
1528 # Parse, manipulate, and print the gclient solutions.
1529 specs = {}
1530 exec(options.specs, specs)
1531 svn_solutions = specs.get('solutions', [])
1532 git_slns, svn_root, buildspec = solutions_to_git(svn_solutions)
1533 options.revision = maybe_ignore_revision(options.revision, buildspec)
1534
1535 solutions_printer(git_slns)
1536
1537 try:
1538 # Dun dun dun, the main part of bot_update.
1539 revisions, step_text = prepare(options, git_slns, active)
1540 checkout(options, git_slns, specs, buildspec, master, svn_root, revisions,
1541 step_text)
1542
1543 except Inactive:
1544 # Not active, should count as passing.
1545 pass
1546 except PatchFailed as e:
1547 emit_flag(options.flag_file)
1548 # Return a specific non-zero exit code for patch failure (because it is
1549 # a failure), but make it different from other failures to distinguish
1550 # between infra failures (independent of the patch author) and patch
1551 # failures (which the patch author can fix). However, a PatchFailed due
1552 # to a patch download failure is still an infra problem.
1553 if e.code == 3:
1554 # Patch download problem.
1555 return 87
1556 # Genuine patch problem.
1557 return 88
1558 except Exception:
1559 # Unexpected failure.
1560 emit_flag(options.flag_file)
1561 raise
1562 else:
1563 emit_flag(options.flag_file)
1564
1565
1566 if __name__ == '__main__':
1567 sys.exit(main())