Chromium Code Reviews

Side by Side Diff: scripts/slave/annotated_run.py

Issue 1501663002: annotated_run.py: Add LogDog bootstrapping. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/build
Patch Set: Update service account path now that it exists: https://chromereviews.googleplex.com/341937013 Created 4 years, 11 months ago
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 import argparse 6 import argparse
7 import collections 7 import collections
8 import contextlib 8 import contextlib
9 import json 9 import json
10 import logging 10 import logging
11 import os 11 import os
12 import platform 12 import platform
13 import shutil 13 import shutil
14 import socket 14 import socket
15 import subprocess 15 import subprocess
16 import sys 16 import sys
17 import tempfile 17 import tempfile
18 18
19 19
20 # Install Infra build environment. 20 # Install Infra build environment.
21 BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname( 21 BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(
22 os.path.abspath(__file__)))) 22 os.path.abspath(__file__))))
23 sys.path.insert(0, os.path.join(BUILD_ROOT, 'scripts')) 23 sys.path.insert(0, os.path.join(BUILD_ROOT, 'scripts'))
24 24
25 from common import annotator 25 from common import annotator
26 from common import chromium_utils 26 from common import chromium_utils
27 from common import env 27 from common import env
28 from common import master_cfg_utils 28 from common import master_cfg_utils
29 from slave import gce
29 30
30 # Logging instance. 31 # Logging instance.
31 LOGGER = logging.getLogger('annotated_run') 32 LOGGER = logging.getLogger('annotated_run')
32 33
34 # Return codes used by Butler/Annotee to indicate their failure (as opposed to
35 # a forwarded return code from the underlying process).
36 LOGDOG_ERROR_RETURNCODES = (
37 # Butler runtime error.
38 250,
39 # Annotee runtime error.
40 251,
41 )
42
 43 # Sentinel value that, if present in a master's whitelist entry, matches all
 44 # builders under that master.
45 WHITELIST_ALL = '*'
46
 47 # Map of {master} => set([{builder}|WHITELIST_ALL]) identifying the masters
 48 # and builders that are whitelisted for experimental LogDog/Annotee export.
49 LOGDOG_WHITELIST_MASTER_BUILDERS = {
50 }
51
52 # Configuration for a Pub/Sub topic.
53 PubSubConfig = collections.namedtuple('PubSubConfig', ('project', 'topic'))
54
55 # LogDogPlatform is the set of platform-specific LogDog bootstrapping
56 # configuration parameters.
57 #
58 # See _logdog_get_streamserver_uri for "streamserver" parameter details.
59 LogDogPlatform = collections.namedtuple('LogDogPlatform', (
60 'butler', 'annotee', 'credential_path', 'streamserver',
61 ))
62
63 # A CIPD binary description, including the package name, version, and relative
64 # path of the binary within the package.
65 CipdBinary = collections.namedtuple('CipdBinary',
66 ('package', 'version', 'relpath'))
33 67
34 # RecipeRuntime will probe this for values. 68 # RecipeRuntime will probe this for values.
35 # - First, (system, platform) 69 # - First, (system, platform)
36 # - Then, (system,) 70 # - Then, (system,)
37 # - Finally, (), 71 # - Finally, (),
38 PLATFORM_CONFIG = { 72 PLATFORM_CONFIG = {
39 # All systems. 73 # All systems.
40 (): {}, 74 (): {
75 'logdog_pubsub': PubSubConfig(
76 project='luci-logdog',
77 topic='logs',
78 ),
79 },
41 80
42 # Linux 81 # Linux
43 ('Linux',): { 82 ('Linux',): {
44 'run_cmd': ['/opt/infra-python/run.py'], 83 'run_cmd': ['/opt/infra-python/run.py'],
84 'logdog_platform': LogDogPlatform(
85 butler=CipdBinary('infra/tools/luci/logdog/butler/linux-amd64',
86 'latest', 'logdog_butler'),
87 annotee=CipdBinary('infra/tools/luci/logdog/annotee/linux-amd64',
88 'latest', 'logdog_annotee'),
89 credential_path=(
90 '/creds/service_accounts/service-account-luci-logdog-pubsub.json'),
91 streamserver='unix',
92 ),
45 }, 93 },
46 94
47 # Mac OSX 95 # Mac OSX
48 ('Darwin',): { 96 ('Darwin',): {
49 'run_cmd': ['/opt/infra-python/run.py'], 97 'run_cmd': ['/opt/infra-python/run.py'],
50 }, 98 },
51 99
52 # Windows 100 # Windows
53 ('Windows',): { 101 ('Windows',): {
54 'run_cmd': ['C:\\infra-python\\ENV\\Scripts\\python.exe', 102 'run_cmd': ['C:\\infra-python\\ENV\\Scripts\\python.exe',
55 'C:\\infra-python\\run.py'], 103 'C:\\infra-python\\run.py'],
56 }, 104 },
57 } 105 }
58 106
59 107
60 # Config is the runtime configuration used by `annotated_run.py` to bootstrap 108 # Config is the runtime configuration used by `annotated_run.py` to bootstrap
61 # the recipe engine. 109 # the recipe engine.
62 Config = collections.namedtuple('Config', ( 110 Config = collections.namedtuple('Config', (
63 'run_cmd', 111 'run_cmd',
112 'logdog_pubsub',
113 'logdog_platform',
64 )) 114 ))
65 115
66 116
67 def get_config(): 117 def get_config():
68 """Returns (Config): The constructed Config object. 118 """Returns (Config): The constructed Config object.
69 119
70 The Config object is constructed from: 120 The Config object is constructed from:
71 - Cascading the PLATFORM_CONFIG fields together based on current 121 - Cascading the PLATFORM_CONFIG fields together based on current
72 OS/Architecture. 122 OS/Architecture.
73 123
74 Raises: 124 Raises:
75 KeyError: if a required configuration key/parameter is not available. 125 KeyError: if a required configuration key/parameter is not available.
76 """ 126 """
77 # Cascade the platform configuration. 127 # Cascade the platform configuration.
78 p = (platform.system(), platform.processor()) 128 p = (platform.system(), platform.processor())
79 platform_config = {} 129 platform_config = {}
80 for i in xrange(len(p)+1): 130 for i in xrange(len(p)+1):
81 platform_config.update(PLATFORM_CONFIG.get(p[:i], {})) 131 platform_config.update(PLATFORM_CONFIG.get(p[:i], {}))
82 132
83 # Construct runtime configuration. 133 # Construct runtime configuration.
84 return Config( 134 return Config(
85 run_cmd=platform_config.get('run_cmd'), 135 run_cmd=platform_config.get('run_cmd'),
136 logdog_pubsub=platform_config.get('logdog_pubsub'),
137 logdog_platform=platform_config.get('logdog_platform'),
86 ) 138 )
87 139
88 140
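As a sketch of how the cascade resolves (the processor string below is hypothetical; it depends on what platform.system()/platform.processor() report on the host):

    # For p = ('Linux', 'x86_64'), get_config() merges, in order:
    #   PLATFORM_CONFIG[()]                   -> logdog_pubsub
    #   PLATFORM_CONFIG[('Linux',)]           -> run_cmd, logdog_platform
    #   PLATFORM_CONFIG[('Linux', 'x86_64')]  -> absent, so skipped
    config = get_config()
    assert config.run_cmd == ['/opt/infra-python/run.py']
    assert config.logdog_pubsub == PubSubConfig(project='luci-logdog',
                                                topic='logs')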
89 def ensure_directory(*path): 141 def ensure_directory(*path):
90 path = os.path.join(*path) 142 path = os.path.join(*path)
91 if not os.path.isdir(path): 143 if not os.path.isdir(path):
92 os.makedirs(path) 144 os.makedirs(path)
93 return path 145 return path
94 146
95 147
148 def _logdog_get_streamserver_uri(typ, d):
149 """Returns (str): The Butler StreamServer URI.
150
151 Args:
152 typ (str): The type of URI to generate. One of: ['unix'].
153 d (str): The working directory path.
154 Raises:
155 LogDogBootstrapError: if |typ| is not a known type.
156 """
157 if typ == 'unix':
158 return 'unix:%s' % (os.path.join(d, 'butler.sock'),)
159 raise LogDogBootstrapError('No streamserver URI generator.')
160
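A small usage sketch (the working directory is hypothetical):

    uri = _logdog_get_streamserver_uri('unix', '/tmp/workdir')
    # uri == 'unix:/tmp/workdir/butler.sock'; any other type raises
    # LogDogBootstrapError.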
161
96 def _run_command(cmd, **kwargs): 162 def _run_command(cmd, **kwargs):
97 if kwargs.pop('dry_run', False): 163 if kwargs.pop('dry_run', False):
98 LOGGER.info('(Dry Run) Would have executed command: %s', cmd) 164 LOGGER.info('(Dry Run) Would have executed command: %s', cmd)
99 return 0, '' 165 return 0, ''
100 166
101 LOGGER.debug('Executing command: %s', cmd) 167 LOGGER.debug('Executing command: %s', cmd)
102 kwargs.setdefault('stderr', subprocess.STDOUT) 168 kwargs.setdefault('stderr', subprocess.STDOUT)
103 proc = subprocess.Popen(cmd, **kwargs) 169 proc = subprocess.Popen(cmd, **kwargs)
104 stdout, _ = proc.communicate() 170 stdout, _ = proc.communicate()
105 171
(...skipping 31 matching lines...)
137 LOGGER.debug('Cleaning up temporary directory [%s].', basedir) 203 LOGGER.debug('Cleaning up temporary directory [%s].', basedir)
138 try: 204 try:
139 chromium_utils.RemoveDirectory(basedir) 205 chromium_utils.RemoveDirectory(basedir)
140 except Exception: 206 except Exception:
141 LOGGER.exception('Failed to clean up temporary directory [%s].', 207 LOGGER.exception('Failed to clean up temporary directory [%s].',
142 basedir) 208 basedir)
143 else: 209 else:
144 LOGGER.warning('(--leak) Leaking temporary directory [%s].', basedir) 210 LOGGER.warning('(--leak) Leaking temporary directory [%s].', basedir)
145 211
146 212
213 class LogDogNotBootstrapped(Exception):
214 pass
215
216
217 class LogDogBootstrapError(Exception):
218 pass
219
220
221 def is_executable(path):
222 return os.path.isfile(path) and os.access(path, os.X_OK)
223
224
232 def _get_service_account_json(opts, credential_path):
233 """Returns (str/None): If specified, the path to the service account JSON.
234
 235 This method probes the local environment and returns the path to the service
 236 account JSON credentials that the Butler should use, if any are needed.
 237
 238 If we're running on a GCE instance, None is returned, since the GCE service
 239 account is implicitly authenticated. If we're running on bare metal, the
 240 path to the locally-provisioned credentials is returned.
241
242 Args:
 243 opts (argparse.Namespace): The parsed command-line options.
     credential_path (str): The platform-default credential path to probe.
244 Raises:
245 |LogDogBootstrapError| if no credentials could be found.
246 """
247 path = opts.logdog_service_account_json
248 if path:
249 return path
250
251 if gce.Authenticator.is_gce():
252 LOGGER.info('Running on GCE. No credentials necessary.')
253 return None
254
255 if os.path.isfile(credential_path):
256 return credential_path
257
258 raise LogDogBootstrapError('Could not find service account credentials. '
259 'Tried: %s' % (credential_path,))
260
261
262 def _logdog_install_cipd(path, *packages):
263 """Returns (list): The paths to the binaries in each of the packages.
264
265 This method bootstraps CIPD in "path", installing the packages specified
266 by "packages" and returning the paths to their binaries.
267
268 Args:
269 path (str): The CIPD installation root.
270 packages (CipdBinary): The set of CIPD binary packages to install.
271 """
272 verbosity = 0
273 level = logging.getLogger().level
274 if level <= logging.INFO:
275 verbosity += 1
276 if level <= logging.DEBUG:
277 verbosity += 1
278
279 packages_path = os.path.join(path, 'packages.json')
280 pmap = {}
281 cmd = [
282 sys.executable,
283 os.path.join(env.Build, 'scripts', 'slave', 'cipd.py'),
284 '--dest-directory', path,
285 '--json-output', packages_path,
286 ] + (['--verbose'] * verbosity)
287 for p in packages:
288 cmd += ['-P', '%s@%s' % (p.package, p.version)]
289 pmap[p.package] = os.path.join(path, p.relpath)
290
291 try:
292 _check_command(cmd)
293 except subprocess.CalledProcessError:
294 LOGGER.exception('Failed to install LogDog CIPD packages.')
295 raise LogDogBootstrapError()
296
297 # Resolve installed packages.
298 return tuple(pmap[p.package] for p in packages)
299
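The binaries come back in the same order as the packages passed in; for example (the installation root is hypothetical):

    butler_bin, annotee_bin = _logdog_install_cipd(
        '/tmp/workdir/logdog_bootstrap/cipd',
        plat.butler, plat.annotee)
    # butler_bin  == '/tmp/workdir/logdog_bootstrap/cipd/logdog_butler'
    # annotee_bin == '/tmp/workdir/logdog_bootstrap/cipd/logdog_annotee'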
300
301 def _logdog_bootstrap(tempdir, config, opts, cmd):
302 """Executes the recipe engine, bootstrapping it through LogDog/Annotee.
303
304 This method executes the recipe engine, bootstrapping it through
305 LogDog/Annotee so its output and annotations are streamed to LogDog. The
306 bootstrap is configured to tee the annotations through STDOUT/STDERR so they
307 will still be sent to BuildBot.
308
309 The overall setup here is:
310 [annotated_run.py] => [logdog_butler] => [logdog_annotee] => [recipes.py]
311
 312 Args:
 313 tempdir (str): The temporary working directory for bootstrap artifacts.
     config (Config): Recipe runtime configuration.
314 opts (argparse.Namespace): Command-line options.
315 cmd (list): The recipe runner command list to bootstrap.
316
317 Returns (int): The return code of the recipe runner process.
318
319 Raises:
320 LogDogNotBootstrapped: if the recipe engine was not executed because the
321 LogDog bootstrap requirements are not available.
322 LogDogBootstrapError: if there was an error bootstrapping the recipe runner
323 through LogDog.
324 """
325 bootstrap_dir = ensure_directory(tempdir, 'logdog_bootstrap')
Vadim Sh. 2016/01/09 01:25:13 it will run bootstrap from scratch each time? :( T
dnj (Google) 2016/01/09 02:50:29 No specific reason other than this is nice and her
326
327 plat = config.logdog_platform
328 if not plat:
329 raise LogDogNotBootstrapped('LogDog platform is not configured.')
330
331 cipd_path = os.path.join(bootstrap_dir, 'cipd')
332 butler, annotee = _logdog_install_cipd(cipd_path, plat.butler, plat.annotee)
333 if opts.logdog_butler_path:
334 butler = opts.logdog_butler_path
335 if opts.logdog_annotee_path:
336 annotee = opts.logdog_annotee_path
337
338 if not config.logdog_pubsub:
339 raise LogDogNotBootstrapped('No Pub/Sub configured.')
340 if not config.logdog_pubsub.project:
341 raise LogDogNotBootstrapped('No Pub/Sub project configured.')
342 if not config.logdog_pubsub.topic:
343 raise LogDogNotBootstrapped('No Pub/Sub topic configured.')
344
345 # Determine LogDog verbosity.
346 logdog_verbose = []
347 if opts.logdog_verbose == 0:
348 pass
349 elif opts.logdog_verbose == 1:
350 logdog_verbose.append('-log_level=info')
351 else:
352 logdog_verbose.append('-log_level=debug')
353
354 service_account_args = []
355 service_account_json = _get_service_account_json(opts, plat.credential_path)
356 if service_account_json:
357 service_account_args += ['-service-account-json', service_account_json]
358
359 # Generate our Butler stream server URI.
360 streamserver_uri = _logdog_get_streamserver_uri(plat.streamserver, tempdir)
361
362 # Dump the bootstrapped Annotee command to JSON for Annotee to load.
363 #
 364 # Annotee can accept bootstrap parameters through either JSON or the
 365 # command line, but using JSON sidesteps any command-line length limits,
 366 # such as those encountered on Windows.
367 cmd_json = os.path.join(bootstrap_dir, 'annotee_cmd.json')
368 with open(cmd_json, 'w') as fd:
369 json.dump(cmd, fd)
370
371 # Butler Command.
372 cmd = [
373 butler,
374 '-output', 'pubsub,project="%(project)s",topic="%(topic)s"' % (
375 config.logdog_pubsub._asdict()),
376 ]
377 cmd += logdog_verbose
378 cmd += service_account_args
379 cmd += [
380 'run',
381 '-streamserver-uri', streamserver_uri,
382 '--',
383 ]
384
385 # Annotee Command.
386 cmd += [
387 annotee,
388 '-butler-stream-server', streamserver_uri,
389 '-json-args-path', cmd_json,
390 ]
391 cmd += logdog_verbose
392
393 rv, _ = _run_command(cmd, dry_run=opts.dry_run)
394 if rv in LOGDOG_ERROR_RETURNCODES:
395 raise LogDogBootstrapError('LogDog Error (%d)' % (rv,))
396 return rv
397
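Assembled end to end on the Linux configuration above, the bootstrap command list has roughly this shape (a sketch; the temporary paths are illustrative and the verbosity flags are omitted):

    cmd = [
        butler,
        '-output', 'pubsub,project="luci-logdog",topic="logs"',
        '-service-account-json',
        '/creds/service_accounts/service-account-luci-logdog-pubsub.json',
        'run', '-streamserver-uri', 'unix:/tmp/workdir/butler.sock',
        '--',
        annotee,
        '-butler-stream-server', 'unix:/tmp/workdir/butler.sock',
        '-json-args-path', '/tmp/workdir/logdog_bootstrap/annotee_cmd.json',
    ]

Annotee then reads the original recipes.py invocation back out of annotee_cmd.json and runs it as the bootstrapped process.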
398
399 def _assert_logdog_whitelisted(mastername, buildername):
400 """Asserts that the runtime environment is whitelisted for LogDog bootstrap.
401
402 Args:
403 mastername (str): The master name string.
404 buildername (str): The builder name.
405 Raises:
406 LogDogNotBootstrapped: if the runtime is not whitelisted.
407 """
408 if not all((mastername, buildername)):
409 raise LogDogNotBootstrapped('Required mastername/buildername is not set.')
410
411 # Key on mastername.
412 bdict = LOGDOG_WHITELIST_MASTER_BUILDERS.get(mastername)
413 if bdict is not None:
414 # Key on buildername.
415 if WHITELIST_ALL in bdict or buildername in bdict:
416 LOGGER.info('Whitelisted master %s, builder %s.',
417 mastername, buildername)
418 return
419 raise LogDogNotBootstrapped('Master %s, builder %s is not whitelisted.' % (
420 mastername, buildername))
421
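Once masters start opting in, LOGDOG_WHITELIST_MASTER_BUILDERS entries would take roughly this shape (the master and builder names are hypothetical, for illustration only):

    LOGDOG_WHITELIST_MASTER_BUILDERS = {
        # Every builder on this master.
        'chromium.example': {WHITELIST_ALL},
        # Only the named builder on this master.
        'chromium.example.fyi': {'Example Experimental Builder'},
    }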
422
147 def get_recipe_properties(workdir, build_properties, 423 def get_recipe_properties(workdir, build_properties,
148 use_factory_properties_from_disk): 424 use_factory_properties_from_disk):
149 """Constructs the recipe's properties from buildbot's properties. 425 """Constructs the recipe's properties from buildbot's properties.
150 426
151 This retrieves the current factory properties from the master_config 427 This retrieves the current factory properties from the master_config
152 in the slave's checkout (no factory properties are handed to us from the 428 in the slave's checkout (no factory properties are handed to us from the
153 master), and merges in the build properties. 429 master), and merges in the build properties.
154 430
155 Using the values from the checkout allows us to do things like change 431 Using the values from the checkout allows us to do things like change
156 the recipe and other factory properties for a builder without needing 432 the recipe and other factory properties for a builder without needing
(...skipping 133 matching lines...)
290 help='factory properties in b64 gz JSON format') 566 help='factory properties in b64 gz JSON format')
291 parser.add_argument('--keep-stdin', action='store_true', default=False, 567 parser.add_argument('--keep-stdin', action='store_true', default=False,
292 help='don\'t close stdin when running recipe steps') 568 help='don\'t close stdin when running recipe steps')
293 parser.add_argument('--master-overrides-slave', action='store_true', 569 parser.add_argument('--master-overrides-slave', action='store_true',
294 help='use the property values given on the command line from the master, ' 570 help='use the property values given on the command line from the master, '
295 'not the ones looked up on the slave') 571 'not the ones looked up on the slave')
296 parser.add_argument('--use-factory-properties-from-disk', 572 parser.add_argument('--use-factory-properties-from-disk',
297 action='store_true', default=False, 573 action='store_true', default=False,
298 help='use factory properties loaded from disk on the slave') 574 help='use factory properties loaded from disk on the slave')
299 575
576 group = parser.add_argument_group('LogDog Bootstrap')
577 group.add_argument('--logdog-verbose',
578 action='count', default=0,
579 help='Increase LogDog verbosity. This can be specified multiple times.')
580 group.add_argument('--logdog-force', action='store_true',
581 help='Force LogDog bootstrapping, even if the system is not configured.')
582 group.add_argument('--logdog-butler-path',
583 help='Path to the LogDog Butler. If empty, one will be probed/downloaded '
584 'from CIPD.')
585 group.add_argument('--logdog-annotee-path',
586 help='Path to the LogDog Annotee. If empty, one will be '
587 'probed/downloaded from CIPD.')
588 group.add_argument('--logdog-service-account-json',
589 help='Path to the service account JSON. If one is not provided, the '
590 'local system credentials will be used.')
591
300 return parser.parse_args(argv) 592 return parser.parse_args(argv)
301 593
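For local experiments these flags can force the bootstrap and point it at prebuilt binaries; a hypothetical sketch (assuming the remaining recipe options keep their defaults; the binary paths are illustrative):

    opts = get_args([
        '--logdog-force',
        '--logdog-verbose', '--logdog-verbose',  # count of 2 -> '-log_level=debug'
        '--logdog-butler-path', '/path/to/logdog_butler',
        '--logdog-annotee-path', '/path/to/logdog_annotee',
    ])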
302 594
303 def update_scripts(): 595 def update_scripts():
304 if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'): 596 if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
305 os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS') 597 os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
306 return False 598 return False
307 599
308 stream = annotator.StructuredAnnotationStream() 600 stream = annotator.StructuredAnnotationStream()
309 601
310 with stream.step('update_scripts') as s: 602 with stream.step('update_scripts') as s:
311 gclient_name = 'gclient' 603 gclient_name = 'gclient'
312 if sys.platform.startswith('win'): 604 if sys.platform.startswith('win'):
313 gclient_name += '.bat' 605 gclient_name += '.bat'
314 gclient_path = os.path.join(env.Build, '..', 'depot_tools', 606 gclient_path = os.path.join(env.Build, os.pardir, 'depot_tools',
315 gclient_name) 607 gclient_name)
316 gclient_cmd = [gclient_path, 'sync', '--force', '--verbose', '--jobs=2'] 608 gclient_cmd = [gclient_path, 'sync', '--force', '--verbose', '--jobs=2']
317 try: 609 try:
318 fd, output_json = tempfile.mkstemp() 610 fd, output_json = tempfile.mkstemp()
319 os.close(fd) 611 os.close(fd)
320 gclient_cmd += ['--output-json', output_json] 612 gclient_cmd += ['--output-json', output_json]
321 except Exception: 613 except Exception:
322 # Super paranoia try block. 614 # Super paranoia try block.
323 output_json = None 615 output_json = None
324 cmd_dict = { 616 cmd_dict = {
(...skipping 92 matching lines...)
417 ] 709 ]
418 # Add this conditionally so that we get an error in 710 # Add this conditionally so that we get an error in
419 # send_monitoring_event log files in case it isn't present. 711 # send_monitoring_event log files in case it isn't present.
420 if hostname: 712 if hostname:
421 cmd += ['--build-event-hostname', hostname] 713 cmd += ['--build-event-hostname', hostname]
422 _check_command(cmd) 714 _check_command(cmd)
423 except Exception: 715 except Exception:
424 LOGGER.warning("Failed to send monitoring event.", exc_info=True) 716 LOGGER.warning("Failed to send monitoring event.", exc_info=True)
425 717
426 718
719 def _exec_recipe(opts, tdir, config, properties):
720 # Find out if the recipe we intend to run is in build_internal's recipes. If
721 # so, use recipes.py from there, otherwise use the one from build.
722 recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'
723
724 # Use the standard recipe runner unless the recipes are explicitly in the
725 # "build_limited" repository.
726 recipe_runner = os.path.join(env.Build,
727 'scripts', 'slave', 'recipes.py')
728 if env.BuildInternal:
729 build_limited = os.path.join(env.BuildInternal, 'scripts', 'slave')
730 if os.path.exists(os.path.join(build_limited, 'recipes', recipe_file)):
731 recipe_runner = os.path.join(build_limited, 'recipes.py')
732
733 # Dump properties to JSON and build recipe command.
734 props_file = os.path.join(tdir, 'recipe_properties.json')
735 with open(props_file, 'w') as fh:
736 json.dump(properties, fh)
737
738 cmd = [
739 sys.executable, '-u', recipe_runner,
740 'run',
741 '--workdir=%s' % os.getcwd(),
742 '--properties-file=%s' % props_file,
743 properties['recipe'],
744 ]
745
746 status = None
747 try:
748 if not opts.logdog_force:
749 _assert_logdog_whitelisted(properties.get('mastername'),
Vadim Sh. 2016/01/09 01:25:13 I was confused by this... Do not use exception as
dnj (Google) 2016/01/09 02:50:28 Done.
750 properties.get('buildername'))
751 status = _logdog_bootstrap(tdir, config, opts, cmd)
752 except LogDogNotBootstrapped as e:
753 LOGGER.info('Not bootstrapped: %s', e.message)
754 except LogDogBootstrapError as e:
755 LOGGER.warning('Could not bootstrap LogDog: %s', e.message)
756 except Exception as e:
757 LOGGER.exception('Exception while bootstrapping LogDog.')
758 finally:
759 if status is None:
760 LOGGER.info('Not using LogDog. Invoking `recipes.py` directly.')
761 status, _ = _run_command(cmd, dry_run=opts.dry_run)
762
763 return status
764
765
427 def main(argv): 766 def main(argv):
428 opts = get_args(argv) 767 opts = get_args(argv)
429 768
430 if opts.verbose == 0: 769 if opts.verbose == 0:
431 level = logging.INFO 770 level = logging.INFO
432 else: 771 else:
433 level = logging.DEBUG 772 level = logging.DEBUG
434 logging.getLogger().setLevel(level) 773 logging.getLogger().setLevel(level)
435 774
436 clean_old_recipe_engine() 775 clean_old_recipe_engine()
437 776
438 # Enter our runtime environment. 777 # Enter our runtime environment.
439 with recipe_tempdir(leak=opts.leak) as tdir: 778 with recipe_tempdir(leak=opts.leak) as tdir:
440 LOGGER.debug('Using temporary directory: [%s].', tdir) 779 LOGGER.debug('Using temporary directory: [%s].', tdir)
441 780
442 # Load factory properties and configuration. 781 # Load factory properties and configuration.
443 # TODO(crbug.com/551165): remove flag "factory_properties". 782 # TODO(crbug.com/551165): remove flag "factory_properties".
444 use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or 783 use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or
445 bool(opts.factory_properties)) 784 bool(opts.factory_properties))
446 properties = get_recipe_properties( 785 properties = get_recipe_properties(
447 tdir, opts.build_properties, use_factory_properties_from_disk) 786 tdir, opts.build_properties, use_factory_properties_from_disk)
448 LOGGER.debug('Loaded properties: %s', properties) 787 LOGGER.debug('Loaded properties: %s', properties)
449 788
450 config = get_config() 789 config = get_config()
451 LOGGER.debug('Loaded runtime configuration: %s', config) 790 LOGGER.debug('Loaded runtime configuration: %s', config)
452 791
453 # Find out if the recipe we intend to run is in build_internal's recipes. If
454 # so, use recipes.py from there, otherwise use the one from build.
455 recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'
456
457 # Use the standard recipe runner unless the recipes are explicitly in the
458 # "build_limited" repository.
459 recipe_runner = os.path.join(env.Build,
460 'scripts', 'slave', 'recipes.py')
461 if env.BuildInternal:
462 build_limited = os.path.join(env.BuildInternal, 'scripts', 'slave')
463 if os.path.exists(os.path.join(build_limited, 'recipes', recipe_file)):
464 recipe_runner = os.path.join(build_limited, 'recipes.py')
465
466 # Setup monitoring directory and send a monitoring event. 792 # Setup monitoring directory and send a monitoring event.
467 build_data_dir = ensure_directory(tdir, 'build_data') 793 build_data_dir = ensure_directory(tdir, 'build_data')
468 properties['build_data_dir'] = build_data_dir 794 properties['build_data_dir'] = build_data_dir
469 795
470 # Write our annotated_run.py monitoring event. 796 # Write our annotated_run.py monitoring event.
471 write_monitoring_event(config, build_data_dir, properties) 797 write_monitoring_event(config, build_data_dir, properties)
472 798
473 # Dump properties to JSON and build recipe command. 799 # Execute our recipe.
474 props_file = os.path.join(tdir, 'recipe_properties.json') 800 return _exec_recipe(opts, tdir, config, properties)
475 with open(props_file, 'w') as fh:
476 json.dump(properties, fh)
477 cmd = [
478 sys.executable, '-u', recipe_runner,
479 'run',
480 '--workdir=%s' % os.getcwd(),
481 '--properties-file=%s' % props_file,
482 properties['recipe'],
483 ]
484
485 status, _ = _run_command(cmd, dry_run=opts.dry_run)
486
487 return status
488 801
489 802
490 def shell_main(argv): 803 def shell_main(argv):
491 if update_scripts(): 804 if update_scripts():
492 # Re-execute with the updated annotated_run.py. 805 # Re-execute with the updated annotated_run.py.
493 rv, _ = _run_command([sys.executable] + argv) 806 rv, _ = _run_command([sys.executable] + argv)
494 return rv 807 return rv
495 else: 808 else:
496 return main(argv[1:]) 809 return main(argv[1:])
497 810
498 811
499 if __name__ == '__main__': 812 if __name__ == '__main__':
500 logging.basicConfig(level=logging.INFO) 813 logging.basicConfig(level=logging.INFO)
501 sys.exit(shell_main(sys.argv)) 814 sys.exit(shell_main(sys.argv))