OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2013 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 import argparse | 6 import argparse |
7 import collections | |
7 import contextlib | 8 import contextlib |
8 import json | 9 import json |
10 import logging | |
9 import os | 11 import os |
12 import platform | |
10 import shutil | 13 import shutil |
11 import socket | 14 import socket |
12 import subprocess | 15 import subprocess |
13 import sys | 16 import sys |
14 import tempfile | 17 import tempfile |
15 import traceback | |
16 | 18 |
19 | |
20 # Install Infra build environment. | |
17 BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname( | 21 BUILD_ROOT = os.path.dirname(os.path.dirname(os.path.dirname( |
18 os.path.abspath(__file__)))) | 22 os.path.abspath(__file__)))) |
19 sys.path.append(os.path.join(BUILD_ROOT, 'scripts')) | 23 sys.path.insert(0, os.path.join(BUILD_ROOT, 'scripts')) |
20 sys.path.append(os.path.join(BUILD_ROOT, 'third_party')) | 24 import common.env |
25 common.env.Install() | |
21 | 26 |
22 from common import annotator | 27 from common import annotator |
23 from common import chromium_utils | 28 from common import chromium_utils |
24 from common import master_cfg_utils | 29 from common import master_cfg_utils |
30 from slave import gce | |
25 | 31 |
26 SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__)) | 32 SCRIPT_PATH = os.path.join(common.env.Build, 'scripts', 'slave') |
27 BUILD_LIMITED_ROOT = os.path.join( | 33 BUILD_LIMITED_ROOT = os.path.join(common.env.BuildInternal, 'scripts', 'slave') |
28 os.path.dirname(BUILD_ROOT), 'build_internal', 'scripts', 'slave') | |
29 | 34 |
30 PACKAGE_CFG = os.path.join( | 35 # Logging instance. |
31 os.path.dirname(os.path.dirname(SCRIPT_PATH)), | 36 LOGGER = logging.getLogger('annotated_run') |
32 'infra', 'config', 'recipes.cfg') | |
33 | 37 |
34 if sys.platform.startswith('win'): | 38 |
35 # TODO(pgervais): add windows support | 39 # RecipeRuntime will probe this for values. |
36 # QQ: Where is infra/run.py on windows machines? | 40 # - First, (system, processor) |
37 RUN_CMD = None | 41 # - Then, (system,) |
38 else: | 42 # - Finally, (). |
39 RUN_CMD = os.path.join('/', 'opt', 'infra-python', 'run.py') | 43 PLATFORM_CONFIG = { |
44 # All systems. | |
45 (): {}, | |
46 | |
47 # Linux | |
48 ('Linux',): { | |
49 'run_cmd': '/opt/infra-python/run.py', | |
50 }, | |
51 | |
52 # Mac OSX | |
53 ('Darwin',): { | |
54 'run_cmd': '/opt/infra-python/run.py', | |
55 }, | |
56 | |
57 # Windows | |
58 ('Windows',): {}, | |
59 } | |
60 | |
61 | |
62 # Config is the runtime configuration used by `annotated_run.py` to bootstrap | |
63 # the recipe engine. | |
64 Config = collections.namedtuple('Config', ( | |
65 'run_cmd', | |
66 )) | |
67 | |
68 | |
69 def get_config(): | |
70 """Returns (Config): The constructed Config object. | |
71 | |
72 The Config object is constructed from: | |
73 - Cascading the PLATFORM_CONFIG fields together based on current | |
74 OS/Architecture. | |
75 | |
76 Raises: | |
77 KeyError: if a required configuration key/parameter is not available. | |
78 """ | |
79 # Cascade the platform configuration. | |
80 p = (platform.system(), platform.processor()) | |
81 platform_config = {} | |
82 for i in xrange(len(p)+1): | |
83 platform_config.update(PLATFORM_CONFIG.get(p[:i], {})) | |
84 | |
85 # Construct runtime configuration. | |
86 return Config( | |
87 run_cmd=platform_config.get('run_cmd'), | |
88 ) | |
89 | |
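A note on the cascade in get_config(): each prefix of (system, processor) is
merged in order, so more specific entries override general ones. A minimal
sketch, assuming a Linux host whose platform.processor() reports 'x86_64'
(the processor string is illustrative):

    p = ('Linux', 'x86_64')
    platform_config = {}
    for i in xrange(len(p) + 1):
        # Merges PLATFORM_CONFIG[()], then [('Linux',)], then [('Linux', 'x86_64')].
        platform_config.update(PLATFORM_CONFIG.get(p[:i], {}))
    # platform_config == {'run_cmd': '/opt/infra-python/run.py'}
    # get_config() then returns Config(run_cmd='/opt/infra-python/run.py').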
90 | |
91 def ensure_directory(*path): | |
92 path = os.path.join(*path) | |
93 if not os.path.isdir(path): | |
94 os.makedirs(path) | |
95 return path | |
96 | |
97 | |
98 def _run_command(cmd, **kwargs): | |
99 dry_run = kwargs.pop('dry_run', False) | |
100 | |
101 LOGGER.debug('Executing command: %s', cmd) | |
102 if dry_run: | |
103 LOGGER.info('(Dry Run) Not executing command.') | |
iannucci 2015/12/02 02:13:34:
'(Dry Run) But not really!'
dnj (Google) 2015/12/02 18:52:43:
I was confused, like "but it returns!", but you're
104 return 0, '' | |
105 proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT, **kwargs) | |
106 stdout, _ = proc.communicate() | |
107 | |
108 LOGGER.debug('Process [%s] returned [%d] with output:\n%s', | |
109 cmd, proc.returncode, stdout) | |
110 return proc.returncode, stdout | |
111 | |
112 | |
113 def _check_command(*args, **kwargs): | |
114 rv, stdout = _run_command(args, **kwargs) | |
115 if rv != 0: | |
116 raise subprocess.CalledProcessError(rv, args, output=stdout) | |
117 return stdout | |
118 | |
40 | 119 |
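For anyone exercising these helpers outside a build, a minimal sketch of the
dry_run contract (the echo command is purely illustrative):

    rv, out = _run_command(['echo', 'hi'])                # really runs; rv == 0
    rv, out = _run_command(['echo', 'hi'], dry_run=True)  # logs only; returns (0, '')
    out = _check_command('echo', 'hi')                    # raises CalledProcessError
                                                          # on a nonzero exit code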
41 @contextlib.contextmanager | 120 @contextlib.contextmanager |
42 def namedTempFile(): | 121 def recipe_tempdir(root=None, leak=False): |
iannucci 2015/12/02 02:13:34:
TODO(crbug.com/361343): Make the recipe engine do
dnj (Google) 2015/12/02 18:52:43:
We _could_ do that, but this script uses the tempd
43 fd, name = tempfile.mkstemp() | 122 """Creates a temporary recipe-local working directory and yields it. |
44 os.close(fd) # let the exceptions fly | 123 |
124 This creates a temporary directory for this annotation run that is | |
125 automatically cleaned up on exit. The directory path is yielded. | |
126 | |
127 Args: | |
128 root (str/None): If not None, the root directory. Otherwise, |os.getcwd()| | |
129 will be used. | |
130 leak (bool): If true, don't clean up the temporary directory on exit. | |
131 """ | |
132 basedir = ensure_directory((root or os.getcwd()), '.recipe_runtime') | |
45 try: | 133 try: |
46 yield name | 134 tdir = tempfile.mkdtemp(dir=basedir) |
135 yield tdir | |
47 finally: | 136 finally: |
48 try: | 137 if basedir and os.path.isdir(basedir): |
49 os.remove(name) | 138 if not leak: |
50 except OSError as e: | 139 LOGGER.debug('Cleaning up temporary directory [%s].', basedir) |
51 print >> sys.stderr, "LEAK: %s: %s" % (name, e) | 140 try: |
141 # TODO(pgervais): use infra_libs.rmtree instead. | |
iannucci 2015/12/02 02:13:34:
I think we actually want RemoveDirectory from http
dnj (Google) 2015/12/02 18:52:43:
ew yuk. Done.
142 shutil.rmtree(basedir) | |
143 except Exception: | |
144 LOGGER.exception('Failed to clean up temporary directory [%s].', | |
145 basedir) | |
146 else: | |
147 LOGGER.warning('(--leak) Leaking temporary directory [%s].', basedir) | |
52 | 148 |
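Usage sketch for recipe_tempdir(), assuming the default root of os.getcwd():

    with recipe_tempdir() as tdir:
        # tdir is a fresh directory under <cwd>/.recipe_runtime
        props_path = os.path.join(tdir, 'recipe_properties.json')
    # On exit, <cwd>/.recipe_runtime is removed, unless leak=True was passed.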
53 | 149 |
54 def get_recipe_properties(build_properties, use_factory_properties_from_disk): | 150 def get_recipe_properties(workdir, build_properties, |
151 use_factory_properties_from_disk): | |
55 """Constructs the recipe's properties from buildbot's properties. | 152 """Constructs the recipe's properties from buildbot's properties. |
56 | 153 |
57 This retrieves the current factory properties from the master_config | 154 This retrieves the current factory properties from the master_config |
58 in the slave's checkout (no factory properties are handed to us from the | 155 in the slave's checkout (no factory properties are handed to us from the |
59 master), and merges in the build properties. | 156 master), and merges in the build properties. |
60 | 157 |
61 Using the values from the checkout allows us to do things like change | 158 Using the values from the checkout allows us to do things like change |
62 the recipe and other factory properties for a builder without needing | 159 the recipe and other factory properties for a builder without needing |
63 a master restart. | 160 a master restart. |
64 | 161 |
65 As the build properties don't include the factory properties, we would: | 162 As the build properties don't include the factory properties, we would: |
66 1. Load factory properties from checkout on the slave. | 163 1. Load factory properties from checkout on the slave. |
67 2. Override the factory properties with the build properties. | 164 2. Override the factory properties with the build properties. |
68 3. Set the factory-only properties as build properties using annotation so | 165 3. Set the factory-only properties as build properties using annotation so |
69 that they will show up on the build page. | 166 that they will show up on the build page. |
70 """ | 167 """ |
71 if not use_factory_properties_from_disk: | 168 if not use_factory_properties_from_disk: |
72 return build_properties | 169 return build_properties |
73 | 170 |
74 stream = annotator.StructuredAnnotationStream() | 171 stream = annotator.StructuredAnnotationStream() |
75 with stream.step('setup_properties') as s: | 172 with stream.step('setup_properties') as s: |
76 factory_properties = {} | 173 factory_properties = {} |
77 | 174 |
78 mastername = build_properties.get('mastername') | 175 mastername = build_properties.get('mastername') |
79 buildername = build_properties.get('buildername') | 176 buildername = build_properties.get('buildername') |
80 if mastername and buildername: | 177 if mastername and buildername: |
81 # Load factory properties from tip-of-tree checkout on the slave builder. | 178 # Load factory properties from tip-of-tree checkout on the slave builder. |
82 factory_properties = get_factory_properties_from_disk( | 179 factory_properties = get_factory_properties_from_disk( |
83 mastername, buildername) | 180 workdir, mastername, buildername) |
84 | 181 |
85 # Check conflicts between factory properties and build properties. | 182 # Check conflicts between factory properties and build properties. |
86 conflicting_properties = {} | 183 conflicting_properties = {} |
87 for name, value in factory_properties.items(): | 184 for name, value in factory_properties.items(): |
88 if not build_properties.has_key(name) or build_properties[name] == value: | 185 if not build_properties.has_key(name) or build_properties[name] == value: |
89 continue | 186 continue |
90 conflicting_properties[name] = (value, build_properties[name]) | 187 conflicting_properties[name] = (value, build_properties[name]) |
91 | 188 |
92 if conflicting_properties: | 189 if conflicting_properties: |
93 s.step_text( | 190 s.step_text( |
94 '<br/>detected %d conflict[s] between factory and build properties' | 191 '<br/>detected %d conflict[s] between factory and build properties' |
95 % len(conflicting_properties)) | 192 % len(conflicting_properties)) |
96 print 'Conflicting factory and build properties:' | 193 |
97 for name, (factory_value, build_value) in conflicting_properties.items(): | 194 conflicts = [' "%s": factory: "%s", build: "%s"' % ( |
98 print (' "%s": factory: "%s", build: "%s"' % ( | |
99 name, | 195 name, |
100 '<unset>' if (factory_value is None) else factory_value, | 196 '<unset>' if (fv is None) else fv, |
101 '<unset>' if (build_value is None) else build_value)) | 197 '<unset>' if (bv is None) else bv) |
102 print "Will use the values from build properties." | 198 for name, (fv, bv) in conflicting_properties.items()] |
199 LOGGER.warning('Conflicting factory and build properties:\n%s', | |
200 '\n'.join(conflicts)) | |
201 LOGGER.warning("Will use the values from build properties.") | |
103 | 202 |
104 # Figure out the factory-only properties and set them as build properties so | 203 # Figure out the factory-only properties and set them as build properties so |
105 # that they will show up on the build page. | 204 # that they will show up on the build page. |
106 for name, value in factory_properties.items(): | 205 for name, value in factory_properties.items(): |
107 if not build_properties.has_key(name): | 206 if not build_properties.has_key(name): |
108 s.set_build_property(name, json.dumps(value)) | 207 s.set_build_property(name, json.dumps(value)) |
109 | 208 |
110 # Build properties override factory properties. | 209 # Build properties override factory properties. |
111 properties = factory_properties.copy() | 210 properties = factory_properties.copy() |
112 properties.update(build_properties) | 211 properties.update(build_properties) |
113 return properties | 212 return properties |
114 | 213 |
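The precedence in the final merge is simply "build wins". A small sketch with
hypothetical property values:

    factory_properties = {'recipe': 'chromium', 'target': 'Release'}
    build_properties = {'target': 'Debug', 'buildnumber': 42}
    properties = factory_properties.copy()
    properties.update(build_properties)
    # {'recipe': 'chromium', 'target': 'Debug', 'buildnumber': 42}
    # 'target' conflicted, so the build value won (and was logged above);
    # 'recipe' was factory-only, so it was also echoed as a build property.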
115 | 214 |
116 def get_factory_properties_from_disk(mastername, buildername): | 215 def get_factory_properties_from_disk(workdir, mastername, buildername): |
117 master_list = master_cfg_utils.GetMasters() | 216 master_list = master_cfg_utils.GetMasters() |
118 master_path = None | 217 master_path = None |
119 for name, path in master_list: | 218 for name, path in master_list: |
120 if name == mastername: | 219 if name == mastername: |
121 master_path = path | 220 master_path = path |
122 | 221 |
123 if not master_path: | 222 if not master_path: |
124 raise LookupError('master "%s" not found.' % mastername) | 223 raise LookupError('master "%s" not found.' % mastername) |
125 | 224 |
126 script_path = os.path.join(BUILD_ROOT, 'scripts', 'tools', | 225 script_path = os.path.join(BUILD_ROOT, 'scripts', 'tools', |
127 'dump_master_cfg.py') | 226 'dump_master_cfg.py') |
128 | 227 |
129 with namedTempFile() as fname: | 228 master_json = os.path.join(workdir, 'dump_master_cfg.json') |
130 dump_cmd = [sys.executable, | 229 dump_cmd = [sys.executable, |
131 script_path, | 230 script_path, |
132 master_path, fname] | 231 master_path, master_json] |
133 proc = subprocess.Popen(dump_cmd, cwd=BUILD_ROOT, stdout=subprocess.PIPE, | 232 proc = subprocess.Popen(dump_cmd, cwd=BUILD_ROOT, stdout=subprocess.PIPE, |
134 stderr=subprocess.PIPE) | 233 stderr=subprocess.PIPE) |
135 out, err = proc.communicate() | 234 out, err = proc.communicate() |
136 exit_code = proc.returncode | 235 if proc.returncode: |
236 raise LookupError('Failed to get the master config; dump_master_cfg %s ' | |
237 'returned %d:\n%s\n%s\n' % ( | |
238 mastername, proc.returncode, out, err)) | |
137 | 239 |
138 if exit_code: | 240 with open(master_json, 'rU') as f: |
139 raise LookupError('Failed to get the master config; dump_master_cfg %s' | 241 config = json.load(f) |
140 'returned %d):\n%s\n%s\n'% ( | |
141 mastername, exit_code, out, err)) | |
142 | |
143 with open(fname, 'rU') as f: | |
144 config = json.load(f) | |
145 | 242 |
146 # Now extract just the factory properties for the requested builder | 243 # Now extract just the factory properties for the requested builder |
147 # from the master config. | 244 # from the master config. |
148 props = {} | 245 props = {} |
149 found = False | 246 found = False |
150 for builder_dict in config['builders']: | 247 for builder_dict in config['builders']: |
151 if builder_dict['name'] == buildername: | 248 if builder_dict['name'] == buildername: |
152 found = True | 249 found = True |
153 factory_properties = builder_dict['factory']['properties'] | 250 factory_properties = builder_dict['factory']['properties'] |
154 for name, (value, _) in factory_properties.items(): | 251 for name, (value, _) in factory_properties.items(): |
155 props[name] = value | 252 props[name] = value |
156 | 253 |
157 if not found: | 254 if not found: |
158 raise LookupError('builder "%s" not found in master "%s"' % | 255 raise LookupError('builder "%s" not found in master "%s"' % |
159 (buildername, mastername)) | 256 (buildername, mastername)) |
160 | 257 |
161 if 'recipe' not in props: | 258 if 'recipe' not in props: |
162 raise LookupError('Cannot find recipe for %s on %s' % | 259 raise LookupError('Cannot find recipe for %s on %s' % |
163 (buildername, mastername)) | 260 (buildername, mastername)) |
164 | 261 |
165 return props | 262 return props |
166 | 263 |
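The parsing loop above implies this shape for the dump_master_cfg output; the
builder name and values here are hypothetical, and the second element of each
property tuple (which the loop discards) is assumed to be provenance data:

    {
      "builders": [
        {
          "name": "Linux Builder",
          "factory": {
            "properties": {
              "recipe": ["chromium", "BuildFactory"]
            }
          }
        }
      ]
    }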
167 | 264 |
168 def get_args(argv): | 265 def get_args(argv): |
169 """Process command-line arguments.""" | 266 """Process command-line arguments.""" |
170 parser = argparse.ArgumentParser( | 267 parser = argparse.ArgumentParser( |
171 description='Entry point for annotated builds.') | 268 description='Entry point for annotated builds.') |
269 parser.add_argument('-v', '--verbose', | |
270 action='count', default=0, | |
271 help='Increase verbosity. This can be specified multiple times.') | |
272 parser.add_argument('-d', '--dry-run', action='store_true', | |
273 help='Perform the setup, but refrain from executing the recipe.') | |
274 parser.add_argument('-l', '--leak', action='store_true', | |
275 help="Refrain from cleaning up generated artifacts.") | |
172 parser.add_argument('--build-properties', | 276 parser.add_argument('--build-properties', |
173 type=json.loads, default={}, | 277 type=json.loads, default={}, |
174 help='build properties in JSON format') | 278 help='build properties in JSON format') |
175 parser.add_argument('--factory-properties', | 279 parser.add_argument('--factory-properties', |
176 type=json.loads, default={}, | 280 type=json.loads, default={}, |
177 help='factory properties in JSON format') | 281 help='factory properties in JSON format') |
178 parser.add_argument('--build-properties-gz', dest='build_properties', | 282 parser.add_argument('--build-properties-gz', dest='build_properties', |
179 type=chromium_utils.convert_gz_json_type, default={}, | 283 type=chromium_utils.convert_gz_json_type, default={}, |
180 help='build properties in b64 gz JSON format') | 284 help='build properties in b64 gz JSON format') |
181 parser.add_argument('--factory-properties-gz', dest='factory_properties', | 285 parser.add_argument('--factory-properties-gz', dest='factory_properties', |
182 type=chromium_utils.convert_gz_json_type, default={}, | 286 type=chromium_utils.convert_gz_json_type, default={}, |
183 help='factory properties in b64 gz JSON format') | 287 help='factory properties in b64 gz JSON format') |
184 parser.add_argument('--keep-stdin', action='store_true', default=False, | 288 parser.add_argument('--keep-stdin', action='store_true', default=False, |
185 help='don\'t close stdin when running recipe steps') | 289 help='don\'t close stdin when running recipe steps') |
186 parser.add_argument('--master-overrides-slave', action='store_true', | 290 parser.add_argument('--master-overrides-slave', action='store_true', |
187 help='use the property values given on the command line from the master, ' | 291 help='use the property values given on the command line from the master, ' |
188 'not the ones looked up on the slave') | 292 'not the ones looked up on the slave') |
189 parser.add_argument('--use-factory-properties-from-disk', | 293 parser.add_argument('--use-factory-properties-from-disk', |
190 action='store_true', default=False, | 294 action='store_true', default=False, |
191 help='use factory properties loaded from disk on the slave') | 295 help='use factory properties loaded from disk on the slave') |
296 | |
192 return parser.parse_args(argv) | 297 return parser.parse_args(argv) |
193 | 298 |
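A sketch of how a caller might produce the --build-properties-gz payload,
assuming chromium_utils.convert_gz_json_type reverses a base64-over-zlib JSON
encoding (check that helper for the exact scheme):

    import base64, json, zlib
    props = {'mastername': 'chromium.fyi', 'buildername': 'Linux Tests'}
    payload = base64.b64encode(zlib.compress(json.dumps(props)))
    # annotated_run.py --build-properties-gz <payload> --dry-run -v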
194 | 299 |
195 def update_scripts(): | 300 def update_scripts(): |
196 if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'): | 301 if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'): |
197 os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS') | 302 os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS') |
198 return False | 303 return False |
199 | 304 |
200 stream = annotator.StructuredAnnotationStream() | 305 stream = annotator.StructuredAnnotationStream() |
201 | 306 |
202 with stream.step('update_scripts') as s: | 307 with stream.step('update_scripts') as s: |
203 gclient_name = 'gclient' | 308 gclient_name = 'gclient' |
204 if sys.platform.startswith('win'): | 309 if sys.platform.startswith('win'): |
205 gclient_name += '.bat' | 310 gclient_name += '.bat' |
206 gclient_path = os.path.join(BUILD_ROOT, '..', 'depot_tools', gclient_name) | 311 gclient_path = os.path.join(BUILD_ROOT, '..', 'depot_tools', gclient_name) |
207 gclient_cmd = [gclient_path, 'sync', '--force', '--verbose'] | 312 gclient_cmd = [gclient_path, 'sync', '--force', '--verbose'] |
208 try: | 313 try: |
209 fd, output_json = tempfile.mkstemp() | 314 fd, output_json = tempfile.mkstemp() |
210 os.close(fd) | 315 os.close(fd) |
211 gclient_cmd += ['--output-json', output_json] | 316 gclient_cmd += ['--output-json', output_json] |
212 except Exception: | 317 except Exception: |
213 # Super paranoia try block. | 318 # Super paranoia try block. |
214 output_json = None | 319 output_json = None |
215 cmd_dict = { | 320 cmd_dict = { |
216 'name': 'update_scripts', | 321 'name': 'update_scripts', |
217 'cmd': gclient_cmd, | 322 'cmd': gclient_cmd, |
218 'cwd': BUILD_ROOT, | 323 'cwd': BUILD_ROOT, |
219 } | 324 } |
220 annotator.print_step(cmd_dict, os.environ, stream) | 325 annotator.print_step(cmd_dict, os.environ, stream) |
221 if subprocess.call(gclient_cmd, cwd=BUILD_ROOT) != 0: | 326 rv, _ = _run_command(gclient_cmd, cwd=BUILD_ROOT) |
327 if rv != 0: | |
222 s.step_text('gclient sync failed!') | 328 s.step_text('gclient sync failed!') |
223 s.step_warnings() | 329 s.step_warnings() |
224 elif output_json: | 330 elif output_json: |
225 try: | 331 try: |
226 with open(output_json, 'r') as f: | 332 with open(output_json, 'r') as f: |
227 gclient_json = json.load(f) | 333 gclient_json = json.load(f) |
228 for line in json.dumps( | 334 for line in json.dumps( |
229 gclient_json, sort_keys=True, | 335 gclient_json, sort_keys=True, |
230 indent=4, separators=(',', ': ')).splitlines(): | 336 indent=4, separators=(',', ': ')).splitlines(): |
231 s.step_log_line('gclient_json', line) | 337 s.step_log_line('gclient_json', line) |
232 s.step_log_end('gclient_json') | 338 s.step_log_end('gclient_json') |
233 revision = gclient_json['solutions']['build/']['revision'] | 339 revision = gclient_json['solutions']['build/']['revision'] |
234 scm = gclient_json['solutions']['build/']['scm'] | 340 scm = gclient_json['solutions']['build/']['scm'] |
235 s.step_text('%s - %s' % (scm, revision)) | 341 s.step_text('%s - %s' % (scm, revision)) |
236 s.set_build_property('build_scm', json.dumps(scm)) | 342 s.set_build_property('build_scm', json.dumps(scm)) |
237 s.set_build_property('build_revision', json.dumps(revision)) | 343 s.set_build_property('build_revision', json.dumps(revision)) |
238 except Exception as e: | 344 except Exception as e: |
239 s.step_text('Unable to process gclient JSON %s' % repr(e)) | 345 s.step_text('Unable to process gclient JSON %s' % repr(e)) |
240 s.step_warnings() | 346 s.step_warnings() |
241 finally: | 347 finally: |
242 try: | 348 try: |
243 os.remove(output_json) | 349 os.remove(output_json) |
244 except Exception as e: | 350 except Exception as e: |
245 print >> sys.stderr, "LEAKED:", output_json, e | 351 LOGGER.warning("LEAKED: %s", output_json, exc_info=True) |
246 else: | 352 else: |
247 s.step_text('Unable to get SCM data') | 353 s.step_text('Unable to get SCM data') |
248 s.step_warnings() | 354 s.step_warnings() |
249 | 355 |
250 os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1' | 356 os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1' |
251 | 357 |
252 # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real | 358 # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real |
253 # annotated_run. | 359 # annotated_run. |
254 os.environ['PYTHONIOENCODING'] = 'UTF-8' | 360 os.environ['PYTHONIOENCODING'] = 'UTF-8' |
255 | 361 |
256 return True | 362 return True |
257 | 363 |
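The RUN_SLAVE_UPDATED_SCRIPTS dance pairs update_scripts() with shell_main()
below: the first pass syncs and re-runs this script as a child process, the
second pass skips straight to the recipe. Condensed sketch of the round trip:

    # Pass 1: variable unset -> gclient sync, mark, re-invoke ourselves.
    os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'
    subprocess.call([sys.executable] + sys.argv)
    # Pass 2 (the child): update_scripts() sees the variable, pops it,
    # returns False, and shell_main() falls through to main(argv[1:]).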
258 | 364 |
259 def clean_old_recipe_engine(): | 365 def clean_old_recipe_engine(): |
260 """Clean stale pycs from the old location of recipe_engine. | 366 """Clean stale pycs from the old location of recipe_engine. |
261 | 367 |
262 This function should only be needed for a little while after the recipe | 368 This function should only be needed for a little while after the recipe |
263 packages rollout (2015-09-16). | 369 packages rollout (2015-09-16). |
264 """ | 370 """ |
265 for (dirpath, _, filenames) in os.walk( | 371 for (dirpath, _, filenames) in os.walk( |
266 os.path.join(BUILD_ROOT, 'third_party', 'recipe_engine')): | 372 os.path.join(BUILD_ROOT, 'third_party', 'recipe_engine')): |
267 for filename in filenames: | 373 for filename in filenames: |
268 if filename.endswith('.pyc'): | 374 if filename.endswith('.pyc'): |
269 path = os.path.join(dirpath, filename) | 375 os.remove(os.path.join(dirpath, filename)) |
270 os.remove(path) | |
271 | 376 |
272 | 377 |
273 @contextlib.contextmanager | 378 def write_monitoring_event(config, outdir, build_properties): |
274 def build_data_directory(): | 379 if not (config.run_cmd and os.path.exists(config.run_cmd)): |
275 """Context manager that creates a build-specific directory. | 380 LOGGER.warning('Unable to find run.py at %s, no events will be sent.', |
381 config.run_cmd) | |
382 return | |
276 | 383 |
277 The directory is wiped when exiting. | 384 hostname = socket.getfqdn() |
385 if hostname: # just in case getfqdn() returns None. | |
386 hostname = hostname.split('.')[0] | |
387 else: | |
388 hostname = None | |
278 | 389 |
279 Yields: | 390 try: |
280 build_data (str or None): full path to a writeable directory. Return None if | 391 cmd = [config.run_cmd, 'infra.tools.send_monitoring_event', |
281 no directory can be found or if it's not writeable. | 392 '--event-mon-output-file', |
282 """ | 393 ensure_directory(outdir, 'log_request_proto'), |
283 prefix = 'build_data' | 394 '--event-mon-run-type', 'file', |
284 | 395 '--event-mon-service-name', |
285 # TODO(pgervais): import that from infra_libs.logs instead | 396 'buildbot/master/master.%s' |
286 if sys.platform.startswith('win'): # pragma: no cover | 397 % build_properties.get('mastername', 'UNKNOWN'), |
287 DEFAULT_LOG_DIRECTORIES = [ | 398 '--build-event-build-name', |
288 'E:\\chrome-infra-logs', | 399 build_properties.get('buildername', 'UNKNOWN'), |
289 'C:\\chrome-infra-logs', | 400 '--build-event-build-number', |
290 ] | 401 str(build_properties.get('buildnumber', 0)), |
291 else: | 402 '--build-event-build-scheduling-time', |
292 DEFAULT_LOG_DIRECTORIES = ['/var/log/chrome-infra'] | 403 str(1000*int(build_properties.get('requestedAt', 0))), |
293 | 404 '--build-event-type', 'BUILD', |
294 build_data_dir = None | 405 '--event-mon-timestamp-kind', 'POINT', |
295 for candidate in DEFAULT_LOG_DIRECTORIES: | 406 # And use only defaults for credentials. |
296 if os.path.isdir(candidate): | 407 ] |
297 build_data_dir = os.path.join(candidate, prefix) | 408 # Add this conditionally so that we get an error in |
298 break | 409 # send_monitoring_event log files in case it isn't present. |
299 | 410 if hostname: |
300 # Remove any leftovers and recreate the dir. | 411 cmd += ['--build-event-hostname', hostname] |
301 if build_data_dir: | 412 _check_command(cmd) |
302 print >> sys.stderr, "Creating directory" | 413 except Exception: |
303 # TODO(pgervais): use infra_libs.rmtree instead. | 414 LOGGER.warning("Failed to send monitoring event.", exc_info=True) |
304 if os.path.exists(build_data_dir): | |
305 try: | |
306 shutil.rmtree(build_data_dir) | |
307 except Exception as exc: | |
308 # Catching everything: we don't want to break any builds for that reason | |
309 print >> sys.stderr, ( | |
310 "FAILURE: path can't be deleted: %s.\n%s" % (build_data_dir, str(exc)) | |
311 ) | |
312 print >> sys.stderr, "Creating directory" | |
313 | |
314 if not os.path.exists(build_data_dir): | |
315 try: | |
316 os.mkdir(build_data_dir) | |
317 except Exception as exc: | |
318 print >> sys.stderr, ( | |
319 "FAILURE: directory can't be created: %s.\n%s" % | |
320 (build_data_dir, str(exc)) | |
321 ) | |
322 build_data_dir = None | |
323 | |
324 # Under this line build_data_dir should point to an existing empty dir | |
325 # or be None. | |
326 yield build_data_dir | |
327 | |
328 # Clean up after ourselves | |
329 if build_data_dir: | |
330 # TODO(pgervais): use infra_libs.rmtree instead. | |
331 try: | |
332 shutil.rmtree(build_data_dir) | |
333 except Exception as exc: | |
334 # Catching everything: we don't want to break any builds for that reason. | |
335 print >> sys.stderr, ( | |
336 "FAILURE: path can't be deleted: %s.\n%s" % (build_data_dir, str(exc)) | |
337 ) | |
338 | 415 |
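With hypothetical properties (mastername 'chromium.fyi', builder 'Linux
Tests', build 42, requestedAt 1449014400) and an illustrative hostname, the
assembled invocation would look roughly like:

    /opt/infra-python/run.py infra.tools.send_monitoring_event \
        --event-mon-output-file <outdir>/log_request_proto \
        --event-mon-run-type file \
        --event-mon-service-name buildbot/master/master.chromium.fyi \
        --build-event-build-name 'Linux Tests' \
        --build-event-build-number 42 \
        --build-event-build-scheduling-time 1449014400000 \
        --build-event-type BUILD \
        --event-mon-timestamp-kind POINT \
        --build-event-hostname slave12-c4

    # requestedAt is in seconds; the flag takes milliseconds, hence the *1000.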
339 | 416 |
340 def main(argv): | 417 def main(argv): |
341 opts = get_args(argv) | 418 opts = get_args(argv) |
342 # TODO(crbug.com/551165): remove flag "factory_properties". | 419 |
343 use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or | 420 if opts.verbose == 0: |
344 bool(opts.factory_properties)) | 421 level = logging.INFO |
345 properties = get_recipe_properties( | 422 else: |
346 opts.build_properties, use_factory_properties_from_disk) | 423 level = logging.DEBUG |
424 logging.getLogger().setLevel(level) | |
347 | 425 |
348 clean_old_recipe_engine() | 426 clean_old_recipe_engine() |
349 | 427 |
350 # Find out if the recipe we intend to run is in build_internal's recipes. If | 428 # Enter our runtime environment. |
351 # so, use recipes.py from there, otherwise use the one from build. | 429 with recipe_tempdir(leak=opts.leak) as tdir: |
352 recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py' | 430 LOGGER.debug('Using temporary directory: [%s].', tdir) |
iannucci 2015/12/02 02:13:34:
move log line to recipe_tmpdir
dnj (Google) 2015/12/02 18:52:43:
Hmm, I just moved it out actually. The idea was th
353 if os.path.exists(os.path.join(BUILD_LIMITED_ROOT, 'recipes', recipe_file)): | |
354 recipe_runner = os.path.join(BUILD_LIMITED_ROOT, 'recipes.py') | |
355 else: | |
356 recipe_runner = os.path.join(SCRIPT_PATH, 'recipes.py') | |
357 | 431 |
358 with build_data_directory() as build_data_dir: | 432 # Load factory properties and configuration. |
359 # Create a LogRequestLite proto containing this build's information. | 433 # TODO(crbug.com/551165): remove flag "factory_properties". |
360 if build_data_dir: | 434 use_factory_properties_from_disk = (opts.use_factory_properties_from_disk or |
361 properties['build_data_dir'] = build_data_dir | 435 bool(opts.factory_properties)) |
436 properties = get_recipe_properties( | |
437 tdir, opts.build_properties, use_factory_properties_from_disk) | |
438 LOGGER.debug('Loaded properties: %s', properties) | |
362 | 439 |
363 hostname = socket.getfqdn() | 440 config = get_config() |
364 if hostname: # just in case getfqdn() returns None. | 441 LOGGER.debug('Loaded runtime configuration: %s', config) |
365 hostname = hostname.split('.')[0] | |
366 else: | |
367 hostname = None | |
368 | 442 |
369 if RUN_CMD and os.path.exists(RUN_CMD): | 443 # Find out if the recipe we intend to run is in build_internal's recipes. If |
370 try: | 444 # so, use recipes.py from there, otherwise use the one from build. |
371 cmd = [RUN_CMD, 'infra.tools.send_monitoring_event', | 445 recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py' |
372 '--event-mon-output-file', | 446 if os.path.exists(os.path.join(BUILD_LIMITED_ROOT, 'recipes', recipe_file)): |
373 os.path.join(build_data_dir, 'log_request_proto'), | 447 recipe_runner = os.path.join(BUILD_LIMITED_ROOT, 'recipes.py') |
374 '--event-mon-run-type', 'file', | 448 else: |
375 '--event-mon-service-name', | 449 recipe_runner = os.path.join(SCRIPT_PATH, 'recipes.py') |
376 'buildbot/master/master.%s' | |
377 % properties.get('mastername', 'UNKNOWN'), | |
378 '--build-event-build-name', | |
379 properties.get('buildername', 'UNKNOWN'), | |
380 '--build-event-build-number', | |
381 str(properties.get('buildnumber', 0)), | |
382 '--build-event-build-scheduling-time', | |
383 str(1000*int(properties.get('requestedAt', 0))), | |
384 '--build-event-type', 'BUILD', | |
385 '--event-mon-timestamp-kind', 'POINT', | |
386 # And use only defaults for credentials. | |
387 ] | |
388 # Add this conditionally so that we get an error in | |
389 # send_monitoring_event log files in case it isn't present. | |
390 if hostname: | |
391 cmd += ['--build-event-hostname', hostname] | |
392 subprocess.call(cmd) | |
393 except Exception: | |
394 print >> sys.stderr, traceback.format_exc() | |
395 | 450 |
396 else: | 451 # Setup monitoring directory and send a monitoring event. |
397 print >> sys.stderr, ( | 452 build_data_dir = ensure_directory(tdir, 'build_data') |
398 'WARNING: Unable to find run.py at %r, no events will be sent.' | 453 properties['build_data_dir'] = build_data_dir |
399 % str(RUN_CMD) | |
400 ) | |
401 | 454 |
402 with namedTempFile() as props_file: | 455 # Write our annotated_run.py monitoring event. |
403 with open(props_file, 'w') as fh: | 456 write_monitoring_event(config, tdir, properties) |
404 fh.write(json.dumps(properties)) | |
405 cmd = [ | |
406 sys.executable, '-u', recipe_runner, | |
407 'run', | |
408 '--workdir=%s' % os.getcwd(), | |
409 '--properties-file=%s' % props_file, | |
410 properties['recipe'] ] | |
411 status = subprocess.call(cmd) | |
412 | 457 |
413 # TODO(pgervais): Send events from build_data_dir to the endpoint. | 458 # Dump properties to JSON and build recipe command. |
459 props_file = os.path.join(tdir, 'recipe_properties.json') | |
460 with open(props_file, 'w') as fh: | |
461 json.dump(properties, fh) | |
462 cmd = [ | |
463 sys.executable, '-u', recipe_runner, | |
464 'run', | |
465 '--workdir=%s' % os.getcwd(), | |
466 '--properties-file=%s' % props_file, | |
467 properties['recipe'], | |
468 ] | |
469 | |
470 status, _ = _run_command(cmd, dry_run=opts.dry_run) | |
471 | |
414 return status | 472 return status |
415 | 473 |
474 | |
416 def shell_main(argv): | 475 def shell_main(argv): |
417 if update_scripts(): | 476 if update_scripts(): |
418 return subprocess.call([sys.executable] + argv) | 477 # Re-execute with the updated annotated_run.py. |
478 rv, _ = _run_command([sys.executable] + argv) | |
479 return rv | |
419 else: | 480 else: |
420 return main(argv[1:]) | 481 return main(argv[1:]) |
421 | 482 |
422 | 483 |
423 if __name__ == '__main__': | 484 if __name__ == '__main__': |
485 logging.basicConfig(level=logging.INFO) | |
424 sys.exit(shell_main(sys.argv)) | 486 sys.exit(shell_main(sys.argv)) |