OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Does one of the following depending on the --mode argument: | 6 """Does one of the following depending on the --mode argument: |
7 check Verifies all the inputs exist, touches the file specified with | 7 check Verifies all the inputs exist, touches the file specified with |
8 --result and exits. | 8 --result and exits. |
9 hashtable Puts a manifest file and hard links each of the inputs into the | 9 hashtable Puts a manifest file and hard links each of the inputs into the |
10 output directory. | 10 output directory. |
(...skipping 211 matching lines...)
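For orientation, a hypothetical invocation of this script, sketched in Python via subprocess; it only uses flags that are visible in main() further down (--mode, --result and the single .isolate argument), and the file names and paths are made up:

import subprocess

# Illustrative only: verify that every dependency listed by foo_test.isolate
# exists and write the metadata next to the build output.
subprocess.call([
    'python', 'isolate.py',
    '--mode', 'check',
    '--result', 'out/Release/foo_test.results',
    'foo_test.isolate',
])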
222 | 222 |
223 for relfile in infiles: | 223 for relfile in infiles: |
224 infile = os.path.join(indir, relfile) | 224 infile = os.path.join(indir, relfile) |
225 outfile = os.path.join(outdir, relfile) | 225 outfile = os.path.join(outdir, relfile) |
226 outsubdir = os.path.dirname(outfile) | 226 outsubdir = os.path.dirname(outfile) |
227 if not os.path.isdir(outsubdir): | 227 if not os.path.isdir(outsubdir): |
228 os.makedirs(outsubdir) | 228 os.makedirs(outsubdir) |
229 run_test_from_archive.link_file(outfile, infile, action) | 229 run_test_from_archive.link_file(outfile, infile, action) |
230 | 230 |
231 | 231 |
232 def isolate( | 232 def load_results(resultfile): |
233 outdir, indir, infiles, mode, read_only, cmd, relative_cwd, resultfile): | 233 """Loads the previous results as an optimization.""" |
234 """Main function to isolate a target with its dependencies. | 234 data = {} |
235 | |
236 Arguments: | |
237 - outdir: Output directory where the result is stored. Depends on |mode|. | |
238 - indir: Root directory to be used as the base directory for infiles. | |
239 - infiles: List of files, with relative path, to process. | |
240 - mode: Action to do. See file level docstring. | |
241 - read_only: Makes the temporary directory read only. | |
242 - cmd: Command to execute. | |
243 - relative_cwd: Directory relative to the base directory where to start the | |
244 command from. In general, this path will be the path | |
245 containing the gyp file where the target was defined. This | |
246 relative directory may be created implicitely if a file from | |
247 this directory is needed to run the test. Otherwise it won't | |
248 be created and the process creation will fail. It's up to the | |
249 caller to create this directory manually before starting the | |
250 test. | |
251 - resultfile: Path where to read and write the metadata. | |
252 | |
253 Some arguments are optional, dependending on |mode|. See the corresponding | |
254 MODE<mode> function for the exact behavior. | |
255 """ | |
256 mode_fn = getattr(sys.modules[__name__], 'MODE' + mode) | |
257 assert mode_fn | |
258 | |
259 # Load the previous results as an optimization. | |
260 prevdict = {} | |
261 if resultfile and os.path.isfile(resultfile): | 235 if resultfile and os.path.isfile(resultfile): |
262 resultfile = os.path.abspath(resultfile) | 236 resultfile = os.path.abspath(resultfile) |
263 with open(resultfile, 'rb') as f: | 237 with open(resultfile, 'r') as f: |
264 prevdict = json.load(f) | 238 data = json.load(f) |
265 else: | 239 else: |
266 resultfile = os.path.abspath(resultfile) | 240 resultfile = os.path.abspath(resultfile) |
| 241 |
267 # Works with native os.path.sep but stores as '/'. | 242 # Works with native os.path.sep but stores as '/'. |
268 if 'files' in prevdict and os.path.sep != '/': | 243 if 'files' in data and os.path.sep != '/': |
269 prevdict['files'] = dict( | 244 data['files'] = dict( |
270 (k.replace('/', os.path.sep), v) | 245 (k.replace('/', os.path.sep), v) |
271 for k, v in prevdict['files'].iteritems()) | 246 for k, v in data['files'].iteritems()) |
| 247 return data |
272 | 248 |
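A minimal sketch of the normalization load_results() performs above: the results file stores keys with '/' separators and they are converted to the native separator on load. The file name and size are invented, and ntpath is used so the Windows behavior can be shown on any platform:

import ntpath

stored = {'base/base_unittests.isolate': {'size': 123}}
loaded = dict(
    (k.replace('/', ntpath.sep), v) for k, v in stored.iteritems())
# loaded == {'base\\base_unittests.isolate': {'size': 123}}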
273 | 249 |
274 infiles = expand_directories( | 250 def save_results(resultfile, data): |
275 indir, infiles, lambda x: re.match(r'.*\.(svn|pyc)$', x)) | 251 data = data.copy() |
276 | |
277 # Only hashtable mode really needs the sha-1. | |
278 level = { | |
279 'check': NO_INFO, | |
280 'hashtable': WITH_HASH, | |
281 'remap': STATS_ONLY, | |
282 'run': STATS_ONLY, | |
283 'trace': STATS_ONLY, | |
284 } | |
285 dictfiles = process_inputs( | |
286 prevdict.get('files', {}), indir, infiles, level[mode], read_only) | |
287 | |
288 result = mode_fn( | |
289 outdir, indir, dictfiles, read_only, cmd, relative_cwd, resultfile) | |
290 out = { | |
291 'command': cmd, | |
292 'relative_cwd': relative_cwd, | |
293 'files': dictfiles, | |
294 # Makes the directories read-only in addition to the files. | |
295 'read_only': read_only, | |
296 } | |
297 | 252 |
298 # Works with native os.path.sep but stores as '/'. | 253 # Works with native os.path.sep but stores as '/'. |
299 if os.path.sep != '/': | 254 if os.path.sep != '/': |
300 out['files'] = dict( | 255 data['files'] = dict( |
301 (k.replace(os.path.sep, '/'), v) for k, v in out['files'].iteritems()) | 256 (k.replace(os.path.sep, '/'), v) for k, v in data['files'].iteritems()) |
302 | 257 |
303 f = None | 258 f = None |
304 try: | 259 try: |
305 if resultfile: | 260 if resultfile: |
306 f = open(resultfile, 'wb') | 261 f = open(resultfile, 'wb') |
307 else: | 262 else: |
308 f = sys.stdout | 263 f = sys.stdout |
309 json.dump(out, f, indent=2, sort_keys=True) | 264 json.dump(data, f, indent=2, sort_keys=True) |
310 f.write('\n') | 265 f.write('\n') |
311 finally: | 266 finally: |
312 if resultfile and f: | 267 if resultfile and f: |
313 f.close() | 268 f.close() |
314 | 269 |
315 total_bytes = sum(i.get('size', 0) for i in out['files'].itervalues()) | 270 total_bytes = sum(i.get('size', 0) for i in data['files'].itervalues()) |
316 if total_bytes: | 271 if total_bytes: |
317 logging.debug('Total size: %d bytes' % total_bytes) | 272 logging.debug('Total size: %d bytes' % total_bytes) |
318 return result | |
319 | 273 |
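A rough sketch of the dictionary that save_results() serializes, based on the keys assigned elsewhere in this patch (command, files, read_only, relative_cwd, plus the bookkeeping values set in process_options() below); every value here is invented for illustration:

example_data = {
    'command': ['../../out/Release/foo_test'],
    'files': {
        'out/Release/foo_test': {'size': 12345, 'sha-1': '0' * 40},
    },
    'read_only': False,
    'relative_cwd': 'chrome',
    'variables': {'EXECUTABLE_SUFFIX': '', 'OS': 'linux'},
}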
320 | 274 |
321 def MODEcheck( | 275 def isolate(outdir, mode, indir, infiles, data): |
322 _outdir, _indir, _dictfiles, _read_only, _cmd, _relative_cwd, _resultfile): | 276 """Main function to isolate a target with its dependencies. |
| 277 |
| 278 Arguments: |
| 279 - outdir: Output directory where the result is stored. Depends on |mode|. |
| 280 - indir: Root directory to be used as the base directory for infiles. |
| 281 - infiles: List of files, with relative path, to process. |
| 282 - mode: Action to do. See file level docstring. |
| 283 - data: Contains all the command-specific metadata. |
| 284 |
| 285 Some arguments are optional, depending on |mode|. See the corresponding |
| 286 MODE<mode> function for the exact behavior. |
| 287 """ |
| 288 modes = { |
| 289 'check': MODEcheck, |
| 290 'hashtable': MODEhashtable, |
| 291 'remap': MODEremap, |
| 292 'run': MODErun, |
| 293 'trace': MODEtrace, |
| 294 } |
| 295 mode_fn = modes[mode] |
| 296 |
| 297 infiles = expand_directories( |
| 298 indir, infiles, lambda x: re.match(r'.*\.(git|svn|pyc)$', x)) |
| 299 |
| 300 # Only hashtable mode really needs the sha-1. |
| 301 level = { |
| 302 'check': NO_INFO, |
| 303 'hashtable': WITH_HASH, |
| 304 'remap': STATS_ONLY, |
| 305 'run': STATS_ONLY, |
| 306 'trace': STATS_ONLY, |
| 307 } |
| 308 # Regenerate data['files'] from infiles. |
| 309 data['files'] = process_inputs( |
| 310 data.get('files', {}), indir, infiles, level[mode], data.get('read_only')) |
| 311 |
| 312 result = mode_fn(outdir, indir, data) |
| 313 return result, data |
| 314 |
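As a usage sketch, this mirrors how main() below ends up calling isolate() once process_options() has produced root_dir, infiles and data; the paths and dictionary contents are illustrative, not taken from a real run:

retcode, data = isolate(
    outdir=None,                    # each MODE* picks its own default
    mode='check',
    indir='/b/src',                 # root_dir computed by process_options()
    infiles=['chrome/foo_test.isolate'],
    data={'command': [], 'read_only': False, 'relative_cwd': 'chrome'})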
| 315 |
| 316 def MODEcheck(_outdir, _indir, _data): |
323 """No-op.""" | 317 """No-op.""" |
324 return 0 | 318 return 0 |
325 | 319 |
326 | 320 |
327 def MODEhashtable( | 321 def MODEhashtable(outdir, indir, data): |
328 outdir, indir, dictfiles, _read_only, _cmd, _relative_cwd, resultfile): | 322 outdir = ( |
329 outdir = outdir or os.path.join(os.path.dirname(resultfile), 'hashtable') | 323 outdir or os.path.join(os.path.dirname(data['resultdir']), 'hashtable')) |
330 if not os.path.isdir(outdir): | 324 if not os.path.isdir(outdir): |
331 os.makedirs(outdir) | 325 os.makedirs(outdir) |
332 for relfile, properties in dictfiles.iteritems(): | 326 for relfile, properties in data['files'].iteritems(): |
333 infile = os.path.join(indir, relfile) | 327 infile = os.path.join(indir, relfile) |
334 outfile = os.path.join(outdir, properties['sha-1']) | 328 outfile = os.path.join(outdir, properties['sha-1']) |
335 if os.path.isfile(outfile): | 329 if os.path.isfile(outfile): |
336 # Just do a quick check that the file size matches. No need to stat() the | 330 # Just do a quick check that the file size matches. No need to stat() the |
337 # input file again; grab the value from the dict. | 331 # input file again; grab the value from the dict. |
338 out_size = os.stat(outfile).st_size | 332 out_size = os.stat(outfile).st_size |
339 in_size = dictfiles.get(infile, {}).get('size') or os.stat(infile).st_size | 333 in_size = ( |
| 334 data.get('files', {}).get(infile, {}).get('size') or |
| 335 os.stat(infile).st_size) |
340 if in_size == out_size: | 336 if in_size == out_size: |
341 continue | 337 continue |
342 # Otherwise, an exception will be raised. | 338 # Otherwise, an exception will be raised. |
343 run_test_from_archive.link_file( | 339 run_test_from_archive.link_file( |
344 outfile, infile, run_test_from_archive.HARDLINK) | 340 outfile, infile, run_test_from_archive.HARDLINK) |
345 return 0 | 341 return 0 |
346 | 342 |
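The net effect of MODEhashtable() is a flat directory of hard links, each named after the sha-1 of the file it points to. The snippet below merely lists such a directory; the path is made up and stands for whatever --outdir (or the default derived from data['resultdir'] above) resolves to:

import os

hashtable_dir = 'out/hashtable'  # made-up path for the example
for name in sorted(os.listdir(hashtable_dir)):
    # Each entry is a 40-character hex digest, e.g.
    # 'da39a3ee5e6b4b0d3255bfef95601890afd80709', hard linked to one input.
    print name, os.stat(os.path.join(hashtable_dir, name)).st_size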
347 | 343 |
348 def MODEremap( | 344 def MODEremap(outdir, indir, data): |
349 outdir, indir, dictfiles, read_only, _cmd, _relative_cwd, _resultfile): | |
350 if not outdir: | 345 if not outdir: |
351 outdir = tempfile.mkdtemp(prefix='isolate') | 346 outdir = tempfile.mkdtemp(prefix='isolate') |
352 else: | 347 else: |
353 if not os.path.isdir(outdir): | 348 if not os.path.isdir(outdir): |
354 os.makedirs(outdir) | 349 os.makedirs(outdir) |
355 print 'Remapping into %s' % outdir | 350 print 'Remapping into %s' % outdir |
356 if len(os.listdir(outdir)): | 351 if len(os.listdir(outdir)): |
357 print 'Can\'t remap in a non-empty directory' | 352 print 'Can\'t remap in a non-empty directory' |
358 return 1 | 353 return 1 |
359 recreate_tree(outdir, indir, dictfiles.keys(), run_test_from_archive.HARDLINK) | 354 recreate_tree( |
360 if read_only: | 355 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) |
| 356 if data['read_only']: |
361 run_test_from_archive.make_writable(outdir, True) | 357 run_test_from_archive.make_writable(outdir, True) |
362 return 0 | 358 return 0 |
363 | 359 |
364 | 360 |
365 def MODErun( | 361 def MODErun(_outdir, indir, data): |
366 _outdir, indir, dictfiles, read_only, cmd, relative_cwd, _resultfile): | |
367 """Always uses a temporary directory.""" | 362 """Always uses a temporary directory.""" |
368 try: | 363 try: |
369 outdir = tempfile.mkdtemp(prefix='isolate') | 364 outdir = tempfile.mkdtemp(prefix='isolate') |
370 recreate_tree( | 365 recreate_tree( |
371 outdir, indir, dictfiles.keys(), run_test_from_archive.HARDLINK) | 366 outdir, indir, data['files'].keys(), run_test_from_archive.HARDLINK) |
372 cwd = os.path.join(outdir, relative_cwd) | 367 cwd = os.path.join(outdir, data['relative_cwd']) |
373 if not os.path.isdir(cwd): | 368 if not os.path.isdir(cwd): |
374 os.makedirs(cwd) | 369 os.makedirs(cwd) |
375 if read_only: | 370 if data['read_only']: |
376 run_test_from_archive.make_writable(outdir, True) | 371 run_test_from_archive.make_writable(outdir, True) |
377 if not cmd: | 372 if not data['command']: |
378 print 'No command to run' | 373 print 'No command to run' |
379 return 1 | 374 return 1 |
380 cmd = trace_inputs.fix_python_path(cmd) | 375 cmd = trace_inputs.fix_python_path(data['command']) |
381 logging.info('Running %s, cwd=%s' % (cmd, cwd)) | 376 logging.info('Running %s, cwd=%s' % (cmd, cwd)) |
382 return subprocess.call(cmd, cwd=cwd) | 377 return subprocess.call(cmd, cwd=cwd) |
383 finally: | 378 finally: |
384 run_test_from_archive.rmtree(outdir) | 379 run_test_from_archive.rmtree(outdir) |
385 | 380 |
386 | 381 |
387 def MODEtrace( | 382 def MODEtrace(_outdir, indir, data): |
388 _outdir, indir, _dictfiles, _read_only, cmd, relative_cwd, resultfile): | |
389 """Shortcut to use trace_inputs.py properly. | 383 """Shortcut to use trace_inputs.py properly. |
390 | 384 |
391 It constructs the equivalent of dictfiles. It is hardcoded to base the | 385 It constructs the equivalent of dictfiles. It is hardcoded to base the |
392 checkout at src/. | 386 checkout at src/. |
393 """ | 387 """ |
394 logging.info('Running %s, cwd=%s' % (cmd, os.path.join(indir, relative_cwd))) | 388 logging.info( |
395 if resultfile: | 389 'Running %s, cwd=%s' % ( |
396 # Guesswork here. | 390 data['command'], os.path.join(indir, data['relative_cwd']))) |
397 product_dir = os.path.dirname(resultfile) | 391 product_dir = None |
398 if product_dir and indir: | 392 if data['resultdir'] and indir: |
399 product_dir = os.path.relpath(product_dir, indir) | 393 # Defaults to none if both are the same directory. |
400 else: | 394 product_dir = os.path.relpath(data['resultdir'], indir) or None |
401 product_dir = None | 395 if not data['command']: |
402 if not cmd: | |
403 print 'No command to run' | 396 print 'No command to run' |
404 return 1 | 397 return 1 |
405 return trace_inputs.trace_inputs( | 398 return trace_inputs.trace_inputs( |
406 '%s.log' % resultfile, | 399 data['resultfile'], |
407 cmd, | 400 data['command'], |
408 indir, | 401 indir, |
409 relative_cwd, | 402 data['relative_cwd'], |
410 product_dir, | 403 product_dir, |
411 False) | 404 False) |
412 | 405 |
413 | 406 |
414 def get_valid_modes(): | 407 def get_valid_modes(): |
415 """Returns the modes that can be used.""" | 408 """Returns the modes that can be used.""" |
416 return sorted( | 409 return sorted( |
417 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) | 410 i[4:] for i in dir(sys.modules[__name__]) if i.startswith('MODE')) |
418 | 411 |
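Assuming the MODE* functions shown in this file are the only ones, get_valid_modes() evaluates to the sorted list bound to valid_modes in main():

# Equivalent to the introspection above, spelled out:
valid_modes = ['check', 'hashtable', 'remap', 'run', 'trace']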
419 | 412 |
| 413 def process_options(variables, resultfile, input_file, error): |
| 414 """Processes the options and loads the input file. Returns the processed |
| 415 values. |
| 416 """ |
| 417 input_file = os.path.abspath(input_file) |
| 418 isolate_dir = os.path.dirname(input_file) |
| 419 resultfile = os.path.abspath(resultfile) |
| 420 logging.info( |
| 421 'process_options(%s, %s, %s, ...)' % (variables, resultfile, input_file)) |
| 422 |
| 423 # Process path variables as a special case. First normalize it, verify it |
| 424 # exists, convert it to an absolute path, then set it as relative to |
| 425 # isolate_dir. |
| 426 for i in ('DEPTH', 'PRODUCT_DIR'): |
| 427 if i not in variables: |
| 428 continue |
| 429 variable = os.path.normpath(variables[i]) |
| 430 if not os.path.isdir(variable): |
| 431 error('%s=%s is not a directory' % (i, variable)) |
| 432 variable = os.path.abspath(variable) |
| 433 # All variables are relative to the input file. |
| 434 variables[i] = os.path.relpath(variable, isolate_dir) |
| 435 |
| 436 command, infiles, read_only = load_isolate( |
| 437 open(input_file, 'r').read(), variables, error) |
| 438 |
| 439 # The trick used to determine the root directory is to look at how far up |
| 440 # the input files reach through their '../' prefixes. |
| 441 # TODO(maruel): Stop the msbuild generator from generating a mix of / and \\. |
| 442 root_dir = isolate_dir.replace(os.path.sep, '/') |
| 443 for i in infiles: |
| 444 i = i.replace(os.path.sep, '/') |
| 445 x = isolate_dir.replace(os.path.sep, '/') |
| 446 while i.startswith('../'): |
| 447 i = i[3:] |
| 448 assert not i.startswith('/') |
| 449 x = posixpath.dirname(x) |
| 450 if root_dir.startswith(x): |
| 451 root_dir = x |
| 452 root_dir = root_dir.replace('/', os.path.sep) |
| 453 # The relative directory is automatically determined by the relative path |
| 454 # between root_dir and the directory containing the .isolate file. |
| 455 relative_dir = os.path.relpath(isolate_dir, root_dir) |
| 456 logging.debug('relative_dir: %s' % relative_dir) |
| 457 |
| 458 logging.debug( |
| 459 'variables: %s' % ', '.join( |
| 460 '%s=%s' % (k, v) for k, v in variables.iteritems())) |
| 461 |
| 462 data = load_results(resultfile) |
| 463 |
| 464 command, infiles, read_only = load_isolate( |
| 465 open(input_file, 'r').read(), variables, error) |
| 466 |
| 467 # Update data with the up-to-date information: |
| 468 data['command'] = command |
| 469 data['read_only'] = read_only |
| 470 data['relative_cwd'] = relative_dir |
| 471 data['resultfile'] = resultfile |
| 472 data['resultdir'] = os.path.dirname(resultfile) |
| 473 |
| 474 # Keep the old variables. |
| 475 data.setdefault('variables', {}).update(variables) |
| 476 |
| 477 logging.debug('command: %s' % command) |
| 478 logging.debug('infiles: %s' % infiles) |
| 479 logging.debug('read_only: %s' % read_only) |
| 480 infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles] |
| 481 logging.debug('processed infiles: %s' % infiles) |
| 482 return root_dir, infiles, data |
| 483 |
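To make the root directory trick in process_options() concrete, here is a worked example with made-up paths, following the loop step by step:

# Suppose the .isolate file lives in /b/src/chrome, so isolate_dir is
# '/b/src/chrome', and load_isolate() returned these infiles:
infiles = ['../base/base_unittests', 'test/data/file.txt']
# '../base/base_unittests' starts with one '../', so the loop walks the
# directory up one level: x becomes '/b/src', and since the current root_dir
# '/b/src/chrome' starts with x, root_dir is lowered to '/b/src'.
# 'test/data/file.txt' has no '../' prefix and leaves root_dir untouched.
# process_options() therefore returns:
#   root_dir     = '/b/src'
#   relative_dir = 'chrome'  (relpath of isolate_dir from root_dir)
#   infiles      = ['base/base_unittests', 'chrome/test/data/file.txt']
#                  (after normpath(os.path.join(relative_dir, f)))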
| 484 |
420 def main(): | 485 def main(): |
421 default_variables = [('OS', trace_inputs.get_flavor())] | 486 default_variables = [('OS', trace_inputs.get_flavor())] |
422 if sys.platform in ('win32', 'cygwin'): | 487 if sys.platform in ('win32', 'cygwin'): |
423 default_variables.append(('EXECUTABLE_SUFFIX', '.exe')) | 488 default_variables.append(('EXECUTABLE_SUFFIX', '.exe')) |
424 else: | 489 else: |
425 default_variables.append(('EXECUTABLE_SUFFIX', '')) | 490 default_variables.append(('EXECUTABLE_SUFFIX', '')) |
426 valid_modes = get_valid_modes() | 491 valid_modes = get_valid_modes() |
427 parser = optparse.OptionParser( | 492 parser = optparse.OptionParser( |
428 usage='%prog [options] [.isolate file]', | 493 usage='%prog [options] [.isolate file]', |
429 description=sys.modules[__name__].__doc__) | 494 description=sys.modules[__name__].__doc__) |
(...skipping 28 matching lines...)
458 | 523 |
459 options, args = parser.parse_args() | 524 options, args = parser.parse_args() |
460 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] | 525 level = [logging.ERROR, logging.INFO, logging.DEBUG][min(2, options.verbose)] |
461 logging.basicConfig( | 526 logging.basicConfig( |
462 level=level, | 527 level=level, |
463 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') | 528 format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s') |
464 | 529 |
465 if not options.mode: | 530 if not options.mode: |
466 parser.error('--mode is required') | 531 parser.error('--mode is required') |
467 if len(args) != 1: | 532 if len(args) != 1: |
| 533 logging.debug('%s' % sys.argv) |
468 parser.error('Use only one argument which should be a .isolate file') | 534 parser.error('Use only one argument which should be a .isolate file') |
469 | 535 |
470 input_file = os.path.abspath(args[0]) | 536 root_dir, infiles, data = process_options( |
471 isolate_dir = os.path.dirname(input_file) | 537 dict(options.variables), options.result, args[0], parser.error) |
472 | |
473 # Extract the variables. | |
474 variables = dict(options.variables) | |
475 # Process path variables as a special case. First normalize it, verifies it | |
476 # exists, convert it to an absolute path, then set it as relative to | |
477 # isolate_dir. | |
478 for i in ('PRODUCT_DIR',): | |
479 if i not in variables: | |
480 continue | |
481 variable = os.path.normpath(variables[i]) | |
482 if not os.path.isdir(variable): | |
483 parser.error('%s=%s is not a directory' % (i, variable)) | |
484 variable = os.path.abspath(variable) | |
485 # All variables are relative to the input file. | |
486 variables[i] = os.path.relpath(variable, isolate_dir) | |
487 | |
488 command, infiles, read_only = load_isolate( | |
489 open(input_file, 'r').read(), variables, parser.error) | |
490 | |
491 # The trick used to determine the root directory is to look at "how far" back | |
492 # up it is looking up. | |
493 root_dir = isolate_dir.replace(os.path.sep, '/') | |
494 for i in infiles: | |
495 i = i.replace(os.path.sep, '/') | |
496 x = isolate_dir.replace(os.path.sep, '/') | |
497 while i.startswith('../'): | |
498 i = i[3:] | |
499 assert not i.startswith('/') | |
500 x = posixpath.dirname(x) | |
501 if root_dir.startswith(x): | |
502 root_dir = x | |
503 root_dir = root_dir.replace('/', os.path.sep) | |
504 # The relative directory is automatically determined by the relative path | |
505 # between root_dir and the directory containing the .isolate file. | |
506 relative_dir = os.path.relpath(isolate_dir, root_dir) | |
507 logging.debug('relative_dir: %s' % relative_dir) | |
508 | |
509 logging.debug( | |
510 'variables: %s' % ', '.join( | |
511 '%s=%s' % (k, v) for k, v in variables.iteritems())) | |
512 logging.debug('command: %s' % command) | |
513 logging.debug('infiles: %s' % infiles) | |
514 logging.debug('read_only: %s' % read_only) | |
515 infiles = [normpath(os.path.join(relative_dir, f)) for f in infiles] | |
516 logging.debug('processed infiles: %s' % infiles) | |
517 | 538 |
518 try: | 539 try: |
519 return isolate( | 540 resultcode, data = isolate( |
520 options.outdir, | 541 options.outdir, |
| 542 options.mode, |
521 root_dir, | 543 root_dir, |
522 infiles, | 544 infiles, |
523 options.mode, | 545 data) |
524 read_only, | |
525 command, | |
526 relative_dir, | |
527 options.result) | |
528 except run_test_from_archive.MappingError, e: | 546 except run_test_from_archive.MappingError, e: |
529 print >> sys.stderr, str(e) | 547 print >> sys.stderr, str(e) |
530 return 1 | 548 return 1 |
| 549 save_results(options.result, data) |
| 550 return resultcode |
531 | 551 |
532 | 552 |
533 if __name__ == '__main__': | 553 if __name__ == '__main__': |
534 sys.exit(main()) | 554 sys.exit(main()) |