Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2012 The LUCI Authors. All rights reserved. | 2 # Copyright 2012 The LUCI Authors. All rights reserved. |
| 3 # Use of this source code is governed by the Apache v2.0 license that can be | 3 # Use of this source code is governed by the Apache v2.0 license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Reads a .isolated, creates a tree of hardlinks and runs the test. | 6 """Runs a command with optional isolated input/output. |
| 7 | 7 |
| 8 To improve performance, it keeps a local cache. The local cache can safely be | 8 Despite name "run_isolated", can run a generic non-isolated command specified as |
| 9 deleted. | 9 args. |
| 10 | |
| 11 If input isolated hash is provided, fetches it, creates a tree of hard links, | |
| 12 appends args to the command in the fetched isolated and runs it. | |
| 13 To improve performance, keeps a local cache. | |
| 14 The local cache can safely be deleted. | |
| 10 | 15 |
| 11 Any ${ISOLATED_OUTDIR} on the command line will be replaced by the location of a | 16 Any ${ISOLATED_OUTDIR} on the command line will be replaced by the location of a |
| 12 temporary directory upon execution of the command specified in the .isolated | 17 temporary directory upon execution of the command specified in the .isolated |
| 13 file. All content written to this directory will be uploaded upon termination | 18 file. All content written to this directory will be uploaded upon termination |
| 14 and the .isolated file describing this directory will be printed to stdout. | 19 and the .isolated file describing this directory will be printed to stdout. |
| 15 """ | 20 """ |
| 16 | 21 |
| 17 __version__ = '0.6.1' | 22 __version__ = '0.7.0' |
| 18 | 23 |
| 19 import base64 | 24 import base64 |
| 20 import logging | 25 import logging |
| 21 import optparse | 26 import optparse |
| 22 import os | 27 import os |
| 23 import sys | 28 import sys |
| 24 import tempfile | 29 import tempfile |
| 25 import time | 30 import time |
| 26 | 31 |
| 27 from third_party.depot_tools import fix_encoding | 32 from third_party.depot_tools import fix_encoding |
| 28 | 33 |
| 29 from utils import file_path | 34 from utils import file_path |
| 30 from utils import fs | 35 from utils import fs |
| 31 from utils import large | 36 from utils import large |
| 32 from utils import logging_utils | 37 from utils import logging_utils |
| 33 from utils import on_error | 38 from utils import on_error |
| 34 from utils import subprocess42 | 39 from utils import subprocess42 |
| 35 from utils import tools | 40 from utils import tools |
| 36 from utils import zip_package | 41 from utils import zip_package |
| 37 | 42 |
| 38 import auth | 43 import auth |
| 39 import isolated_format | |
| 40 import isolateserver | 44 import isolateserver |
| 41 | 45 |
| 42 | 46 |
| 47 ISOLATED_OUTDIR_PARAMETER = '${ISOLATED_OUTDIR}' | |
| 48 | |
| 43 # Absolute path to this file (can be None if running from zip on Mac). | 49 # Absolute path to this file (can be None if running from zip on Mac). |
| 44 THIS_FILE_PATH = os.path.abspath(__file__) if __file__ else None | 50 THIS_FILE_PATH = os.path.abspath(__file__) if __file__ else None |
| 45 | 51 |
| 46 # Directory that contains this file (might be inside zip package). | 52 # Directory that contains this file (might be inside zip package). |
| 47 BASE_DIR = os.path.dirname(THIS_FILE_PATH) if __file__ else None | 53 BASE_DIR = os.path.dirname(THIS_FILE_PATH) if __file__ else None |
| 48 | 54 |
| 49 # Directory that contains currently running script file. | 55 # Directory that contains currently running script file. |
| 50 if zip_package.get_main_script_path(): | 56 if zip_package.get_main_script_path(): |
| 51 MAIN_DIR = os.path.dirname( | 57 MAIN_DIR = os.path.dirname( |
| 52 os.path.abspath(zip_package.get_main_script_path())) | 58 os.path.abspath(zip_package.get_main_script_path())) |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 129 # deleted. | 135 # deleted. |
| 130 file_path.make_tree_writeable(rootdir) | 136 file_path.make_tree_writeable(rootdir) |
| 131 else: | 137 else: |
| 132 raise ValueError( | 138 raise ValueError( |
| 133 'change_tree_read_only(%s, %s): Unknown flag %s' % | 139 'change_tree_read_only(%s, %s): Unknown flag %s' % |
| 134 (rootdir, read_only, read_only)) | 140 (rootdir, read_only, read_only)) |
| 135 | 141 |
| 136 | 142 |
| 137 def process_command(command, out_dir): | 143 def process_command(command, out_dir): |
| 138 """Replaces isolated specific variables in a command line.""" | 144 """Replaces isolated specific variables in a command line.""" |
| 139 def fix(arg): | 145 out_dir = out_dir.replace('/', os.sep) |
| 140 if '${ISOLATED_OUTDIR}' in arg: | 146 return [arg.replace(ISOLATED_OUTDIR_PARAMETER, out_dir) for arg in command] |
|
M-A Ruel
2016/05/02 14:18:14
The reason for the previous code was things like:
nodir
2016/05/02 17:43:22
got it, reverted, added comment
| |
| 141 return arg.replace('${ISOLATED_OUTDIR}', out_dir).replace('/', os.sep) | |
| 142 return arg | |
| 143 | |
| 144 return [fix(arg) for arg in command] | |
| 145 | 147 |
| 146 | 148 |
| 147 def run_command(command, cwd, tmp_dir, hard_timeout, grace_period): | 149 def run_command(command, cwd, tmp_dir, hard_timeout, grace_period): |
| 148 """Runs the command. | 150 """Runs the command. |
| 149 | 151 |
| 150 Returns: | 152 Returns: |
| 151 tuple(process exit code, bool if had a hard timeout) | 153 tuple(process exit code, bool if had a hard timeout) |
| 152 """ | 154 """ |
| 153 logging.info('run_command(%s, %s)' % (command, cwd)) | 155 logging.info('run_command(%s, %s)' % (command, cwd)) |
| 154 | 156 |
| (...skipping 140 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 295 logging.exception('Had difficulties removing out_dir %s: %s', out_dir, e) | 297 logging.exception('Had difficulties removing out_dir %s: %s', out_dir, e) |
| 296 stats = { | 298 stats = { |
| 297 'duration': time.time() - start, | 299 'duration': time.time() - start, |
| 298 'items_cold': base64.b64encode(large.pack(cold)), | 300 'items_cold': base64.b64encode(large.pack(cold)), |
| 299 'items_hot': base64.b64encode(large.pack(hot)), | 301 'items_hot': base64.b64encode(large.pack(hot)), |
| 300 } | 302 } |
| 301 return outputs_ref, success, stats | 303 return outputs_ref, success, stats |
| 302 | 304 |
| 303 | 305 |
| 304 def map_and_run( | 306 def map_and_run( |
| 305 isolated_hash, storage, cache, leak_temp_dir, root_dir, hard_timeout, | 307 isolated_hash, args, storage, cache, leak_temp_dir, root_dir, hard_timeout, |
| 306 grace_period, extra_args): | 308 grace_period): |
| 307 """Maps and run the command. Returns metadata about the result.""" | 309 """Runs a command with optional isolated input/output. |
| 310 | |
| 311 See run_tha_test for argument documentation. | |
| 312 | |
| 313 Returns metadata about the result. | |
| 314 """ | |
| 308 result = { | 315 result = { |
| 309 'duration': None, | 316 'duration': None, |
| 310 'exit_code': None, | 317 'exit_code': None, |
| 311 'had_hard_timeout': False, | 318 'had_hard_timeout': False, |
| 312 'internal_failure': None, | 319 'internal_failure': None, |
| 313 'stats': { | 320 'stats': { |
| 314 # 'download': { | 321 # 'download': { |
| 315 # 'duration': 0., | 322 # 'duration': 0., |
| 316 # 'initial_number_items': 0, | 323 # 'initial_number_items': 0, |
| 317 # 'initial_size': 0, | 324 # 'initial_size': 0, |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 329 } | 336 } |
| 330 if root_dir: | 337 if root_dir: |
| 331 file_path.ensure_tree(root_dir, 0700) | 338 file_path.ensure_tree(root_dir, 0700) |
| 332 prefix = u'' | 339 prefix = u'' |
| 333 else: | 340 else: |
| 334 root_dir = os.path.dirname(cache.cache_dir) if cache.cache_dir else None | 341 root_dir = os.path.dirname(cache.cache_dir) if cache.cache_dir else None |
| 335 prefix = u'isolated_' | 342 prefix = u'isolated_' |
| 336 run_dir = make_temp_dir(prefix + u'run', root_dir) | 343 run_dir = make_temp_dir(prefix + u'run', root_dir) |
| 337 out_dir = make_temp_dir(prefix + u'out', root_dir) | 344 out_dir = make_temp_dir(prefix + u'out', root_dir) |
| 338 tmp_dir = make_temp_dir(prefix + u'tmp', root_dir) | 345 tmp_dir = make_temp_dir(prefix + u'tmp', root_dir) |
| 346 cwd = run_dir | |
| 347 | |
| 339 try: | 348 try: |
| 340 bundle, result['stats']['download'] = fetch_and_measure( | 349 command = args |
| 341 isolated_hash=isolated_hash, | 350 if isolated_hash: |
| 342 storage=storage, | 351 bundle, result['stats']['download'] = fetch_and_measure( |
| 343 cache=cache, | 352 isolated_hash=isolated_hash, |
| 344 outdir=run_dir) | 353 storage=storage, |
| 345 if not bundle.command: | 354 cache=cache, |
| 346 # Handle this as a task failure, not an internal failure. | 355 outdir=run_dir) |
| 347 sys.stderr.write( | 356 if not bundle.command: |
| 348 '<The .isolated doesn\'t declare any command to run!>\n' | 357 # Handle this as a task failure, not an internal failure. |
| 349 '<Check your .isolate for missing \'command\' variable>\n') | 358 sys.stderr.write( |
| 350 if os.environ.get('SWARMING_TASK_ID'): | 359 '<The .isolated doesn\'t declare any command to run!>\n' |
| 351 # Give an additional hint when running as a swarming task. | 360 '<Check your .isolate for missing \'command\' variable>\n') |
| 352 sys.stderr.write('<This occurs at the \'isolate\' step>\n') | 361 if os.environ.get('SWARMING_TASK_ID'): |
| 353 result['exit_code'] = 1 | 362 # Give an additional hint when running as a swarming task. |
| 354 return result | 363 sys.stderr.write('<This occurs at the \'isolate\' step>\n') |
| 364 result['exit_code'] = 1 | |
| 365 return result | |
| 355 | 366 |
| 356 change_tree_read_only(run_dir, bundle.read_only) | 367 change_tree_read_only(run_dir, bundle.read_only) |
| 357 cwd = os.path.normpath(os.path.join(run_dir, bundle.relative_cwd)) | 368 cwd = os.path.normpath(os.path.join(cwd, bundle.relative_cwd)) |
| 358 command = bundle.command + extra_args | 369 command = bundle.command + args |
| 359 file_path.ensure_command_has_abs_path(command, cwd) | 370 file_path.ensure_command_has_abs_path(command, cwd) |
| 360 sys.stdout.flush() | 371 sys.stdout.flush() |
| 361 start = time.time() | 372 start = time.time() |
| 362 try: | 373 try: |
| 363 result['exit_code'], result['had_hard_timeout'] = run_command( | 374 result['exit_code'], result['had_hard_timeout'] = run_command( |
| 364 process_command(command, out_dir), cwd, tmp_dir, hard_timeout, | 375 process_command(command, out_dir), cwd, tmp_dir, hard_timeout, |
| 365 grace_period) | 376 grace_period) |
| 366 finally: | 377 finally: |
| 367 result['duration'] = max(time.time() - start, 0) | 378 result['duration'] = max(time.time() - start, 0) |
| 368 except Exception as e: | 379 except Exception as e: |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 417 if not success and result['exit_code'] == 0: | 428 if not success and result['exit_code'] == 0: |
| 418 result['exit_code'] = 1 | 429 result['exit_code'] = 1 |
| 419 except Exception as e: | 430 except Exception as e: |
| 420 # Swallow any exception in the main finally clause. | 431 # Swallow any exception in the main finally clause. |
| 421 logging.exception('Leaking out_dir %s: %s', out_dir, e) | 432 logging.exception('Leaking out_dir %s: %s', out_dir, e) |
| 422 result['internal_failure'] = str(e) | 433 result['internal_failure'] = str(e) |
| 423 return result | 434 return result |
| 424 | 435 |
| 425 | 436 |
| 426 def run_tha_test( | 437 def run_tha_test( |
| 427 isolated_hash, storage, cache, leak_temp_dir, result_json, root_dir, | 438 isolated_hash, args, storage, cache, leak_temp_dir, result_json, root_dir, |
| 428 hard_timeout, grace_period, extra_args): | 439 hard_timeout, grace_period): |
| 429 """Downloads the dependencies in the cache, hardlinks them into a temporary | 440 """Runs an executable and records execution metadata. |
| 430 directory and runs the executable from there. | 441 |
| 442 If isolated_hash is specified, downloads the dependencies in the cache, | |
| 443 hardlinks them into a temporary directory and runs the command specified in | |
| 444 the .isolated. | |
| 431 | 445 |
| 432 A temporary directory is created to hold the output files. The content inside | 446 A temporary directory is created to hold the output files. The content inside |
| 433 this directory will be uploaded back to |storage| packaged as a .isolated | 447 this directory will be uploaded back to |storage| packaged as a .isolated |
| 434 file. | 448 file. |
| 435 | 449 |
| 436 Arguments: | 450 Arguments: |
| 437 isolated_hash: the SHA-1 of the .isolated file that must be retrieved to | 451 isolated_hash: if not empty, the SHA-1 of the .isolated file that must be |
| 438 recreate the tree of files to run the target executable. | 452 retrieved to recreate the tree of files to run the target |
| 453 executable. The command specified in the .isolated is | |
| 454 executed. | |
| 455 args: if isolated_hash is not empty, arguments to append to the command in | |
| 456 the .isolated file. Otherwise, the command line to execute. | |
|
M-A Ruel
2016/05/02 14:51:17
I'm not a fan of this ambivalence.
nodir
2016/05/02 17:43:22
fixed
| |
| 439 storage: an isolateserver.Storage object to retrieve remote objects. This | 457 storage: an isolateserver.Storage object to retrieve remote objects. This |
| 440 object has a reference to an isolateserver.StorageApi, which does | 458 object has a reference to an isolateserver.StorageApi, which does |
| 441 the actual I/O. | 459 the actual I/O. |
| 442 cache: an isolateserver.LocalCache to keep from retrieving the same objects | 460 cache: an isolateserver.LocalCache to keep from retrieving the same objects |
| 443 constantly by caching the objects retrieved. Can be on-disk or | 461 constantly by caching the objects retrieved. Can be on-disk or |
| 444 in-memory. | 462 in-memory. |
| 445 leak_temp_dir: if true, the temporary directory will be deliberately leaked | 463 leak_temp_dir: if true, the temporary directory will be deliberately leaked |
| 446 for later examination. | 464 for later examination. |
| 447 result_json: file path to dump result metadata into. If set, the process | 465 result_json: file path to dump result metadata into. If set, the process |
| 448 exit code is always 0 unless an internal error occurred. | 466 exit code is always 0 unless an internal error occurred. |
| 449 root_dir: directory to the path to use to create the temporary directory. If | 467 root_dir: directory to the path to use to create the temporary directory. If |
| 450 not specified, a random temporary directory is created. | 468 not specified, a random temporary directory is created. |
| 451 hard_timeout: kills the process if it lasts more than this amount of | 469 hard_timeout: kills the process if it lasts more than this amount of |
| 452 seconds. | 470 seconds. |
| 453 grace_period: number of seconds to wait between SIGTERM and SIGKILL. | 471 grace_period: number of seconds to wait between SIGTERM and SIGKILL. |
| 454 extra_args: optional arguments to add to the command stated in the .isolate | |
| 455 file. | |
| 456 | 472 |
| 457 Returns: | 473 Returns: |
| 458 Process exit code that should be used. | 474 Process exit code that should be used. |
| 459 """ | 475 """ |
| 476 if any(ISOLATED_OUTDIR_PARAMETER in a for a in args): | |
| 477 assert storage is not None, 'storage is None although outdir is specified' | |
| 478 | |
| 460 if result_json: | 479 if result_json: |
| 461 # Write a json output file right away in case we get killed. | 480 # Write a json output file right away in case we get killed. |
| 462 result = { | 481 result = { |
| 463 'exit_code': None, | 482 'exit_code': None, |
| 464 'had_hard_timeout': False, | 483 'had_hard_timeout': False, |
| 465 'internal_failure': 'Was terminated before completion', | 484 'internal_failure': 'Was terminated before completion', |
| 466 'outputs_ref': None, | 485 'outputs_ref': None, |
| 467 'version': 2, | 486 'version': 2, |
| 468 } | 487 } |
| 469 tools.write_json(result_json, result, dense=True) | 488 tools.write_json(result_json, result, dense=True) |
| 470 | 489 |
| 471 # run_isolated exit code. Depends on if result_json is used or not. | 490 # run_isolated exit code. Depends on if result_json is used or not. |
| 472 result = map_and_run( | 491 result = map_and_run( |
| 473 isolated_hash, storage, cache, leak_temp_dir, root_dir, hard_timeout, | 492 isolated_hash, args, storage, cache, leak_temp_dir, root_dir, |
| 474 grace_period, extra_args) | 493 hard_timeout, grace_period) |
| 475 logging.info('Result:\n%s', tools.format_json(result, dense=True)) | 494 logging.info('Result:\n%s', tools.format_json(result, dense=True)) |
| 476 if result_json: | 495 if result_json: |
| 477 # We've found tests to delete 'work' when quitting, causing an exception | 496 # We've found tests to delete 'work' when quitting, causing an exception |
| 478 # here. Try to recreate the directory if necessary. | 497 # here. Try to recreate the directory if necessary. |
| 479 file_path.ensure_tree(os.path.dirname(result_json)) | 498 file_path.ensure_tree(os.path.dirname(result_json)) |
| 480 tools.write_json(result_json, result, dense=True) | 499 tools.write_json(result_json, result, dense=True) |
| 481 # Only return 1 if there was an internal error. | 500 # Only return 1 if there was an internal error. |
| 482 return int(bool(result['internal_failure'])) | 501 return int(bool(result['internal_failure'])) |
| 483 | 502 |
| 484 # Marshall into old-style inline output. | 503 # Marshall into old-style inline output. |
| 485 if result['outputs_ref']: | 504 if result['outputs_ref']: |
| 486 data = { | 505 data = { |
| 487 'hash': result['outputs_ref']['isolated'], | 506 'hash': result['outputs_ref']['isolated'], |
| 488 'namespace': result['outputs_ref']['namespace'], | 507 'namespace': result['outputs_ref']['namespace'], |
| 489 'storage': result['outputs_ref']['isolatedserver'], | 508 'storage': result['outputs_ref']['isolatedserver'], |
| 490 } | 509 } |
| 491 sys.stdout.flush() | 510 sys.stdout.flush() |
| 492 print( | 511 print( |
| 493 '[run_isolated_out_hack]%s[/run_isolated_out_hack]' % | 512 '[run_isolated_out_hack]%s[/run_isolated_out_hack]' % |
| 494 tools.format_json(data, dense=True)) | 513 tools.format_json(data, dense=True)) |
| 495 sys.stdout.flush() | 514 sys.stdout.flush() |
| 496 return result['exit_code'] or int(bool(result['internal_failure'])) | 515 return result['exit_code'] or int(bool(result['internal_failure'])) |
| 497 | 516 |
| 498 | 517 |
| 499 def main(args): | 518 def main(args): |
| 500 parser = logging_utils.OptionParserWithLogging( | 519 parser = logging_utils.OptionParserWithLogging( |
| 501 usage='%prog <options>', | 520 usage='%prog <options> [args]', |
|
M-A Ruel
2016/05/02 14:51:17
[command to run or extra args]
nodir
2016/05/02 17:43:22
Done.
| |
| 502 version=__version__, | 521 version=__version__, |
| 503 log_file=RUN_ISOLATED_LOG_FILE) | 522 log_file=RUN_ISOLATED_LOG_FILE) |
| 504 parser.add_option( | 523 parser.add_option( |
| 505 '--clean', action='store_true', | 524 '--clean', action='store_true', |
| 506 help='Cleans the cache, trimming it if necessary and removing corrupted items ' | 525 help='Cleans the cache, trimming it if necessary and removing corrupted items ' |
| 507 'and returns without executing anything; use with -v to know what ' | 526 'and returns without executing anything; use with -v to know what ' |
| 508 'was done') | 527 'was done') |
| 509 parser.add_option( | 528 parser.add_option( |
| 510 '--json', | 529 '--json', |
| 511 help='dump output metadata to json file. When used, run_isolated returns ' | 530 help='dump output metadata to json file. When used, run_isolated returns ' |
| 512 'non-zero only on internal failure') | 531 'non-zero only on internal failure') |
| 513 parser.add_option( | 532 parser.add_option( |
| 514 '--hard-timeout', type='float', help='Enforce hard timeout in execution') | 533 '--hard-timeout', type='float', help='Enforce hard timeout in execution') |
| 515 parser.add_option( | 534 parser.add_option( |
| 516 '--grace-period', type='float', | 535 '--grace-period', type='float', |
| 517 help='Grace period between SIGTERM and SIGKILL') | 536 help='Grace period between SIGTERM and SIGKILL') |
| 518 data_group = optparse.OptionGroup(parser, 'Data source') | 537 data_group = optparse.OptionGroup(parser, 'Data source') |
| 519 data_group.add_option( | 538 data_group.add_option( |
| 520 '-s', '--isolated', | 539 '-s', '--isolated', |
| 521 help='Hash of the .isolated to grab from the isolate server') | 540 help='Hash of the .isolated to grab from the isolate server.') |
| 522 isolateserver.add_isolate_server_options(data_group) | 541 isolateserver.add_isolate_server_options(data_group) |
| 523 parser.add_option_group(data_group) | 542 parser.add_option_group(data_group) |
| 524 | 543 |
| 525 isolateserver.add_cache_options(parser) | 544 isolateserver.add_cache_options(parser) |
| 526 parser.set_defaults(cache='cache') | 545 parser.set_defaults(cache='cache') |
| 527 | 546 |
| 528 debug_group = optparse.OptionGroup(parser, 'Debugging') | 547 debug_group = optparse.OptionGroup(parser, 'Debugging') |
| 529 debug_group.add_option( | 548 debug_group.add_option( |
| 530 '--leak-temp-dir', | 549 '--leak-temp-dir', |
| 531 action='store_true', | 550 action='store_true', |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 542 if options.clean: | 561 if options.clean: |
| 543 if options.isolated: | 562 if options.isolated: |
| 544 parser.error('Can\'t use --isolated with --clean.') | 563 parser.error('Can\'t use --isolated with --clean.') |
| 545 if options.isolate_server: | 564 if options.isolate_server: |
| 546 parser.error('Can\'t use --isolate-server with --clean.') | 565 parser.error('Can\'t use --isolate-server with --clean.') |
| 547 if options.json: | 566 if options.json: |
| 548 parser.error('Can\'t use --json with --clean.') | 567 parser.error('Can\'t use --json with --clean.') |
| 549 cache.cleanup() | 568 cache.cleanup() |
| 550 return 0 | 569 return 0 |
| 551 | 570 |
| 571 if not options.isolated and not args: | |
| 572 parser.error('--isolated or args is required.') | |
|
M-A Ruel
2016/05/02 14:51:17
s/args/command to run/ ?
nodir
2016/05/02 17:43:22
Done.
| |
| 573 | |
| 552 auth.process_auth_options(parser, options) | 574 auth.process_auth_options(parser, options) |
| 553 isolateserver.process_isolate_server_options(parser, options, True) | 575 |
| 576 isolateserver.process_isolate_server_options( | |
| 577 parser, options, True, required=False) | |
| 578 if not options.isolate_server: | |
| 579 if options.isolated: | |
| 580 parser.error('--isolated requires --isolate-server') | |
| 581 if ISOLATED_OUTDIR_PARAMETER in args: | |
| 582 parser.error( | |
| 583 '%s in args requires --isolate-server' % ISOLATED_OUTDIR_PARAMETER) | |
| 554 | 584 |
| 555 if options.root_dir: | 585 if options.root_dir: |
| 556 options.root_dir = unicode(os.path.abspath(options.root_dir)) | 586 options.root_dir = unicode(os.path.abspath(options.root_dir)) |
| 557 if options.json: | 587 if options.json: |
| 558 options.json = unicode(os.path.abspath(options.json)) | 588 options.json = unicode(os.path.abspath(options.json)) |
| 559 if not options.isolated: | 589 |
| 560 parser.error('--isolated is required.') | 590 storage = None |
| 561 with isolateserver.get_storage( | 591 if options.isolate_server: |
| 562 options.isolate_server, options.namespace) as storage: | 592 storage = isolateserver.get_storage( |
| 593 options.isolate_server, options.namespace) | |
| 594 assert storage.hash_algo == cache.hash_algo | |
| 595 | |
| 596 with storage or tools.noop_context(): | |
|
M-A Ruel
2016/05/02 14:51:17
while I understand the goal to not copy paste the
nodir
2016/05/02 17:43:22
Done.
| |
| 563 # Hashing schemes used by |storage| and |cache| MUST match. | 597 # Hashing schemes used by |storage| and |cache| MUST match. |
| 564 assert storage.hash_algo == cache.hash_algo | |
| 565 return run_tha_test( | 598 return run_tha_test( |
| 566 options.isolated, storage, cache, options.leak_temp_dir, options.json, | 599 options.isolated, args, storage, cache, options.leak_temp_dir, |
| 567 options.root_dir, options.hard_timeout, options.grace_period, args) | 600 options.json, options.root_dir, options.hard_timeout, |
| 601 options.grace_period) | |
| 568 | 602 |
| 569 | 603 |
| 570 if __name__ == '__main__': | 604 if __name__ == '__main__': |
| 571 # Ensure that we are always running with the correct encoding. | 605 # Ensure that we are always running with the correct encoding. |
| 572 fix_encoding.fix_encoding() | 606 fix_encoding.fix_encoding() |
| 573 sys.exit(main(sys.argv[1:])) | 607 sys.exit(main(sys.argv[1:])) |
| OLD | NEW |