Chromium Code Reviews

Side by Side Diff: recipe_engine/run.py

Issue 2846703003: [recipes.py] move run arg parsing to its module. (Closed)
Patch Set: fixit Created 3 years, 7 months ago
1 1 # Copyright 2016 The LUCI Authors. All rights reserved.
2 2 # Use of this source code is governed under the Apache License, Version 2.0
3 3 # that can be found in the LICENSE file.
4 4
5 5 """Entry point for fully-annotated builds.
6 6
7 7 This script is part of the effort to move all builds to annotator-based
8 8 systems. Any builder configured to use the AnnotatorFactory.BaseFactory()
9 9 found in scripts/master/factory/annotator_factory.py executes a single
10 10 AddAnnotatedScript step. That step (found in annotator_commands.py) calls
(...skipping 48 matching lines...)
59 59 convenience functions defined:
60 60 * last_step - Returns the last step that ran or None
61 61 * nth_step(n) - Returns the N'th step that ran or None
62 62
63 63 'failed' is a boolean representing if the build is in a 'failed' state.
64 64 """
65 65
66 66 import collections
67 67 import json
68 68 import logging
69 import argparse
dnj 2017/04/27 17:14:01 nit: order
iannucci 2017/04/29 15:45:59 Done.
69 70 import os
70 71 import sys
71 72 import traceback
72 73
73 74 from . import loader
74 75 from . import recipe_api
75 76 from . import recipe_test_api
76 77 from . import types
77 78 from . import util
79
80 from . import env
dnj 2017/04/27 17:14:01 Shouldn't this be merged above?
iannucci 2017/04/29 15:45:59 I like to keep the env import line separate becaus
81
82 import subprocess42
83
78 from . import result_pb2 84 from . import result_pb2
79 import subprocess42 85
86 from google.protobuf import json_format as jsonpb
80 87
81 88
82 89 SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
83 90
84 91
85 92 # TODO(martiniss): Remove this
86 93 RecipeResult = collections.namedtuple('RecipeResult', 'result')
87 94
88 95 # TODO(dnj): Replace "properties" with a generic runtime instance. This instance
89 96 # will be used to seed recipe clients and expanded to include managed runtime
(...skipping 337 matching lines...)
427 434 results.append(
428 435 loader._invoke_with_properties(
429 436 run_recipe, properties, recipe_script.PROPERTIES,
430 437 properties.keys()))
431 438 except TypeError as e:
432 439 raise TypeError(
433 440 "Got %r while trying to call recipe %s with properties %r" % (
434 441 e, recipe, properties))
435 442
436 443 return results
444
445
446 def add_subparser(parser):
447 def properties_file_type(filename):
448 with (sys.stdin if filename == '-' else open(filename)) as f:
449 obj = json.load(f)
450 if not isinstance(obj, dict):
451 raise argparse.ArgumentTypeError(
452 'must contain a JSON object, i.e. `{}`.')
453 return obj
454
455 def parse_prop(prop):
456 key, val = prop.split('=', 1)
457 try:
458 val = json.loads(val)
459 except (ValueError, SyntaxError):
460 pass # If a value couldn't be evaluated, keep the string version
461 return {key: val}
462
463 def properties_type(value):
464 obj = json.loads(value)
465 if not isinstance(obj, dict):
466 raise argparse.ArgumentTypeError('must contain a JSON object, i.e. `{}`.')
467 return obj
468
469 run_p = parser.add_parser(
470 'run',
471 description='Run a recipe locally')
472
473 run_p.add_argument(
474 '--workdir',
475 type=os.path.abspath,
476 help='The working directory of recipe execution')
477 run_p.add_argument(
478 '--output-result-json',
479 type=os.path.abspath,
480 help='The file to write the JSON serialized returned value \
481 of the recipe to')
482 run_p.add_argument(
483 '--timestamps',
484 action='store_true',
485 help='If true, emit CURRENT_TIMESTAMP annotations. '
486 'Default: false. '
487 'CURRENT_TIMESTAMP annotation has one parameter, current time in '
488 'Unix timestamp format. '
489 'CURRENT_TIMESTAMP annotation will be printed at the beginning and '
490 'end of the annotation stream and also immediately before each '
491 'STEP_STARTED and STEP_CLOSED annotations.',
492 )
493 prop_group = run_p.add_mutually_exclusive_group()
494 prop_group.add_argument(
495 '--properties-file',
496 dest='properties',
497 type=properties_file_type,
498 help=('A file containing a json blob of properties. '
499 'Pass "-" to read from stdin'))
500 prop_group.add_argument(
501 '--properties',
502 type=properties_type,
503 help='A json string containing the properties')
504
505 run_p.add_argument(
506 'recipe',
507 help='The recipe to execute')
508 run_p.add_argument(
509 'props',
510 nargs=argparse.REMAINDER,
511 type=parse_prop,
512 help='A list of property pairs; e.g. mastername=chromium.linux '
513 'issue=12345. The property value will be decoded as JSON, but if '
514 'this decoding fails the value will be interpreted as a string.')
515
516 run_p.set_defaults(command='run', properties={}, func=main)
517
dnj 2017/04/27 17:14:01 nit: two spaces (here and elsewhere)
iannucci 2017/04/29 15:45:59 Done.
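A note on the property handling above: the positional props arguments are run through parse_prop, which first tries to decode the value as JSON and falls back to the raw string when decoding fails, exactly as the help text describes. A standalone mirror of that helper so the behaviour can be checked outside the engine (the sample property names are illustrative):

import json

def parse_prop(prop):
  # Mirror of the helper defined in add_subparser() above.
  key, val = prop.split('=', 1)
  try:
    val = json.loads(val)
  except (ValueError, SyntaxError):
    pass  # not valid JSON; keep the string as-is
  return {key: val}

print(parse_prop('issue=12345'))                # {'issue': 12345}, decoded as a JSON number
print(parse_prop('mastername=chromium.linux'))  # value kept as the plain string 'chromium.linux'
print(parse_prop('use_goma=true'))              # {'use_goma': True}, decoded as a JSON boolean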
518 def handle_recipe_return(recipe_result, result_filename, stream_engine,
519 engine_flags):
520 if engine_flags and engine_flags.use_result_proto:
521 return new_handle_recipe_return(
522 recipe_result, result_filename, stream_engine)
523
524 if 'recipe_result' in recipe_result.result:
525 result_string = json.dumps(
526 recipe_result.result['recipe_result'], indent=2)
527 if result_filename:
528 with open(result_filename, 'w') as f:
529 f.write(result_string)
530 with stream_engine.make_step_stream('recipe result') as s:
531 with s.new_log_stream('result') as l:
532 l.write_split(result_string)
533
534 if 'traceback' in recipe_result.result:
535 with stream_engine.make_step_stream('Uncaught Exception') as s:
536 with s.new_log_stream('exception') as l:
537 for line in recipe_result.result['traceback']:
538 l.write_line(line)
539
540 if 'reason' in recipe_result.result:
541 with stream_engine.make_step_stream('Failure reason') as s:
542 with s.new_log_stream('reason') as l:
543 for line in recipe_result.result['reason'].splitlines():
544 l.write_line(line)
545
546 if 'status_code' in recipe_result.result:
547 return recipe_result.result['status_code']
548 else:
549 return 0
550
551 def new_handle_recipe_return(result, result_filename, stream_engine):
552 if result_filename:
553 with open(result_filename, 'w') as fil:
554 fil.write(jsonpb.MessageToJson(
555 result, including_default_value_fields=True))
556
557 if result.json_result:
558 with stream_engine.make_step_stream('recipe result') as s:
559 with s.new_log_stream('result') as l:
560 l.write_split(result.json_result)
561
562 if result.HasField('failure'):
563 f = result.failure
564 if f.HasField('exception'):
565 with stream_engine.make_step_stream('Uncaught Exception') as s:
566 s.add_step_text(f.human_reason)
567 with s.new_log_stream('exception') as l:
568 for line in f.exception.traceback:
569 l.write_line(line)
570 # TODO(martiniss): Remove this code once calling code handles these states
571 elif f.HasField('timeout'):
572 with stream_engine.make_step_stream('Step Timed Out') as s:
573 with s.new_log_stream('timeout_s') as l:
574 l.write_line(f.timeout.timeout_s)
575 elif f.HasField('step_data'):
576 with stream_engine.make_step_stream('Invalid Step Data Access') as s:
577 with s.new_log_stream('step') as l:
578 l.write_line(f.step_data.step)
579
580 with stream_engine.make_step_stream('Failure reason') as s:
581 with s.new_log_stream('reason') as l:
582 l.write_split(f.human_reason)
583
584 return 1
585
586 return 0
587
588
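For reference, jsonpb above is google.protobuf.json_format, so new_handle_recipe_return serializes the result message with MessageToJson, keeping default-valued fields. A minimal sketch of that call pattern, using the generic Struct well-known type as a stand-in because result_pb2 is not importable outside the engine:

from google.protobuf import json_format as jsonpb
from google.protobuf import struct_pb2

msg = struct_pb2.Struct()
msg.update({'status_code': 0})
# Serialize to a JSON string, including fields that still hold default values.
print(jsonpb.MessageToJson(msg, including_default_value_fields=True))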
589 # Map of arguments_pb2.Property "value" oneof conversion functions.
590 #
591 # The fields here should be kept in sync with the "value" oneof field names in
592 # the arguments_pb2.Arguments.Property protobuf message.
593 _OP_PROPERTY_CONV = {
594 's': lambda prop: prop.s,
595 'int': lambda prop: prop.int,
596 'uint': lambda prop: prop.uint,
597 'd': lambda prop: prop.d,
598 'b': lambda prop: prop.b,
599 'data': lambda prop: prop.data,
600 'map': lambda prop: _op_properties_to_dict(prop.map.property),
601 'list': lambda prop: [_op_property_value(v) for v in prop.list.property],
602 }
603
604 def _op_property_value(prop):
605 """Returns the Python-converted value of an arguments_pb2.Property.
606
607 Args:
608 prop (arguments_pb2.Property): property to convert.
609 Returns: The converted value.
610 Raises:
611 ValueError: If 'prop' is incomplete or invalid.
612 """
613 typ = prop.WhichOneof('value')
614 conv = _OP_PROPERTY_CONV.get(typ)
615 if not conv:
616 raise ValueError('Unknown property field [%s]' % (typ,))
617 return conv(prop)
618
619
620 def _op_properties_to_dict(pmap):
621 """Creates a properties dictionary from an arguments_pb2.PropertyMap entry.
622
623 Args:
624 pmap (arguments_pb2.PropertyMap): Map to convert to dictionary form.
625 Returns (dict): A dictionary derived from the properties in 'pmap'.
626 """
627 return dict((k, _op_property_value(pmap[k])) for k in pmap)
628
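The conversion table above dispatches on the name of the protobuf "value" oneof field. A self-contained sketch of the same pattern, using a hand-rolled stand-in object rather than a real arguments_pb2.Property (only the 's' and 'int' cases are shown):

_CONV = {
  's': lambda prop: prop.s,
  'int': lambda prop: prop.int,
}

class FakeProperty(object):
  # Stand-in exposing the same WhichOneof() accessor a protobuf message provides.
  def __init__(self, field, value):
    self._field = field
    setattr(self, field, value)

  def WhichOneof(self, _oneof_name):
    return self._field

def convert(prop):
  conv = _CONV.get(prop.WhichOneof('value'))
  if not conv:
    raise ValueError('Unknown property field [%s]' % (prop.WhichOneof('value'),))
  return conv(prop)

print(convert(FakeProperty('s', 'hello')))  # 'hello'
print(convert(FakeProperty('int', 42)))     # 42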
629
630 def main(package_deps, args):
631 from recipe_engine import step_runner
632 from recipe_engine import stream
633 from recipe_engine import stream_logdog
634
635 config_file = args.package
636
637 if args.props:
638 for p in args.props:
639 args.properties.update(p)
640
641 def get_properties_from_operational_args(op_args):
642 if not op_args.properties.property:
643 return None
644 return _op_properties_to_dict(op_args.properties.property)
645
646 op_args = args.operational_args
647 op_properties = get_properties_from_operational_args(op_args)
648 if args.properties and op_properties:
649 raise ValueError(
650 'Got operational args properties as well as CLI properties.')
651
652 properties = op_properties
653 if not properties:
654 properties = args.properties
655
656 properties['recipe'] = args.recipe
657
658 properties = util.strip_unicode(properties)
659
660 os.environ['PYTHONUNBUFFERED'] = '1'
661 os.environ['PYTHONIOENCODING'] = 'UTF-8'
662
663 universe_view = loader.UniverseView(
664 loader.RecipeUniverse(
665 package_deps, config_file), package_deps.root_package)
666
667 # TODO(iannucci): this is horrible; why do we want to set a workdir anyway?
668 # Shouldn't the caller of recipes just CD somewhere if they want a different
669 # workdir?
670 workdir = (args.workdir or
671 os.path.join(SCRIPT_PATH, os.path.pardir, 'workdir'))
672 logging.info('Using %s as work directory' % workdir)
673 if not os.path.exists(workdir):
674 os.makedirs(workdir)
675
676 old_cwd = os.getcwd()
677 os.chdir(workdir)
678
679 # Construct our stream engines. We may want to share stream events with more
680 # than one StreamEngine implementation, so we will accumulate them in a
681 # "stream_engines" list and compose them into a MultiStreamEngine.
682 def build_annotation_stream_engine():
683 return stream.AnnotatorStreamEngine(
684 sys.stdout,
685 emit_timestamps=(args.timestamps or
686 op_args.annotation_flags.emit_timestamp))
687
688 stream_engines = []
689 if op_args.logdog.streamserver_uri:
690 logging.debug('Using LogDog with parameters: [%s]', op_args.logdog)
691 stream_engines.append(stream_logdog.StreamEngine(
692 streamserver_uri=op_args.logdog.streamserver_uri,
693 name_base=(op_args.logdog.name_base or None),
694 dump_path=op_args.logdog.final_annotation_dump_path,
695 ))
696
697 # If we're teeing, also fold in a standard annotation stream engine.
698 if op_args.logdog.tee:
699 stream_engines.append(build_annotation_stream_engine())
700 else:
701 # Not using LogDog; use a standard annotation stream engine.
702 stream_engines.append(build_annotation_stream_engine())
703 multi_stream_engine = stream.MultiStreamEngine.create(*stream_engines)
704
705 emit_initial_properties = op_args.annotation_flags.emit_initial_properties
706 engine_flags = op_args.engine_flags
707
708 # Have a top-level set of invariants to enforce StreamEngine expectations.
709 with stream.StreamEngineInvariants.wrap(multi_stream_engine) as stream_engine:
710 try:
711 ret = run_steps(
712 properties, stream_engine,
713 step_runner.SubprocessStepRunner(stream_engine, engine_flags),
714 universe_view, engine_flags=engine_flags,
715 emit_initial_properties=emit_initial_properties)
716 finally:
717 os.chdir(old_cwd)
718
719 return handle_recipe_return(
720 ret, args.output_result_json, stream_engine, engine_flags)
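Stepping back, the point of this CL is that run.py now registers its own 'run' subcommand and stashes its entry point with set_defaults(func=main), so the top-level recipes.py can dispatch to it without hard-coding the run arguments. A self-contained sketch of that dispatch pattern (the names and the simplified func signature here are illustrative, not the engine's actual API):

import argparse

def fake_main(args):
  # In the real engine the hook is main(package_deps, args); simplified here.
  print('would run recipe %r with properties %r' % (args.recipe, args.properties))
  return 0

def add_fake_subparser(subparsers):
  p = subparsers.add_parser('run', description='Run a recipe locally')
  p.add_argument('recipe')
  p.set_defaults(command='run', properties={}, func=fake_main)

parser = argparse.ArgumentParser(prog='recipes.py')
add_fake_subparser(parser.add_subparsers())

args = parser.parse_args(['run', 'engine_tests/whitelist_steps'])
exit_code = args.func(args)  # dispatch through the set_defaults() hook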