| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2015 The LUCI Authors. All rights reserved. | 2 # Copyright 2015 The LUCI Authors. All rights reserved. |
| 3 # Use of this source code is governed under the Apache License, Version 2.0 | 3 # Use of this source code is governed under the Apache License, Version 2.0 |
| 4 # that can be found in the LICENSE file. | 4 # that can be found in the LICENSE file. |
| 5 | 5 |
| 6 """Tool to interact with recipe repositories. | 6 """Tool to interact with recipe repositories. |
| 7 | 7 |
| 8 This tool operates on the nearest ancestor directory containing an | 8 This tool operates on the nearest ancestor directory containing an |
| 9 infra/config/recipes.cfg. | 9 infra/config/recipes.cfg. |
| 10 """ | 10 """ |
| (...skipping 12 matching lines...) |
| 23 reload(sys) | 23 reload(sys) |
| 24 sys.setdefaultencoding('UTF8') | 24 sys.setdefaultencoding('UTF8') |
| 25 | 25 |
| 26 ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) | 26 ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) |
| 27 sys.path.insert(0, ROOT_DIR) | 27 sys.path.insert(0, ROOT_DIR) |
| 28 | 28 |
| 29 from recipe_engine import env | 29 from recipe_engine import env |
| 30 | 30 |
| 31 import argparse # this is vendored | 31 import argparse # this is vendored |
| 32 from recipe_engine import arguments_pb2 | 32 from recipe_engine import arguments_pb2 |
| 33 from recipe_engine import util as recipe_util | |
| 34 from google.protobuf import json_format as jsonpb | 33 from google.protobuf import json_format as jsonpb |
| 35 | 34 |
| 36 | 35 |
| 37 def handle_recipe_return(recipe_result, result_filename, stream_engine, | 36 from recipe_engine import fetch, lint_test, bundle, depgraph, autoroll |
| 38 engine_flags): | 37 from recipe_engine import remote, refs, doc, test, run |
| 39 if engine_flags and engine_flags.use_result_proto: | |
| 40 return new_handle_recipe_return( | |
| 41 recipe_result, result_filename, stream_engine) | |
| 42 | |
| 43 if 'recipe_result' in recipe_result.result: | |
| 44 result_string = json.dumps( | |
| 45 recipe_result.result['recipe_result'], indent=2) | |
| 46 if result_filename: | |
| 47 with open(result_filename, 'w') as f: | |
| 48 f.write(result_string) | |
| 49 with stream_engine.make_step_stream('recipe result') as s: | |
| 50 with s.new_log_stream('result') as l: | |
| 51 l.write_split(result_string) | |
| 52 | |
| 53 if 'traceback' in recipe_result.result: | |
| 54 with stream_engine.make_step_stream('Uncaught Exception') as s: | |
| 55 with s.new_log_stream('exception') as l: | |
| 56 for line in recipe_result.result['traceback']: | |
| 57 l.write_line(line) | |
| 58 | |
| 59 if 'reason' in recipe_result.result: | |
| 60 with stream_engine.make_step_stream('Failure reason') as s: | |
| 61 with s.new_log_stream('reason') as l: | |
| 62 for line in recipe_result.result['reason'].splitlines(): | |
| 63 l.write_line(line) | |
| 64 | |
| 65 if 'status_code' in recipe_result.result: | |
| 66 return recipe_result.result['status_code'] | |
| 67 else: | |
| 68 return 0 | |
| 69 | |
| 70 def new_handle_recipe_return(result, result_filename, stream_engine): | |
| 71 if result_filename: | |
| 72 with open(result_filename, 'w') as fil: | |
| 73 fil.write(jsonpb.MessageToJson( | |
| 74 result, including_default_value_fields=True)) | |
| 75 | |
| 76 if result.json_result: | |
| 77 with stream_engine.make_step_stream('recipe result') as s: | |
| 78 with s.new_log_stream('result') as l: | |
| 79 l.write_split(result.json_result) | |
| 80 | |
| 81 if result.HasField('failure'): | |
| 82 f = result.failure | |
| 83 if f.HasField('exception'): | |
| 84 with stream_engine.make_step_stream('Uncaught Exception') as s: | |
| 85 s.add_step_text(f.human_reason) | |
| 86 with s.new_log_stream('exception') as l: | |
| 87 for line in f.exception.traceback: | |
| 88 l.write_line(line) | |
| 89 # TODO(martiniss): Remove this code once calling code handles these states | |
| 90 elif f.HasField('timeout'): | |
| 91 with stream_engine.make_step_stream('Step Timed Out') as s: | |
| 92 with s.new_log_stream('timeout_s') as l: | |
| 93 l.write_line(f.timeout.timeout_s) | |
| 94 elif f.HasField('step_data'): | |
| 95 with stream_engine.make_step_stream('Invalid Step Data Access') as s: | |
| 96 with s.new_log_stream('step') as l: | |
| 97 l.write_line(f.step_data.step) | |
| 98 | |
| 99 with stream_engine.make_step_stream('Failure reason') as s: | |
| 100 with s.new_log_stream('reason') as l: | |
| 101 l.write_split(f.human_reason) | |
| 102 | |
| 103 return 1 | |
| 104 | |
| 105 return 0 | |
| 106 | 38 |
| 107 | 39 |
| 108 def run(config_file, package_deps, args): | 40 _SUBCOMMANDS = [ |
| 109 from recipe_engine import run as recipe_run | 41 autoroll, |
| 110 from recipe_engine import loader | 42 bundle, |
| 111 from recipe_engine import step_runner | 43 depgraph, |
| 112 from recipe_engine import stream | 44 doc, |
| 113 from recipe_engine import stream_logdog | 45 fetch, |
| 114 | 46 lint_test, |
| 115 if args.props: | 47 refs, |
| 116 for p in args.props: | 48 remote, |
| 117 args.properties.update(p) | 49 run, |
| 118 | 50 test, |
| 119 def get_properties_from_operational_args(op_args): | 51 ] |
| 120 if not op_args.properties.property: | |
| 121 return None | |
| 122 return _op_properties_to_dict(op_args.properties.property) | |
| 123 | |
| 124 op_args = args.operational_args | |
| 125 op_properties = get_properties_from_operational_args(op_args) | |
| 126 if args.properties and op_properties: | |
| 127 raise ValueError( | |
| 128 'Got operational args properties as well as CLI properties.') | |
| 129 | |
| 130 properties = op_properties | |
| 131 if not properties: | |
| 132 properties = args.properties | |
| 133 | |
| 134 properties['recipe'] = args.recipe | |
| 135 | |
| 136 properties = recipe_util.strip_unicode(properties) | |
| 137 | |
| 138 os.environ['PYTHONUNBUFFERED'] = '1' | |
| 139 os.environ['PYTHONIOENCODING'] = 'UTF-8' | |
| 140 | |
| 141 universe_view = loader.UniverseView( | |
| 142 loader.RecipeUniverse( | |
| 143 package_deps, config_file), package_deps.root_package) | |
| 144 | |
| 145 workdir = (args.workdir or | |
| 146 os.path.join(os.path.dirname(os.path.realpath(__file__)), 'workdir')) | |
| 147 logging.info('Using %s as work directory' % workdir) | |
| 148 if not os.path.exists(workdir): | |
| 149 os.makedirs(workdir) | |
| 150 | |
| 151 old_cwd = os.getcwd() | |
| 152 os.chdir(workdir) | |
| 153 | |
| 154 # Construct our stream engines. We may want to share stream events with more | |
| 155 # than one StreamEngine implementation, so we will accumulate them in a | |
| 156 # "stream_engines" list and compose them into a MultiStreamEngine. | |
| 157 def build_annotation_stream_engine(): | |
| 158 return stream.AnnotatorStreamEngine( | |
| 159 sys.stdout, | |
| 160 emit_timestamps=(args.timestamps or | |
| 161 op_args.annotation_flags.emit_timestamp)) | |
| 162 | |
| 163 stream_engines = [] | |
| 164 if op_args.logdog.streamserver_uri: | |
| 165 logging.debug('Using LogDog with parameters: [%s]', op_args.logdog) | |
| 166 stream_engines.append(stream_logdog.StreamEngine( | |
| 167 streamserver_uri=op_args.logdog.streamserver_uri, | |
| 168 name_base=(op_args.logdog.name_base or None), | |
| 169 dump_path=op_args.logdog.final_annotation_dump_path, | |
| 170 )) | |
| 171 | |
| 172 # If we're teeing, also fold in a standard annotation stream engine. | |
| 173 if op_args.logdog.tee: | |
| 174 stream_engines.append(build_annotation_stream_engine()) | |
| 175 else: | |
| 176 # Not using LogDog; use a standard annotation stream engine. | |
| 177 stream_engines.append(build_annotation_stream_engine()) | |
| 178 multi_stream_engine = stream.MultiStreamEngine.create(*stream_engines) | |
| 179 | |
| 180 emit_initial_properties = op_args.annotation_flags.emit_initial_properties | |
| 181 engine_flags = op_args.engine_flags | |
| 182 | |
| 183 # Have a top-level set of invariants to enforce StreamEngine expectations. | |
| 184 with stream.StreamEngineInvariants.wrap(multi_stream_engine) as stream_engine: | |
| 185 try: | |
| 186 ret = recipe_run.run_steps( | |
| 187 properties, stream_engine, | |
| 188 step_runner.SubprocessStepRunner(stream_engine, engine_flags), | |
| 189 universe_view, engine_flags=engine_flags, | |
| 190 emit_initial_properties=emit_initial_properties) | |
| 191 finally: | |
| 192 os.chdir(old_cwd) | |
| 193 | |
| 194 return handle_recipe_return( | |
| 195 ret, args.output_result_json, stream_engine, engine_flags) | |
| 196 | |
| 197 | |
| 198 class ProjectOverrideAction(argparse.Action): | |
| 199 def __call__(self, parser, namespace, values, option_string=None): | |
| 200 p = values.split('=', 2) | |
| 201 if len(p) != 2: | |
| 202 raise ValueError('Override must have the form: repo=path') | |
| 203 project_id, path = p | |
| 204 | |
| 205 v = getattr(namespace, self.dest, None) | |
| 206 if v is None: | |
| 207 v = {} | |
| 208 setattr(namespace, self.dest, v) | |
| 209 | |
| 210 if v.get(project_id): | |
| 211 raise ValueError('An override is already defined for [%s] (%s)' % ( | |
| 212 project_id, v[project_id])) | |
| 213 path = os.path.abspath(os.path.expanduser(path)) | |
| 214 if not os.path.isdir(path): | |
| 215 raise ValueError('Override path [%s] is not a directory' % (path,)) | |
| 216 v[project_id] = path | |
| 217 | |
| 218 | |
| 219 # Map of arguments_pb2.Property "value" oneof conversion functions. | |
| 220 # | |
| 221 # The fields here should be kept in sync with the "value" oneof field names in | |
| 222 # the arguments_pb2.Arguments.Property protobuf message. | |
| 223 _OP_PROPERTY_CONV = { | |
| 224 's': lambda prop: prop.s, | |
| 225 'int': lambda prop: prop.int, | |
| 226 'uint': lambda prop: prop.uint, | |
| 227 'd': lambda prop: prop.d, | |
| 228 'b': lambda prop: prop.b, | |
| 229 'data': lambda prop: prop.data, | |
| 230 'map': lambda prop: _op_properties_to_dict(prop.map.property), | |
| 231 'list': lambda prop: [_op_property_value(v) for v in prop.list.property], | |
| 232 } | |
| 233 | |
| 234 def _op_property_value(prop): | |
| 235 """Returns the Python-converted value of an arguments_pb2.Property. | |
| 236 | |
| 237 Args: | |
| 238 prop (arguments_pb2.Property): property to convert. | |
| 239 Returns: The converted value. | |
| 240 Raises: | |
| 241 ValueError: If 'prop' is incomplete or invalid. | |
| 242 """ | |
| 243 typ = prop.WhichOneof('value') | |
| 244 conv = _OP_PROPERTY_CONV.get(typ) | |
| 245 if not conv: | |
| 246 raise ValueError('Unknown property field [%s]' % (typ,)) | |
| 247 return conv(prop) | |
| 248 | |
| 249 | |
| 250 def _op_properties_to_dict(pmap): | |
| 251 """Creates a properties dictionary from an arguments_pb2.PropertyMap entry. | |
| 252 | |
| 253 Args: | |
| 254 pmap (arguments_pb2.PropertyMap): Map to convert to dictionary form. | |
| 255 Returns (dict): A dictionary derived from the properties in 'pmap'. | |
| 256 """ | |
| 257 return dict((k, _op_property_value(pmap[k])) for k in pmap) | |
| 258 | 52 |
| 259 | 53 |
| 260 def add_common_args(parser): | 54 def add_common_args(parser): |
| 261 from recipe_engine import package_io | 55 from recipe_engine import package_io |
| 262 | 56 |
| 57 class ProjectOverrideAction(argparse.Action): |
| 58 def __call__(self, parser, namespace, values, option_string=None): |
| 59 p = values.split('=', 2) |
| 60 if len(p) != 2: |
| 61 raise ValueError('Override must have the form: repo=path') |
| 62 project_id, path = p |
| 63 |
| 64 v = getattr(namespace, self.dest, None) |
| 65 if v is None: |
| 66 v = {} |
| 67 setattr(namespace, self.dest, v) |
| 68 |
| 69 if v.get(project_id): |
| 70 raise ValueError('An override is already defined for [%s] (%s)' % ( |
| 71 project_id, v[project_id])) |
| 72 path = os.path.abspath(os.path.expanduser(path)) |
| 73 if not os.path.isdir(path): |
| 74 raise ValueError('Override path [%s] is not a directory' % (path,)) |
| 75 v[project_id] = path |
| 76 |
| 263 def package_type(value): | 77 def package_type(value): |
| 264 if not os.path.isfile(value): | 78 if not os.path.isfile(value): |
| 265 raise argparse.ArgumentTypeError( | 79 raise argparse.ArgumentTypeError( |
| 266 'Given recipes config file %r does not exist.' % (value,)) | 80 'Given recipes config file %r does not exist.' % (value,)) |
| 267 return package_io.PackageFile(value) | 81 return package_io.PackageFile(value) |
| 268 | 82 |
| 269 parser.add_argument( | 83 parser.add_argument( |
| 270 '--package', | 84 '--package', |
| 271 type=package_type, | 85 type=package_type, |
| 272 help='Path to recipes.cfg of the recipe package to operate on' | 86 help='Path to recipes.cfg of the recipe package to operate on' |
| (...skipping 56 matching lines...) |
| 329 | 143 |
| 330 return post_process_args | 144 return post_process_args |
| 331 | 145 |
| 332 | 146 |
| 333 def main(): | 147 def main(): |
| 334 parser = argparse.ArgumentParser( | 148 parser = argparse.ArgumentParser( |
| 335 description='Interact with the recipe system.') | 149 description='Interact with the recipe system.') |
| 336 | 150 |
| 337 common_postprocess_func = add_common_args(parser) | 151 common_postprocess_func = add_common_args(parser) |
| 338 | 152 |
| 339 from recipe_engine import fetch, lint_test, bundle, depgraph, autoroll | |
| 340 from recipe_engine import remote, refs, doc, test | |
| 341 to_add = [ | |
| 342 fetch, lint_test, bundle, depgraph, autoroll, remote, refs, doc, test, | |
| 343 ] | |
| 344 | |
| 345 subp = parser.add_subparsers() | 153 subp = parser.add_subparsers() |
| 346 for module in to_add: | 154 for module in _SUBCOMMANDS: |
| 347 module.add_subparser(subp) | 155 module.add_subparser(subp) |
| 348 | 156 |
| 349 | |
| 350 def properties_file_type(filename): | |
| 351 with (sys.stdin if filename == '-' else open(filename)) as f: | |
| 352 obj = json.load(f) | |
| 353 if not isinstance(obj, dict): | |
| 354 raise argparse.ArgumentTypeError( | |
| 355 'must contain a JSON object, i.e. `{}`.') | |
| 356 return obj | |
| 357 | |
| 358 def parse_prop(prop): | |
| 359 key, val = prop.split('=', 1) | |
| 360 try: | |
| 361 val = json.loads(val) | |
| 362 except (ValueError, SyntaxError): | |
| 363 pass # If a value couldn't be evaluated, keep the string version | |
| 364 return {key: val} | |
| 365 | |
| 366 def properties_type(value): | |
| 367 obj = json.loads(value) | |
| 368 if not isinstance(obj, dict): | |
| 369 raise argparse.ArgumentTypeError('must contain a JSON object, i.e. `{}`.') | |
| 370 return obj | |
| 371 | |
| 372 run_p = subp.add_parser( | |
| 373 'run', | |
| 374 description='Run a recipe locally') | |
| 375 run_p.set_defaults(command='run', properties={}) | |
| 376 | |
| 377 run_p.add_argument( | |
| 378 '--workdir', | |
| 379 type=os.path.abspath, | |
| 380 help='The working directory of recipe execution') | |
| 381 run_p.add_argument( | |
| 382 '--output-result-json', | |
| 383 type=os.path.abspath, | |
| 384 help='The file to write the JSON serialized returned value \ | |
| 385 of the recipe to') | |
| 386 run_p.add_argument( | |
| 387 '--timestamps', | |
| 388 action='store_true', | |
| 389 help='If true, emit CURRENT_TIMESTAMP annotations. ' | |
| 390 'Default: false. ' | |
| 391 'CURRENT_TIMESTAMP annotation has one parameter, current time in ' | |
| 392 'Unix timestamp format. ' | |
| 393 'CURRENT_TIMESTAMP annotation will be printed at the beginning and ' | |
| 394 'end of the annotation stream and also immediately before each ' | |
| 395 'STEP_STARTED and STEP_CLOSED annotations.', | |
| 396 ) | |
| 397 prop_group = run_p.add_mutually_exclusive_group() | |
| 398 prop_group.add_argument( | |
| 399 '--properties-file', | |
| 400 dest='properties', | |
| 401 type=properties_file_type, | |
| 402 help=('A file containing a json blob of properties. ' | |
| 403 'Pass "-" to read from stdin')) | |
| 404 prop_group.add_argument( | |
| 405 '--properties', | |
| 406 type=properties_type, | |
| 407 help='A json string containing the properties') | |
| 408 | |
| 409 run_p.add_argument( | |
| 410 'recipe', | |
| 411 help='The recipe to execute') | |
| 412 run_p.add_argument( | |
| 413 'props', | |
| 414 nargs=argparse.REMAINDER, | |
| 415 type=parse_prop, | |
| 416 help='A list of property pairs; e.g. mastername=chromium.linux ' | |
| 417 'issue=12345. The property value will be decoded as JSON, but if ' | |
| 418 'this decoding fails the value will be interpreted as a string.') | |
| 419 | |
| 420 args = parser.parse_args() | 157 args = parser.parse_args() |
| 421 common_postprocess_func(parser, args) | 158 common_postprocess_func(parser, args) |
| 422 args.postprocess_func(parser, args) | 159 args.postprocess_func(parser, args) |
| 423 | 160 |
| 424 # TODO(iannucci): We should always do logging.basicConfig() (probably with | 161 # TODO(iannucci): We should always do logging.basicConfig() (probably with |
| 425 # logging.WARNING), even if no verbose is passed. However we need to be | 162 # logging.WARNING), even if no verbose is passed. However we need to be |
| 426 # careful as this could cause issues with spurious/unexpected output. I think | 163 # careful as this could cause issues with spurious/unexpected output. I think |
| 427 # it's risky enough to do in a different CL. | 164 # it's risky enough to do in a different CL. |
| 428 | 165 |
| 429 if args.verbose > 0: | 166 if args.verbose > 0: |
| (...skipping 72 matching lines...) |
| 502 exc_info=excinfo) | 239 exc_info=excinfo) |
| 503 shutil.rmtree(temp_deps_dir, onerror=on_error) | 240 shutil.rmtree(temp_deps_dir, onerror=on_error) |
| 504 | 241 |
| 505 | 242 |
| 506 def _real_main(args): | 243 def _real_main(args): |
| 507 from recipe_engine import package | 244 from recipe_engine import package |
| 508 | 245 |
| 509 if args.bare_command: | 246 if args.bare_command: |
| 510 return args.func(None, args) | 247 return args.func(None, args) |
| 511 | 248 |
| 512 config_file = args.package | |
| 513 repo_root = package.InfraRepoConfig().from_recipes_cfg(args.package.path) | 249 repo_root = package.InfraRepoConfig().from_recipes_cfg(args.package.path) |
| 514 | 250 |
| 515 try: | 251 try: |
| 516 # TODO(phajdan.jr): gracefully handle inconsistent deps when rolling. | 252 # TODO(phajdan.jr): gracefully handle inconsistent deps when rolling. |
| 517 # This fails if the starting point does not have consistent dependency | 253 # This fails if the starting point does not have consistent dependency |
| 518 # graph. When performing an automated roll, it'd make sense to attempt | 254 # graph. When performing an automated roll, it'd make sense to attempt |
| 519 # to automatically find a consistent state, rather than bailing out. | 255 # to automatically find a consistent state, rather than bailing out. |
| 520 # Especially that only some subcommands refer to package_deps. | 256 # Especially that only some subcommands refer to package_deps. |
| 521 package_deps = package.PackageDeps.create( | 257 package_deps = package.PackageDeps.create( |
| 522 repo_root, config_file, allow_fetch=not args.no_fetch, | 258 repo_root, args.package, allow_fetch=not args.no_fetch, |
| 523 deps_path=args.deps_path, overrides=args.project_override) | 259 deps_path=args.deps_path, overrides=args.project_override) |
| 524 except subprocess.CalledProcessError: | 260 except subprocess.CalledProcessError: |
| 525 # A git checkout failed somewhere. Return 2, which is the sign that this is | 261 # A git checkout failed somewhere. Return 2, which is the sign that this is |
| 526 # an infra failure, rather than a test failure. | 262 # an infra failure, rather than a test failure. |
| 527 return 2 | 263 return 2 |
| 528 | 264 |
| 529 if hasattr(args, 'func'): | 265 return args.func(package_deps, args) |
| 530 return args.func(package_deps, args) | |
| 531 | 266 |
| 532 if args.command == 'run': | |
| 533 return run(config_file, package_deps, args) | |
| 534 else: | |
| 535 print """Dear sir or madam, | |
| 536 It has come to my attention that a quite impossible condition has come | |
| 537 to pass in the specification you have issued a request for us to fulfill. | |
| 538 It is with a heavy heart that I inform you that, at the present juncture, | |
| 539 there is no conceivable next action to be taken upon your request, and as | |
| 540 such, we have decided to abort the request with a nonzero status code. We | |
| 541 hope that your larger goals have not been put at risk due to this | |
| 542 unfortunate circumstance, and wish you the best in deciding the next action | |
| 543 in your venture and larger life. | |
| 544 | |
| 545 Warmly, | |
| 546 recipes.py | |
| 547 """ | |
| 548 return 1 | |
| 549 | |
| 550 return 0 | |
| 551 | 267 |
| 552 if __name__ == '__main__': | 268 if __name__ == '__main__': |
| 553 # Use os._exit instead of sys.exit to prevent the python interpreter from | 269 # Use os._exit instead of sys.exit to prevent the python interpreter from |
| 554 # hanging on threads/processes which may have been spawned and not reaped | 270 # hanging on threads/processes which may have been spawned and not reaped |
| 555 # (e.g. by a leaky test harness). | 271 # (e.g. by a leaky test harness). |
| 556 try: | 272 try: |
| 557 ret = main() | 273 ret = main() |
| 558 except Exception as e: | 274 except Exception as e: |
| 559 import traceback | 275 import traceback |
| 560 traceback.print_exc(file=sys.stderr) | 276 traceback.print_exc(file=sys.stderr) |
| 561 print >> sys.stderr, 'Uncaught exception (%s): %s' % (type(e).__name__, e) | 277 print >> sys.stderr, 'Uncaught exception (%s): %s' % (type(e).__name__, e) |
| 562 sys.exit(1) | 278 sys.exit(1) |
| 563 | 279 |
| 564 if not isinstance(ret, int): | 280 if not isinstance(ret, int): |
| 565 if ret is None: | 281 if ret is None: |
| 566 ret = 0 | 282 ret = 0 |
| 567 else: | 283 else: |
| 568 print >> sys.stderr, ret | 284 print >> sys.stderr, ret |
| 569 ret = 1 | 285 ret = 1 |
| 570 sys.stdout.flush() | 286 sys.stdout.flush() |
| 571 sys.stderr.flush() | 287 sys.stderr.flush() |
| 572 os._exit(ret) | 288 os._exit(ret) |