| Index: build/android/test_runner.py
|
| diff --git a/build/android/test_runner.py b/build/android/test_runner.py
|
| index 3e32f81b7cc95db8940d38d512d0c4b4c46352f3..c6605a950029f95a9ae5fd0bae4597850c983a2e 100755
|
| --- a/build/android/test_runner.py
|
| +++ b/build/android/test_runner.py
|
| @@ -15,6 +15,7 @@ import os
|
| import shutil
|
| import signal
|
| import sys
|
| +import tempfile
|
| import threading
|
| import traceback
|
| import unittest
|
| @@ -36,10 +37,13 @@ from devil.utils import run_tests_helper
|
| from pylib import constants
|
| from pylib.base import base_test_result
|
| from pylib.base import environment_factory
|
| +from pylib.base import output_manager
|
| +from pylib.base import output_manager_factory
|
| from pylib.base import test_instance_factory
|
| from pylib.base import test_run_factory
|
| from pylib.results import json_results
|
| from pylib.results import report_results
|
| +from pylib.results.presentation import test_results_presentation
|
| from pylib.utils import logdog_helper
|
| from pylib.utils import logging_utils
|
|
|
| @@ -141,6 +145,11 @@ def AddCommonOptions(parser):
|
| default='local', choices=constants.VALID_ENVIRONMENTS,
|
| help='Test environment to run in (default: %(default)s).')
|
|
|
| + parser.add_argument(
|
| + '--local-output', action='store_true',
|
| + help='Whether to archive test output locally and generate '
|
| + 'a local results detail page.')
|
| +
|
| class FastLocalDevAction(argparse.Action):
|
| def __call__(self, parser, namespace, values, option_string=None):
|
| namespace.verbose_count = max(namespace.verbose_count, 1)
|
| @@ -806,14 +815,16 @@ def RunTestsInPlatformMode(args):
|
|
|
| ### Set up test objects.
|
|
|
| - env = environment_factory.CreateEnvironment(args, infra_error)
|
| + out_manager = output_manager_factory.CreateOutputManager(args)
|
| + env = environment_factory.CreateEnvironment(
|
| + args, out_manager, infra_error)
|
| test_instance = test_instance_factory.CreateTestInstance(args, infra_error)
|
| test_run = test_run_factory.CreateTestRun(
|
| args, env, test_instance, infra_error)
|
|
|
| ### Run.
|
|
|
| - with json_writer, logcats_uploader, env, test_instance, test_run:
|
| + with json_writer, logcats_uploader, out_manager, env, test_instance, test_run:
|
|
|
| repetitions = (xrange(args.repeat + 1) if args.repeat >= 0
|
| else itertools.count())
|
| @@ -870,6 +881,24 @@ def RunTestsInPlatformMode(args):
|
| str(tot_tests),
|
| str(iteration_count))
|
|
|
| + if args.local_output:
|
| + results_detail_file = tempfile.NamedTemporaryFile(delete=False)
|
| + try:
|
| + result_html_string = test_results_presentation.result_details(
|
| + json_path=args.json_results_file,
|
| + test_name=args.command,
|
| + cs_base_url='http://cs.chromium.org',
|
| + local_output=True)
|
| + results_detail_file.write(result_html_string)
|
| + results_detail_file.flush()
|
| + finally:
|
| + results_detail_link = out_manager.ArchiveAndDeleteFile(
|
| + results_detail_file.name,
|
| + 'test_results_presentation.html',
|
| + 'test_results_presentation',
|
| + output_manager.Datatype.HTML)
|
| + logging.critical('TEST RESULTS: %s', results_detail_link)
|
| +
|
| if args.command == 'perf' and (args.steps or args.single_step):
|
| return 0
|
|
|
|
|