Chromium Code Reviews — Index: build/android/test_runner.py
| diff --git a/build/android/test_runner.py b/build/android/test_runner.py |
| index 0fb133cc5ddb82d7de0cdab3285c8144087907b4..3fae0a4e5c02eee60166d80b3abbf6b28f204a21 100755 |
| --- a/build/android/test_runner.py |
| +++ b/build/android/test_runner.py |
| @@ -58,7 +58,7 @@ def AddCommonOptions(parser): |
| debug_or_release_group = group.add_mutually_exclusive_group() |
| debug_or_release_group.add_argument( |
| - '--debug', action='store_const', const='Debug', dest='build_type', |
| + '--debug', action='store_const', const='Default', dest='build_type', |
|
Bernhard Bauer
2016/08/16 23:37:35
What's up with this change again?
dozsa
2016/08/17 10:40:26
I need this change and the one below to be able to
|
| default=default_build_type, |
| help=('If set, run test suites under out/Debug. ' |
| 'Default is env var BUILDTYPE or Debug.')) |
| @@ -793,7 +793,8 @@ def _GetAttachedDevices(blacklist_file, test_device, enable_cache, num_retries): |
| return sorted(attached_devices) |
| -_DEFAULT_PLATFORM_MODE_TESTS = ['gtest', 'instrumentation', 'perf'] |
| +# TODO(rnephew): Add perf when ready to switch to platform mode as default. |
| +_DEFAULT_PLATFORM_MODE_TESTS = ['gtest', 'instrumentation'] |
| def RunTestsCommand(args): # pylint: disable=too-many-return-statements |
| @@ -820,11 +821,6 @@ def RunTestsCommand(args): # pylint: disable=too-many-return-statements |
| if not ports.ResetTestServerPortAllocation(): |
| raise Exception('Failed to reset test server port.') |
| - # pylint: disable=protected-access |
| - if os.path.exists(ports._TEST_SERVER_PORT_LOCKFILE): |
| - os.unlink(ports._TEST_SERVER_PORT_LOCKFILE) |
| - # pylint: enable=protected-access |
| - |
| def get_devices(): |
| return _GetAttachedDevices(args.blacklist_file, args.test_device, |
| args.enable_device_cache, args.num_retries) |
| @@ -937,9 +933,6 @@ def RunTestsInPlatformMode(args): |
| json_results.GenerateJsonResultsFile( |
| all_raw_results, args.json_results_file) |
| - if args.command == 'perf' and (args.steps or args.single_step): |
| - return 0 |
| - |
| return (0 if all(r.DidRunPass() for r in all_iteration_results) |
| else constants.ERROR_EXIT_CODE) |