Chromium Code Reviews

Index: build/android/pylib/host_driven/test_case.py
diff --git a/build/android/pylib/host_driven/test_case.py b/build/android/pylib/host_driven/test_case.py
index 4f70508c3d103b4c1852b6eb2429589a451b5564..bb8aa72c876cbd337b0c787e53fd8aeb448e9267 100644
--- a/build/android/pylib/host_driven/test_case.py
+++ b/build/android/pylib/host_driven/test_case.py
@@ -76,22 +76,16 @@ class HostDrivenTestCase(object):
     # Get the test method on the derived class and execute it
     return getattr(self, self.test_name)()
 
-  def __RunJavaTest(self, package_name, test_case, test_method):
-    """Runs a single Java test method with a Java TestRunner.
+  def __RunJavaTest(self, test, test_pkg):
+    """Runs a single Java test in a Java TestRunner.
 
     Args:
-      package_name: Package name in which the java tests live
-          (e.g. foo.bar.baz.tests)
-      test_case: Name of the Java test case (e.g. FooTest)
-      test_method: Name of the test method to run (e.g. testFooBar)
+      test: Fully qualified test name (ex. foo.bar.TestClass#testMethod)
+      test_pkg: TestPackage object.
 
     Returns:
       TestRunResults object with a single test result.
     """
-    test = '%s.%s#%s' % (package_name, test_case, test_method)
-    test_pkg = test_package.TestPackage(
-        self.instrumentation_options.test_apk_path,
-        self.instrumentation_options.test_apk_jar_path)
     java_test_runner = test_runner.TestRunner(self.instrumentation_options,
                                               self.device_id,
                                               self.shard_index, test_pkg,
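As a reading aid, not part of the patch: the new signature drops the (package_name, test_case, test_method) triple in favor of a single fully qualified name. A small runnable sketch of that 'package.Class#method' shape, with an illustrative helper name:

# Illustrative helper, not in the patch: shows the format that
# __RunJavaTest's new `test` argument expects.
def split_qualified_test_name(test):
  """Splits 'foo.bar.TestClass#testMethod' into (class path, method)."""
  class_path, method = test.split('#')
  return class_path, method

assert split_qualified_test_name('foo.bar.TestClass#testMethod') == (
    'foo.bar.TestClass', 'testMethod')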
@@ -102,7 +96,7 @@ class HostDrivenTestCase(object):
     finally:
       java_test_runner.TearDown()
 
-  def _RunJavaTests(self, package_name, tests):
+  def _RunJavaTests(self, test_filters):
bulach, 2013/08/20 11:08:07:
we should probably do this two-sided? keep the oth…

gkanwar1, 2013/08/20 15:45:33:
By two-sided do you mean:
* Upstream: Create a new…

bulach, 2013/08/20 15:53:10:
the former :)
requires less coordination between y…

gkanwar, 2013/08/20 16:01:57:
Yeah, the only other problem I had with the former…
| """Calls a list of tests and stops at the first test failure. |
| This method iterates until either it encounters a non-passing test or it |
| @@ -113,9 +107,7 @@ class HostDrivenTestCase(object): |
| being defined. |
| Args: |
| - package_name: Package name in which the java tests live |
| - (e.g. foo.bar.baz.tests) |
| - tests: A list of Java test names which will be run |
| + test_filters: A list of Java test filters. |
| Returns: |
| A TestRunResults object containing an overall result for this set of Java |
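To make the interface change concrete (illustrative only, reusing the example names from the removed docstring): the same test addressed through the old and the new argument shapes.

# Old _RunJavaTests arguments: a package name plus 'Class.method' names.
old_args = ('foo.bar.baz.tests',                        # package_name
            ['FooTest.testFooBar'])                     # tests
# New _RunJavaTests argument: fully qualified test filters; each filter
# may expand to one or more tests via the TestPackage.
new_args = (['foo.bar.baz.tests.FooTest#testFooBar'],)  # test_filters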
@@ -124,17 +116,30 @@
     test_type = base_test_result.ResultType.PASS
     log = ''
 
+    test_pkg = test_package.TestPackage(
+        self.instrumentation_options.test_apk_path,
+        self.instrumentation_options.test_apk_jar_path)
+
     start_ms = int(time.time()) * 1000
-    for test in tests:
-      # We're only running one test at a time, so this TestRunResults object
-      # will hold only one result.
-      suite, test_name = test.split('.')
-      java_result = self.__RunJavaTest(package_name, suite, test_name)
-      assert len(java_result.GetAll()) == 1
-      if not java_result.DidRunPass():
-        result = java_result.GetNotPass().pop()
-        log = result.GetLog()
-        test_type = result.GetType()
+    done = False
+    for test_filter in test_filters:
+      tests = test_pkg._GetAllMatchingTests(None, None, test_filter)
+      # Filters should always result in >= 1 test.
+      if len(tests) == 0:
+        raise Exception('Java test filter "%s" returned no tests.'
+                        % test_filter)
+      for test in tests:
+        # We're only running one test at a time, so this TestRunResults object
+        # will hold only one result.
+        java_result = self.__RunJavaTest(test, test_pkg)
+        assert len(java_result.GetAll()) == 1
+        if not java_result.DidRunPass():
+          result = java_result.GetNotPass().pop()
+          log = result.GetLog()
+          test_type = result.GetType()
+          done = True
+          break
+      if done:
+        break
     duration_ms = int(time.time()) * 1000 - start_ms
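For readers tracing the new control flow, a minimal self-contained sketch of the pattern this hunk introduces: expand each filter, fail fast on the first non-passing test. expand_filter and run_test are stand-ins assumed here for TestPackage._GetAllMatchingTests and __RunJavaTest; an early return from a helper function replaces the done-flag double break.

def run_filters_until_failure(test_filters, expand_filter, run_test):
  """Expands each filter and runs tests, stopping at the first failure.

  Mirrors the nested loop in _RunJavaTests: an empty expansion raises,
  and the first non-passing result ends the run.
  """
  for test_filter in test_filters:
    tests = expand_filter(test_filter)
    # Filters should always result in >= 1 test.
    if not tests:
      raise Exception('Java test filter "%s" returned no tests.' % test_filter)
    for test in tests:
      passed, log = run_test(test)
      if not passed:
        return False, log  # early return replaces the done flag
  return True, ''

# Toy usage: one filter expands to two tests; the second one fails.
expand = lambda f: ['%s#testA' % f, '%s#testB' % f]
run = lambda t: (not t.endswith('testB'), 'failure in %s' % t)
print(run_filters_until_failure(['foo.bar.TestClass'], expand, run))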