Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Class for running instrumentation tests on a single device.""" | 5 """Class for running instrumentation tests on a single device.""" |
| 6 | 6 |
| 7 import logging | 7 import logging |
| 8 import os | 8 import os |
| 9 import re | 9 import re |
| 10 import time | 10 import time |
| 11 | 11 |
| 12 from pylib import android_commands | 12 from pylib import android_commands |
| 13 from pylib import constants | 13 from pylib import constants |
| 14 from pylib import flag_changer | |
| 14 from pylib import json_perf_parser | 15 from pylib import json_perf_parser |
| 15 from pylib import perf_tests_helper | 16 from pylib import perf_tests_helper |
| 16 from pylib import valgrind_tools | 17 from pylib import valgrind_tools |
| 17 from pylib.base import base_test_result | 18 from pylib.base import base_test_result |
| 18 from pylib.base import base_test_runner | 19 from pylib.base import base_test_runner |
| 19 | 20 |
| 20 import test_result | 21 import test_result |
| 21 | 22 |
| 22 | 23 |
| 23 _PERF_TEST_ANNOTATION = 'PerfTest' | 24 _PERF_TEST_ANNOTATION = 'PerfTest' |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 64 """ | 65 """ |
| 65 super(TestRunner, self).__init__(device, test_options.tool, | 66 super(TestRunner, self).__init__(device, test_options.tool, |
| 66 test_options.push_deps, | 67 test_options.push_deps, |
| 67 test_options.cleanup_test_files) | 68 test_options.cleanup_test_files) |
| 68 self._lighttp_port = constants.LIGHTTPD_RANDOM_PORT_FIRST + shard_index | 69 self._lighttp_port = constants.LIGHTTPD_RANDOM_PORT_FIRST + shard_index |
| 69 | 70 |
| 70 self.options = test_options | 71 self.options = test_options |
| 71 self.test_pkg = test_pkg | 72 self.test_pkg = test_pkg |
| 72 self.ports_to_forward = ports_to_forward | 73 self.ports_to_forward = ports_to_forward |
| 73 self.coverage_dir = test_options.coverage_dir | 74 self.coverage_dir = test_options.coverage_dir |
| 75 cmdline_file = [a.cmdline_file for a in constants.PACKAGE_INFO.itervalues() | |
| 76 if a.test_package == self.test_pkg.GetPackageName()] | |
| 77 if len(cmdline_file): | |
| 78 cmdline_file = cmdline_file[0] | |
|
frankf
2013/08/27 00:28:09
why would len > 0?
craigdh
2013/08/27 17:26:13
The length should be 1 if there is an entry for the test package. [comment truncated in extraction]
| |
| 79 else: | |
| 80 cmdline_file = constants.PACKAGE_INFO['chrome'].cmdline_file | |
| 81 self.flags = flag_changer.FlagChanger(self.adb, cmdline_file) | |
| 74 | 82 |
| 75 #override | 83 #override |
| 76 def InstallTestPackage(self): | 84 def InstallTestPackage(self): |
| 77 self.test_pkg.Install(self.adb) | 85 self.test_pkg.Install(self.adb) |
| 78 | 86 |
| 79 #override | 87 #override |
| 80 def PushDataDeps(self): | 88 def PushDataDeps(self): |
| 81 # TODO(frankf): Implement a general approach for copying/installing | 89 # TODO(frankf): Implement a general approach for copying/installing |
| 82 # once across test runners. | 90 # once across test runners. |
| 83 if TestRunner._DEVICE_HAS_TEST_FILES.get(self.device, False): | 91 if TestRunner._DEVICE_HAS_TEST_FILES.get(self.device, False): |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 134 if self.adb.SetJavaAssertsEnabled(True): | 142 if self.adb.SetJavaAssertsEnabled(True): |
| 135 self.adb.Reboot(full_reboot=False) | 143 self.adb.Reboot(full_reboot=False) |
| 136 | 144 |
| 137 # We give different default value to launch HTTP server based on shard index | 145 # We give different default value to launch HTTP server based on shard index |
| 138 # because it may have race condition when multiple processes are trying to | 146 # because it may have race condition when multiple processes are trying to |
| 139 # launch lighttpd with same port at same time. | 147 # launch lighttpd with same port at same time. |
| 140 http_server_ports = self.LaunchTestHttpServer( | 148 http_server_ports = self.LaunchTestHttpServer( |
| 141 os.path.join(constants.DIR_SOURCE_ROOT), self._lighttp_port) | 149 os.path.join(constants.DIR_SOURCE_ROOT), self._lighttp_port) |
| 142 if self.ports_to_forward: | 150 if self.ports_to_forward: |
| 143 self._ForwardPorts([(port, port) for port in self.ports_to_forward]) | 151 self._ForwardPorts([(port, port) for port in self.ports_to_forward]) |
| 144 self.flags.AddFlags(['--enable-test-intents']) | 152 self.flags.AddFlags(['--disable-fre', '--enable-test-intents']) |
| 145 | 153 |
| 146 def TearDown(self): | 154 def TearDown(self): |
| 147 """Cleans up the test harness and saves outstanding data from test run.""" | 155 """Cleans up the test harness and saves outstanding data from test run.""" |
| 156 self.flags.Restore() | |
| 148 if self.ports_to_forward: | 157 if self.ports_to_forward: |
| 149 self._UnmapPorts([(port, port) for port in self.ports_to_forward]) | 158 self._UnmapPorts([(port, port) for port in self.ports_to_forward]) |
| 150 super(TestRunner, self).TearDown() | 159 super(TestRunner, self).TearDown() |
| 151 | 160 |
| 152 def TestSetup(self, test): | 161 def TestSetup(self, test): |
| 153 """Sets up the test harness for running a particular test. | 162 """Sets up the test harness for running a particular test. |
| 154 | 163 |
| 155 Args: | 164 Args: |
| 156 test: The name of the test that will be run. | 165 test: The name of the test that will be run. |
| 157 """ | 166 """ |
| (...skipping 195 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 353 duration_ms = 0 | 362 duration_ms = 0 |
| 354 message = str(e) | 363 message = str(e) |
| 355 if not message: | 364 if not message: |
| 356 message = 'No information.' | 365 message = 'No information.' |
| 357 results.AddResult(test_result.InstrumentationTestResult( | 366 results.AddResult(test_result.InstrumentationTestResult( |
| 358 test, base_test_result.ResultType.CRASH, start_date_ms, duration_ms, | 367 test, base_test_result.ResultType.CRASH, start_date_ms, duration_ms, |
| 359 log=message)) | 368 log=message)) |
| 360 raw_result = None | 369 raw_result = None |
| 361 self.TestTeardown(test, raw_result) | 370 self.TestTeardown(test, raw_result) |
| 362 return (results, None if results.DidRunPass() else test) | 371 return (results, None if results.DidRunPass() else test) |
| OLD | NEW |