Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import io | 5 import io |
| 6 import json | 6 import json |
| 7 import logging | 7 import logging |
| 8 import os | 8 import os |
| 9 import pickle | 9 import pickle |
| 10 import shutil | 10 import shutil |
| (...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 54 self._timer.cancel() | 54 self._timer.cancel() |
| 55 self._running = False | 55 self._running = False |
| 56 | 56 |
| 57 def _LogMessage(self): | 57 def _LogMessage(self): |
| 58 logging.info('Currently working on test %s', self._shard.current_test) | 58 logging.info('Currently working on test %s', self._shard.current_test) |
| 59 self._timer = threading.Timer(self._wait_time, self._LogMessage) | 59 self._timer = threading.Timer(self._wait_time, self._LogMessage) |
| 60 self._timer.start() | 60 self._timer.start() |
| 61 | 61 |
| 62 | 62 |
| 63 class TestShard(object): | 63 class TestShard(object): |
| 64 def __init__( | 64 def __init__(self, env, test_instance, tests, retries=3, timeout=None): |
| 65 self, env, test_instance, device, index, tests, retries=3, timeout=None): | 65 logging.info('Create shard for the following tests:') |
| 66 logging.info('Create shard %s for device %s to run the following tests:', | |
| 67 index, device) | |
| 68 for t in tests: | 66 for t in tests: |
| 69 logging.info(' %s', t) | 67 logging.info(' %s', t) |
| 70 self._battery = battery_utils.BatteryUtils(device) | |
| 71 self._current_test = None | 68 self._current_test = None |
| 72 self._device = device | |
| 73 self._env = env | 69 self._env = env |
| 74 self._index = index | 70 self._heart_beat = HeartBeat(self) |
| 71 self._index = None | |
| 75 self._output_dir = None | 72 self._output_dir = None |
| 76 self._retries = retries | 73 self._retries = retries |
| 77 self._test_instance = test_instance | 74 self._test_instance = test_instance |
| 78 self._tests = tests | 75 self._tests = tests |
| 79 self._timeout = timeout | 76 self._timeout = timeout |
| 80 self._heart_beat = HeartBeat(self) | |
| 81 | |
| 82 @local_device_environment.handle_shard_failures | |
| 83 def RunTestsOnShard(self): | |
| 84 results = base_test_result.TestRunResults() | |
| 85 for test in self._tests: | |
| 86 tries_left = self._retries | |
| 87 result_type = None | |
| 88 while (result_type != base_test_result.ResultType.PASS | |
| 89 and tries_left > 0): | |
| 90 try: | |
| 91 self._TestSetUp(test) | |
| 92 result_type = self._RunSingleTest(test) | |
| 93 except device_errors.CommandTimeoutError: | |
| 94 result_type = base_test_result.ResultType.TIMEOUT | |
| 95 except device_errors.CommandFailedError: | |
| 96 logging.exception('Exception when executing %s.', test) | |
| 97 result_type = base_test_result.ResultType.FAIL | |
| 98 finally: | |
| 99 self._TestTearDown() | |
| 100 if result_type != base_test_result.ResultType.PASS: | |
| 101 try: | |
| 102 device_recovery.RecoverDevice(self._device, self._env.blacklist) | |
| 103 except device_errors.CommandTimeoutError: | |
| 104 logging.exception( | |
| 105 'Device failed to recover after failing %s.', test) | |
| 106 tries_left = tries_left - 1 | |
| 107 | |
| 108 results.AddResult(base_test_result.BaseTestResult(test, result_type)) | |
| 109 return results | |
| 110 | 77 |
| 111 def _TestSetUp(self, test): | 78 def _TestSetUp(self, test): |
| 112 if not self._device.IsOnline(): | |
| 113 msg = 'Device %s is unresponsive.' % str(self._device) | |
| 114 raise device_errors.DeviceUnreachableError(msg) | |
| 115 | |
| 116 logging.info('Charge level: %s%%', | |
| 117 str(self._battery.GetBatteryInfo().get('level'))) | |
| 118 if self._test_instance.min_battery_level: | |
| 119 self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level) | |
| 120 | |
| 121 logging.info('temperature: %s (0.1 C)', | |
| 122 str(self._battery.GetBatteryInfo().get('temperature'))) | |
| 123 if self._test_instance.max_battery_temp: | |
| 124 self._battery.LetBatteryCoolToTemperature( | |
| 125 self._test_instance.max_battery_temp) | |
| 126 | |
| 127 if not self._device.IsScreenOn(): | |
| 128 self._device.SetScreen(True) | |
| 129 | |
| 130 if (self._test_instance.collect_chartjson_data | 79 if (self._test_instance.collect_chartjson_data |
| 131 or self._tests[test].get('archive_output_dir')): | 80 or self._tests[test].get('archive_output_dir')): |
| 132 self._output_dir = tempfile.mkdtemp() | 81 self._output_dir = tempfile.mkdtemp() |
| 133 | 82 |
| 134 self._current_test = test | 83 self._current_test = test |
| 135 self._heart_beat.Start() | 84 self._heart_beat.Start() |
| 136 | 85 |
| 137 def _RunSingleTest(self, test): | 86 def _RunSingleTest(self, test): |
| 138 self._test_instance.WriteBuildBotJson(self._output_dir) | 87 self._test_instance.WriteBuildBotJson(self._output_dir) |
| 139 | 88 |
| 140 timeout = self._tests[test].get('timeout', self._timeout) | 89 timeout = self._tests[test].get('timeout', self._timeout) |
| 141 cmd = self._CreateCmd(test) | 90 cmd = self._CreateCmd(test) |
| 142 cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT) | 91 cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT) |
| 143 | 92 |
| 144 logging.debug("Running %s with command '%s' on shard %d with timeout %d", | 93 self._LogTest(test, cmd, timeout) |
| 145 test, cmd, self._index, timeout) | |
| 146 | 94 |
| 147 try: | 95 try: |
| 148 start_time = time.time() | 96 start_time = time.time() |
| 149 exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout( | 97 exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout( |
| 150 cmd, timeout, cwd=cwd, shell=True) | 98 cmd, timeout, cwd=cwd, shell=True) |
| 151 end_time = time.time() | 99 end_time = time.time() |
| 152 json_output = self._test_instance.ReadChartjsonOutput(self._output_dir) | 100 json_output = self._test_instance.ReadChartjsonOutput(self._output_dir) |
| 153 if exit_code == 0: | 101 if exit_code == 0: |
| 154 result_type = base_test_result.ResultType.PASS | 102 result_type = base_test_result.ResultType.PASS |
| 155 else: | 103 else: |
| 156 result_type = base_test_result.ResultType.FAIL | 104 result_type = base_test_result.ResultType.FAIL |
| 157 except cmd_helper.TimeoutError as e: | 105 except cmd_helper.TimeoutError as e: |
| 158 end_time = time.time() | 106 end_time = time.time() |
| 159 exit_code = -1 | 107 exit_code = -1 |
| 160 output = e.output | 108 output = e.output |
| 161 json_output = '' | 109 json_output = '' |
| 162 result_type = base_test_result.ResultType.TIMEOUT | 110 result_type = base_test_result.ResultType.TIMEOUT |
| 163 | 111 |
| 164 return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code, | 112 return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code, |
| 165 output, json_output, result_type) | 113 output, json_output, result_type) |
| 166 | 114 |
| 167 def _CreateCmd(self, test): | 115 def _CreateCmd(self, test): |
| 168 cmd = '%s --device %s' % (self._tests[test]['cmd'], str(self._device)) | 116 cmd = [] |
| 117 if self._test_instance.dry_run: | |
| 118 cmd.append('echo') | |
| 119 cmd.extend([self._tests[test]['cmd']]) | |
|
perezju
2016/08/05 11:02:57
nit: extend -> append :)
rnephew (Reviews Here)
2016/08/05 16:57:12
Done.
| |
| 169 if self._output_dir: | 120 if self._output_dir: |
| 170 cmd = cmd + ' --output-dir=%s' % self._output_dir | 121 cmd.append('--output-dir=%s' % self._output_dir) |
| 171 if self._test_instance.dry_run: | 122 return ' '.join(self._ExtendCmd(cmd)) |
| 172 cmd = 'echo %s' % cmd | 123 |
| 124 def _ExtendCmd(self, cmd): # pylint: disable=no-self-use | |
| 173 return cmd | 125 return cmd |
| 174 | 126 |
| 127 def _LogTest(self, _test, _cmd, _timeout): # pylint: disable=no-self-use | |
| 128 pass | |
|
perezju
2016/08/05 11:02:57
nit: I think you can raise NotImplementedError here
rnephew (Reviews Here)
2016/08/05 16:57:12
Done.
| |
| 129 | |
| 130 def _LogTestExit(self, test, exit_code, duration): | |
| 131 # pylint: disable=no-self-use | |
| 132 logging.info('%s : exit_code=%d in %d secs.', test, exit_code, duration) | |
| 133 | |
| 134 def _ExtendPersistedResult(self, persisted_result): | |
| 135 # pylint: disable=no-self-use | |
| 136 pass | |
|
perezju
2016/08/05 11:02:57
nit: same here
rnephew (Reviews Here)
2016/08/05 16:57:12
Done.
| |
| 137 | |
| 175 def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code, | 138 def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code, |
| 176 output, json_output, result_type): | 139 output, json_output, result_type): |
| 177 if exit_code is None: | 140 if exit_code is None: |
| 178 exit_code = -1 | 141 exit_code = -1 |
| 179 logging.info('%s : exit_code=%d in %d secs on device %s', | 142 |
| 180 test, exit_code, end_time - start_time, | 143 self._LogTestExit(test, exit_code, end_time - start_time) |
| 181 str(self._device)) | |
| 182 | 144 |
| 183 actual_exit_code = exit_code | 145 actual_exit_code = exit_code |
| 184 if (self._test_instance.flaky_steps | 146 if (self._test_instance.flaky_steps |
| 185 and test in self._test_instance.flaky_steps): | 147 and test in self._test_instance.flaky_steps): |
| 186 exit_code = 0 | 148 exit_code = 0 |
| 187 archive_bytes = (self._ArchiveOutputDir() | 149 archive_bytes = (self._ArchiveOutputDir() |
| 188 if self._tests[test].get('archive_output_dir') | 150 if self._tests[test].get('archive_output_dir') |
| 189 else None) | 151 else None) |
| 190 persisted_result = { | 152 persisted_result = { |
| 191 'name': test, | 153 'name': test, |
| 192 'output': [output], | 154 'output': [output], |
| 193 'chartjson': json_output, | 155 'chartjson': json_output, |
| 194 'archive_bytes': archive_bytes, | 156 'archive_bytes': archive_bytes, |
| 195 'exit_code': exit_code, | 157 'exit_code': exit_code, |
| 196 'actual_exit_code': actual_exit_code, | 158 'actual_exit_code': actual_exit_code, |
| 197 'result_type': result_type, | 159 'result_type': result_type, |
| 198 'start_time': start_time, | 160 'start_time': start_time, |
| 199 'end_time': end_time, | 161 'end_time': end_time, |
| 200 'total_time': end_time - start_time, | 162 'total_time': end_time - start_time, |
| 201 'device': str(self._device), | |
| 202 'cmd': cmd, | 163 'cmd': cmd, |
| 203 } | 164 } |
| 165 self._ExtendPersistedResult(persisted_result) | |
| 204 self._SaveResult(persisted_result) | 166 self._SaveResult(persisted_result) |
| 205 return result_type | 167 return result_type |
| 206 | 168 |
| 207 def _ArchiveOutputDir(self): | 169 def _ArchiveOutputDir(self): |
| 208 """Archive all files in the output dir, and return as compressed bytes.""" | 170 """Archive all files in the output dir, and return as compressed bytes.""" |
| 209 with io.BytesIO() as archive: | 171 with io.BytesIO() as archive: |
| 210 with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as contents: | 172 with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as contents: |
| 211 num_files = 0 | 173 num_files = 0 |
| 212 for absdir, _, files in os.walk(self._output_dir): | 174 for absdir, _, files in os.walk(self._output_dir): |
| 213 reldir = os.path.relpath(absdir, self._output_dir) | 175 reldir = os.path.relpath(absdir, self._output_dir) |
| (...skipping 16 matching lines...) Expand all Loading... | |
| 230 with file(pickled, 'r') as f: | 192 with file(pickled, 'r') as f: |
| 231 previous = pickle.loads(f.read()) | 193 previous = pickle.loads(f.read()) |
| 232 result['output'] = previous['output'] + result['output'] | 194 result['output'] = previous['output'] + result['output'] |
| 233 with file(pickled, 'w') as f: | 195 with file(pickled, 'w') as f: |
| 234 f.write(pickle.dumps(result)) | 196 f.write(pickle.dumps(result)) |
| 235 | 197 |
| 236 def _TestTearDown(self): | 198 def _TestTearDown(self): |
| 237 if self._output_dir: | 199 if self._output_dir: |
| 238 shutil.rmtree(self._output_dir, ignore_errors=True) | 200 shutil.rmtree(self._output_dir, ignore_errors=True) |
| 239 self._output_dir = None | 201 self._output_dir = None |
| 202 self._heart_beat.Stop() | |
| 203 self._current_test = None | |
| 204 | |
| 205 @property | |
| 206 def current_test(self): | |
| 207 return self._current_test | |
| 208 | |
| 209 | |
| 210 class DeviceTestShard(TestShard): | |
| 211 def __init__( | |
| 212 self, env, test_instance, device, index, tests, retries=3, timeout=None): | |
| 213 super(DeviceTestShard, self).__init__( | |
| 214 env, test_instance, tests, retries, timeout) | |
| 215 self._battery = battery_utils.BatteryUtils(device) if device else None | |
| 216 self._device = device | |
| 217 self._index = index | |
| 218 | |
| 219 @local_device_environment.handle_shard_failures | |
| 220 def RunTestsOnShard(self): | |
| 221 results = base_test_result.TestRunResults() | |
| 222 for test in self._tests: | |
| 223 tries_left = self._retries | |
| 224 result_type = None | |
| 225 while (result_type != base_test_result.ResultType.PASS | |
| 226 and tries_left > 0): | |
| 227 try: | |
| 228 self._TestSetUp(test) | |
| 229 result_type = self._RunSingleTest(test) | |
| 230 except device_errors.CommandTimeoutError: | |
| 231 result_type = base_test_result.ResultType.TIMEOUT | |
| 232 except device_errors.CommandFailedError: | |
| 233 logging.exception('Exception when executing %s.', test) | |
| 234 result_type = base_test_result.ResultType.FAIL | |
| 235 finally: | |
| 236 self._TestTearDown() | |
| 237 if result_type != base_test_result.ResultType.PASS: | |
| 238 try: | |
| 239 device_recovery.RecoverDevice(self._device, self._env.blacklist) | |
| 240 except device_errors.CommandTimeoutError: | |
| 241 logging.exception( | |
| 242 'Device failed to recover after failing %s.', test) | |
| 243 tries_left = tries_left - 1 | |
| 244 | |
| 245 results.AddResult(base_test_result.BaseTestResult(test, result_type)) | |
| 246 return results | |
| 247 | |
| 248 | |
|
perezju
2016/08/05 11:02:57
nit: remove extra line
rnephew (Reviews Here)
2016/08/05 16:57:12
Done.
| |
| 249 def _LogTestExit(self, test, exit_code, duration): | |
| 250 logging.info('%s : exit_code=%d in %d secs on device %s', | |
| 251 test, exit_code, duration, str(self._device)) | |
| 252 | |
| 253 def _TestSetUp(self, test): | |
| 254 if not self._device.IsOnline(): | |
| 255 msg = 'Device %s is unresponsive.' % str(self._device) | |
| 256 raise device_errors.DeviceUnreachableError(msg) | |
| 257 | |
| 258 logging.info('Charge level: %s%%', | |
| 259 str(self._battery.GetBatteryInfo().get('level'))) | |
| 260 if self._test_instance.min_battery_level: | |
| 261 self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level) | |
| 262 | |
| 263 logging.info('temperature: %s (0.1 C)', | |
| 264 str(self._battery.GetBatteryInfo().get('temperature'))) | |
| 265 if self._test_instance.max_battery_temp: | |
| 266 self._battery.LetBatteryCoolToTemperature( | |
| 267 self._test_instance.max_battery_temp) | |
| 268 | |
| 269 if not self._device.IsScreenOn(): | |
| 270 self._device.SetScreen(True) | |
| 271 | |
| 272 super(DeviceTestShard, self)._TestSetUp(test) | |
| 273 | |
| 274 def _LogTest(self, test, cmd, timeout): | |
| 275 logging.debug("Running %s with command '%s' on shard %s with timeout %d", | |
| 276 test, cmd, str(self._index), timeout) | |
| 277 | |
| 278 def _ExtendCmd(self, cmd): | |
| 279 cmd.extend(['--device=%s' % str(self._device)]) | |
| 280 return cmd | |
| 281 | |
| 282 def _ExtendPersistedResult(self, persisted_result): | |
| 283 persisted_result['host_test'] = False | |
| 284 persisted_result['device'] = str(self._device) | |
| 285 | |
| 286 def _TestTearDown(self): | |
| 240 try: | 287 try: |
| 241 logging.info('Unmapping device ports for %s.', self._device) | 288 logging.info('Unmapping device ports for %s.', self._device) |
| 242 forwarder.Forwarder.UnmapAllDevicePorts(self._device) | 289 forwarder.Forwarder.UnmapAllDevicePorts(self._device) |
| 243 except Exception: # pylint: disable=broad-except | 290 except Exception: # pylint: disable=broad-except |
| 244 logging.exception('Exception when resetting ports.') | 291 logging.exception('Exception when resetting ports.') |
| 245 finally: | 292 finally: |
| 246 self._heart_beat.Stop() | 293 super(DeviceTestShard, self)._TestTearDown() |
| 247 self._current_test = None | |
| 248 | 294 |
| 249 @property | 295 class HostTestShard(TestShard): |
| 250 def current_test(self): | 296 def __init__(self, env, test_instance, tests, retries=3, timeout=None): |
| 251 return self._current_test | 297 super(HostTestShard, self).__init__( |
| 298 env, test_instance, tests, retries, timeout) | |
| 299 | |
| 300 @local_device_environment.handle_shard_failures | |
| 301 def RunTestsOnShard(self): | |
| 302 results = base_test_result.TestRunResults() | |
| 303 for test in self._tests: | |
| 304 tries_left = self._retries | |
| 305 result_type = None | |
| 306 while (result_type != base_test_result.ResultType.PASS | |
| 307 and tries_left > 0): | |
| 308 try: | |
| 309 self._TestSetUp(test) | |
| 310 result_type = self._RunSingleTest(test) | |
| 311 finally: | |
| 312 self._TestTearDown() | |
| 313 results.AddResult(base_test_result.BaseTestResult(test, result_type)) | |
| 314 return results | |
| 315 | |
| 316 def _LogTest(self, test, cmd, timeout): | |
| 317 logging.debug("Running %s with command '%s' on host shard with timeout %d", | |
| 318 test, cmd, timeout) | |
| 319 | |
| 320 def _ExtendPersistedResult(self, persisted_result): | |
| 321 persisted_result['host_test'] = True | |
| 322 | |
| 252 | 323 |
| 253 class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun): | 324 class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun): |
| 254 | 325 |
| 255 _DEFAULT_TIMEOUT = 60 * 60 | 326 _DEFAULT_TIMEOUT = 60 * 60 |
| 256 _CONFIG_VERSION = 1 | 327 _CONFIG_VERSION = 1 |
| 257 | 328 |
| 258 def __init__(self, env, test_instance): | 329 def __init__(self, env, test_instance): |
| 259 super(LocalDevicePerfTestRun, self).__init__(env, test_instance) | 330 super(LocalDevicePerfTestRun, self).__init__(env, test_instance) |
| 260 self._devices = None | 331 self._devices = None |
| 261 self._env = env | 332 self._env = env |
| 333 self._no_device_tests = {} | |
| 262 self._test_buckets = [] | 334 self._test_buckets = [] |
| 263 self._test_instance = test_instance | 335 self._test_instance = test_instance |
| 264 self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT | 336 self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT |
| 265 | 337 |
| 266 def SetUp(self): | 338 def SetUp(self): |
| 267 self._devices = self._GetAllDevices(self._env.devices, | 339 self._devices = self._GetAllDevices(self._env.devices, |
| 268 self._test_instance.known_devices_file) | 340 self._test_instance.known_devices_file) |
| 269 | 341 |
| 270 if os.path.exists(constants.PERF_OUTPUT_DIR): | 342 if os.path.exists(constants.PERF_OUTPUT_DIR): |
| 271 shutil.rmtree(constants.PERF_OUTPUT_DIR) | 343 shutil.rmtree(constants.PERF_OUTPUT_DIR) |
| (...skipping 25 matching lines...) Expand all Loading... | |
| 297 raise PerfTestRunGetStepsError( | 369 raise PerfTestRunGetStepsError( |
| 298 'Neither single_step or steps set in test_instance.') | 370 'Neither single_step or steps set in test_instance.') |
| 299 | 371 |
| 300 def _SplitTestsByAffinity(self): | 372 def _SplitTestsByAffinity(self): |
| 301 # This splits tests by their device affinity so that the same tests always | 373 # This splits tests by their device affinity so that the same tests always |
| 302 # run on the same devices. This is important for perf tests since different | 374 # run on the same devices. This is important for perf tests since different |
| 303 # devices might yield slightly different performance results. | 375 # devices might yield slightly different performance results. |
| 304 test_dict = self._GetStepsFromDict() | 376 test_dict = self._GetStepsFromDict() |
| 305 for test, test_config in test_dict['steps'].iteritems(): | 377 for test, test_config in test_dict['steps'].iteritems(): |
| 306 try: | 378 try: |
| 307 affinity = test_config['device_affinity'] | 379 affinity = test_config.get('device_affinity') |
| 308 if len(self._test_buckets) < affinity + 1: | 380 if affinity is None: |
| 309 while len(self._test_buckets) != affinity + 1: | 381 self._no_device_tests[test] = test_config |
| 310 self._test_buckets.append({}) | 382 else: |
| 311 self._test_buckets[affinity][test] = test_config | 383 if len(self._test_buckets) < affinity + 1: |
| 384 while len(self._test_buckets) != affinity + 1: | |
| 385 self._test_buckets.append({}) | |
| 386 self._test_buckets[affinity][test] = test_config | |
| 312 except KeyError: | 387 except KeyError: |
| 313 logging.exception( | 388 logging.exception( |
| 314 'Test config for %s is bad.\n Config:%s', test, str(test_config)) | 389 'Test config for %s is bad.\n Config:%s', test, str(test_config)) |
| 315 | 390 |
| 316 @staticmethod | 391 @staticmethod |
| 317 def _GetAllDevices(active_devices, devices_path): | 392 def _GetAllDevices(active_devices, devices_path): |
| 318 try: | 393 try: |
| 319 if devices_path: | 394 if devices_path: |
| 320 devices = [device_utils.DeviceUtils(s) | 395 devices = [device_utils.DeviceUtils(s) |
| 321 for s in device_list.GetPersistentDeviceList(devices_path)] | 396 for s in device_list.GetPersistentDeviceList(devices_path)] |
| 322 if not devices and active_devices: | 397 if not devices and active_devices: |
| 323 logging.warning('%s is empty. Falling back to active devices.', | 398 logging.warning('%s is empty. Falling back to active devices.', |
| 324 devices_path) | 399 devices_path) |
| 325 devices = active_devices | 400 devices = active_devices |
| 326 else: | 401 else: |
| 327 logging.warning('Known devices file path not being passed. For device ' | 402 logging.warning('Known devices file path not being passed. For device ' |
| 328 'affinity to work properly, it must be passed.') | 403 'affinity to work properly, it must be passed.') |
| 329 devices = active_devices | 404 devices = active_devices |
| 330 except IOError as e: | 405 except IOError as e: |
| 331 logging.error('Unable to find %s [%s]', devices_path, e) | 406 logging.error('Unable to find %s [%s]', devices_path, e) |
| 332 devices = active_devices | 407 devices = active_devices |
| 333 return sorted(devices) | 408 return sorted(devices) |
| 334 | 409 |
| 335 #override | 410 #override |
| 336 def RunTests(self): | 411 def RunTests(self): |
| 337 # Affinitize the tests. | 412 # Affinitize the tests. |
| 338 self._SplitTestsByAffinity() | 413 self._SplitTestsByAffinity() |
| 339 if not self._test_buckets: | 414 if not self._test_buckets and not self._no_device_tests: |
| 340 raise local_device_test_run.NoTestsError() | 415 raise local_device_test_run.NoTestsError() |
| 341 | 416 |
| 342 def run_perf_tests(shard_id): | 417 def run_perf_tests(shard_id): |
| 343 if device_status.IsBlacklisted( | 418 if shard_id is None: |
| 344 str(self._devices[shard_id]), self._env.blacklist): | 419 s = HostTestShard(self._env, self._test_instance, self._no_device_tests, |
| 345 logging.warning('Device %s is not active. Will not create shard %s.', | 420 retries=3, timeout=self._timeout) |
| 346 str(self._devices[shard_id]), shard_id) | 421 else: |
| 347 return None | 422 if device_status.IsBlacklisted( |
| 348 s = TestShard(self._env, self._test_instance, self._devices[shard_id], | 423 str(self._devices[shard_id]), self._env.blacklist): |
| 349 shard_id, self._test_buckets[shard_id], | 424 logging.warning('Device %s is not active. Will not create shard %s.', |
| 350 retries=self._env.max_tries, timeout=self._timeout) | 425 str(self._devices[shard_id]), shard_id) |
| 426 return None | |
| 427 s = DeviceTestShard(self._env, self._test_instance, | |
| 428 self._devices[shard_id], shard_id, | |
| 429 self._test_buckets[shard_id], | |
| 430 retries=self._env.max_tries, timeout=self._timeout) | |
| 351 return s.RunTestsOnShard() | 431 return s.RunTestsOnShard() |
| 352 | 432 |
| 353 device_indices = range(min(len(self._devices), len(self._test_buckets))) | 433 device_indices = range(min(len(self._devices), len(self._test_buckets))) |
| 434 if self._no_device_tests: | |
| 435 device_indices.append(None) | |
| 354 shards = parallelizer.Parallelizer(device_indices).pMap(run_perf_tests) | 436 shards = parallelizer.Parallelizer(device_indices).pMap(run_perf_tests) |
| 355 return [x for x in shards.pGet(self._timeout) if x is not None] | 437 return [x for x in shards.pGet(self._timeout) if x is not None] |
| 356 | 438 |
| 357 # override | 439 # override |
| 358 def TestPackage(self): | 440 def TestPackage(self): |
| 359 return 'perf' | 441 return 'perf' |
| 360 | 442 |
| 361 # override | 443 # override |
| 362 def _CreateShards(self, _tests): | 444 def _CreateShards(self, _tests): |
| 363 raise NotImplementedError | 445 raise NotImplementedError |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 417 # override | 499 # override |
| 418 def _RunTest(self, _device, _test): | 500 def _RunTest(self, _device, _test): |
| 419 raise NotImplementedError | 501 raise NotImplementedError |
| 420 | 502 |
| 421 | 503 |
| 422 class TestDictVersionError(Exception): | 504 class TestDictVersionError(Exception): |
| 423 pass | 505 pass |
| 424 | 506 |
| 425 class PerfTestRunGetStepsError(Exception): | 507 class PerfTestRunGetStepsError(Exception): |
| 426 pass | 508 pass |
| OLD | NEW |