Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import io | 5 import io |
| 6 import json | 6 import json |
| 7 import logging | 7 import logging |
| 8 import os | 8 import os |
| 9 import pickle | 9 import pickle |
| 10 import shutil | 10 import shutil |
| (...skipping 43 matching lines...) | |
| 54 self._timer.cancel() | 54 self._timer.cancel() |
| 55 self._running = False | 55 self._running = False |
| 56 | 56 |
| 57 def _LogMessage(self): | 57 def _LogMessage(self): |
| 58 logging.info('Currently working on test %s', self._shard.current_test) | 58 logging.info('Currently working on test %s', self._shard.current_test) |
| 59 self._timer = threading.Timer(self._wait_time, self._LogMessage) | 59 self._timer = threading.Timer(self._wait_time, self._LogMessage) |
| 60 self._timer.start() | 60 self._timer.start() |
| 61 | 61 |
| 62 | 62 |
| 63 class TestShard(object): | 63 class TestShard(object): |
| 64 def __init__( | 64 def __init__(self, env, test_instance, tests, retries=3, timeout=None): |
| 65 self, env, test_instance, device, index, tests, retries=3, timeout=None): | 65 logging.info('Create host shard for the following tests:') |

jbudorick (2016/08/02 18:51:50): This shouldn't be saying it's a host shard.
rnephew (Reviews Here) (2016/08/02 19:03:52): Done.
| 66 logging.info('Create shard %s for device %s to run the following tests:', | |
| 67 index, device) | |
| 68 for t in tests: | 66 for t in tests: |
| 69 logging.info(' %s', t) | 67 logging.info(' %s', t) |
| 70 self._battery = battery_utils.BatteryUtils(device) | |
| 71 self._current_test = None | 68 self._current_test = None |
| 72 self._device = device | 69 self._device = None |

jbudorick (2016/08/02 18:51:50): Why is this still here rather than in DeviceTestShard?
rnephew (Reviews Here) (2016/08/02 19:03:52): The CreateCmd function needs it. I decided to put…
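The thread above concerns where `_device` should live: the CL keeps it on the base `TestShard`, defaulting to `None`, because `_CreateCmd` branches on it. Below is a minimal sketch of the alternative jbudorick appears to be suggesting, assuming the attributes set up in the CL's `__init__` methods; the `_ExtendCmd` hook name is hypothetical, not part of the CL:

```python
# Sketch only, not the CL's code: the base class builds a device-agnostic
# command line and exposes a hook, and DeviceTestShard overrides the hook to
# append the --device flag, so TestShard never needs a _device attribute.
class TestShard(object):
  def _CreateCmd(self, test):
    cmd = self._ExtendCmd(self._tests[test]['cmd'])
    if self._output_dir:
      cmd += ' --output-dir=%s' % self._output_dir
    if self._test_instance.dry_run:
      cmd = 'echo %s' % cmd
    return cmd

  def _ExtendCmd(self, cmd):
    # Hypothetical hook: host shards run the command unchanged.
    return cmd


class DeviceTestShard(TestShard):
  def _ExtendCmd(self, cmd):
    # Append the serial so the test script targets this shard's device.
    return '%s --device %s' % (cmd, str(self._device))
```

This would keep device state out of the base class at the cost of an extra override; the CL instead opts for the simpler `if self._device:` check inside `_CreateCmd`.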
| 73 self._env = env | 70 self._env = env |
| 74 self._index = index | 71 self._heart_beat = HeartBeat(self) |
| 72 self._index = -1 | |
| 75 self._output_dir = None | 73 self._output_dir = None |
| 76 self._retries = retries | 74 self._retries = retries |
| 77 self._test_instance = test_instance | 75 self._test_instance = test_instance |
| 78 self._tests = tests | 76 self._tests = tests |
| 79 self._timeout = timeout | 77 self._timeout = timeout |
| 80 self._heart_beat = HeartBeat(self) | |
| 81 | |
| 82 @local_device_environment.handle_shard_failures | |
| 83 def RunTestsOnShard(self): | |
| 84 results = base_test_result.TestRunResults() | |
| 85 for test in self._tests: | |
| 86 tries_left = self._retries | |
| 87 result_type = None | |
| 88 while (result_type != base_test_result.ResultType.PASS | |
| 89 and tries_left > 0): | |
| 90 try: | |
| 91 self._TestSetUp(test) | |
| 92 result_type = self._RunSingleTest(test) | |
| 93 except device_errors.CommandTimeoutError: | |
| 94 result_type = base_test_result.ResultType.TIMEOUT | |
| 95 except device_errors.CommandFailedError: | |
| 96 logging.exception('Exception when executing %s.', test) | |
| 97 result_type = base_test_result.ResultType.FAIL | |
| 98 finally: | |
| 99 self._TestTearDown() | |
| 100 if result_type != base_test_result.ResultType.PASS: | |
| 101 try: | |
| 102 device_recovery.RecoverDevice(self._device, self._env.blacklist) | |
| 103 except device_errors.CommandTimeoutError: | |
| 104 logging.exception( | |
| 105 'Device failed to recover after failing %s.', test) | |
| 106 tries_left = tries_left - 1 | |
| 107 | |
| 108 results.AddResult(base_test_result.BaseTestResult(test, result_type)) | |
| 109 return results | |
| 110 | |
| 111 def _TestSetUp(self, test): | |
| 112 if not self._device.IsOnline(): | |
| 113 msg = 'Device %s is unresponsive.' % str(self._device) | |
| 114 raise device_errors.DeviceUnreachableError(msg) | |
| 115 | |
| 116 logging.info('Charge level: %s%%', | |
| 117 str(self._battery.GetBatteryInfo().get('level'))) | |
| 118 if self._test_instance.min_battery_level: | |
| 119 self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level) | |
| 120 | |
| 121 logging.info('temperature: %s (0.1 C)', | |
| 122 str(self._battery.GetBatteryInfo().get('temperature'))) | |
| 123 if self._test_instance.max_battery_temp: | |
| 124 self._battery.LetBatteryCoolToTemperature( | |
| 125 self._test_instance.max_battery_temp) | |
| 126 | |
| 127 if not self._device.IsScreenOn(): | |
| 128 self._device.SetScreen(True) | |
| 129 | |
| 130 if (self._test_instance.collect_chartjson_data | |
| 131 or self._tests[test].get('archive_output_dir')): | |
| 132 self._output_dir = tempfile.mkdtemp() | |
| 133 | |
| 134 self._current_test = test | |
| 135 self._heart_beat.Start() | |
| 136 | 78 |
| 137 def _RunSingleTest(self, test): | 79 def _RunSingleTest(self, test): |
| 138 self._test_instance.WriteBuildBotJson(self._output_dir) | 80 self._test_instance.WriteBuildBotJson(self._output_dir) |
| 139 | 81 |
| 140 timeout = self._tests[test].get('timeout', self._timeout) | 82 timeout = self._tests[test].get('timeout', self._timeout) |
| 141 cmd = self._CreateCmd(test) | 83 cmd = self._CreateCmd(test) |
| 142 cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT) | 84 cwd = os.path.abspath(host_paths.DIR_SOURCE_ROOT) |
| 143 | 85 |
| 144 logging.debug("Running %s with command '%s' on shard %d with timeout %d", | 86 logging.debug("Running %s with command '%s' on shard %d with timeout %d", |
| 145 test, cmd, self._index, timeout) | 87 test, cmd, self._index, timeout) |
| (...skipping 12 matching lines...) | |
| 158 end_time = time.time() | 100 end_time = time.time() |
| 159 exit_code = -1 | 101 exit_code = -1 |
| 160 output = e.output | 102 output = e.output |
| 161 json_output = '' | 103 json_output = '' |
| 162 result_type = base_test_result.ResultType.TIMEOUT | 104 result_type = base_test_result.ResultType.TIMEOUT |
| 163 | 105 |
| 164 return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code, | 106 return self._ProcessTestResult(test, cmd, start_time, end_time, exit_code, |
| 165 output, json_output, result_type) | 107 output, json_output, result_type) |
| 166 | 108 |
| 167 def _CreateCmd(self, test): | 109 def _CreateCmd(self, test): |
| 168 cmd = '%s --device %s' % (self._tests[test]['cmd'], str(self._device)) | 110 cmd = '%s' % self._tests[test]['cmd'] |
| 111 if self._device: | |
| 112 cmd = '%s --device %s' % (cmd, str(self._device)) | |
| 169 if self._output_dir: | 113 if self._output_dir: |
| 170 cmd = cmd + ' --output-dir=%s' % self._output_dir | 114 cmd = cmd + ' --output-dir=%s' % self._output_dir |
| 171 if self._test_instance.dry_run: | 115 if self._test_instance.dry_run: |
| 172 cmd = 'echo %s' % cmd | 116 cmd = 'echo %s' % cmd |
| 173 return cmd | 117 return cmd |
| 174 | 118 |
| 175 def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code, | 119 def _ProcessTestResult(self, test, cmd, start_time, end_time, exit_code, |
| 176 output, json_output, result_type): | 120 output, json_output, result_type): |
| 177 if exit_code is None: | 121 if exit_code is None: |
| 178 exit_code = -1 | 122 exit_code = -1 |
| (...skipping 47 matching lines...) | |
| 226 @staticmethod | 170 @staticmethod |
| 227 def _SaveResult(result): | 171 def _SaveResult(result): |
| 228 pickled = os.path.join(constants.PERF_OUTPUT_DIR, result['name']) | 172 pickled = os.path.join(constants.PERF_OUTPUT_DIR, result['name']) |
| 229 if os.path.exists(pickled): | 173 if os.path.exists(pickled): |
| 230 with file(pickled, 'r') as f: | 174 with file(pickled, 'r') as f: |
| 231 previous = pickle.loads(f.read()) | 175 previous = pickle.loads(f.read()) |
| 232 result['output'] = previous['output'] + result['output'] | 176 result['output'] = previous['output'] + result['output'] |
| 233 with file(pickled, 'w') as f: | 177 with file(pickled, 'w') as f: |
| 234 f.write(pickle.dumps(result)) | 178 f.write(pickle.dumps(result)) |
| 235 | 179 |
| 180 @property | |
| 181 def current_test(self): | |
| 182 return self._current_test | |
| 183 | |
| 184 | |
| 185 class DeviceTestShard(TestShard): | |
| 186 def __init__( | |
| 187 self, env, test_instance, device, index, tests, retries=3, timeout=None): | |
| 188 super(DeviceTestShard, self).__init__( | |
| 189 env, test_instance, tests, retries, timeout) | |
| 190 self._battery = battery_utils.BatteryUtils(device) if device else None | |
| 191 self._device = device | |
| 192 self._index = index | |
| 193 | |
| 194 @local_device_environment.handle_shard_failures | |
| 195 def RunTestsOnShard(self): | |
| 196 results = base_test_result.TestRunResults() | |
| 197 for test in self._tests: | |
| 198 tries_left = self._retries | |
| 199 result_type = None | |
| 200 while (result_type != base_test_result.ResultType.PASS | |
| 201 and tries_left > 0): | |
| 202 try: | |
| 203 self._TestSetUp(test) | |
| 204 result_type = self._RunSingleTest(test) | |
| 205 except device_errors.CommandTimeoutError: | |
| 206 result_type = base_test_result.ResultType.TIMEOUT | |
| 207 except device_errors.CommandFailedError: | |
| 208 logging.exception('Exception when executing %s.', test) | |
| 209 result_type = base_test_result.ResultType.FAIL | |
| 210 finally: | |
| 211 self._TestTearDown() | |
| 212 if result_type != base_test_result.ResultType.PASS: | |
| 213 try: | |
| 214 device_recovery.RecoverDevice(self._device, self._env.blacklist) | |
| 215 except device_errors.CommandTimeoutError: | |
| 216 logging.exception( | |
| 217 'Device failed to recover after failing %s.', test) | |
| 218 tries_left = tries_left - 1 | |
| 219 | |
| 220 results.AddResult(base_test_result.BaseTestResult(test, result_type)) | |
| 221 return results | |
| 222 | |
| 223 def _TestSetUp(self, test): | |
| 224 if not self._device.IsOnline(): | |
| 225 msg = 'Device %s is unresponsive.' % str(self._device) | |
| 226 raise device_errors.DeviceUnreachableError(msg) | |
| 227 | |
| 228 logging.info('Charge level: %s%%', | |
| 229 str(self._battery.GetBatteryInfo().get('level'))) | |
| 230 if self._test_instance.min_battery_level: | |
| 231 self._battery.ChargeDeviceToLevel(self._test_instance.min_battery_level) | |
| 232 | |
| 233 logging.info('temperature: %s (0.1 C)', | |
| 234 str(self._battery.GetBatteryInfo().get('temperature'))) | |
| 235 if self._test_instance.max_battery_temp: | |
| 236 self._battery.LetBatteryCoolToTemperature( | |
| 237 self._test_instance.max_battery_temp) | |
| 238 | |
| 239 if not self._device.IsScreenOn(): | |
| 240 self._device.SetScreen(True) | |
| 241 | |
| 242 if (self._test_instance.collect_chartjson_data | |
| 243 or self._tests[test].get('archive_output_dir')): | |
| 244 self._output_dir = tempfile.mkdtemp() | |
| 245 | |
| 246 self._current_test = test | |
| 247 self._heart_beat.Start() | |
| 248 | |
| 236 def _TestTearDown(self): | 249 def _TestTearDown(self): |
| 237 if self._output_dir: | 250 if self._output_dir: |
| 238 shutil.rmtree(self._output_dir, ignore_errors=True) | 251 shutil.rmtree(self._output_dir, ignore_errors=True) |
| 239 self._output_dir = None | 252 self._output_dir = None |
| 240 try: | 253 try: |
| 241 logging.info('Unmapping device ports for %s.', self._device) | 254 logging.info('Unmapping device ports for %s.', self._device) |
| 242 forwarder.Forwarder.UnmapAllDevicePorts(self._device) | 255 forwarder.Forwarder.UnmapAllDevicePorts(self._device) |
| 243 except Exception: # pylint: disable=broad-except | 256 except Exception: # pylint: disable=broad-except |
| 244 logging.exception('Exception when resetting ports.') | 257 logging.exception('Exception when resetting ports.') |
| 245 finally: | 258 finally: |
| 246 self._heart_beat.Stop() | 259 self._heart_beat.Stop() |
| 247 self._current_test = None | 260 self._current_test = None |
| 248 | 261 |
| 249 @property | 262 |
| 250 def current_test(self): | 263 class HostTestShard(TestShard): |
| 251 return self._current_test | 264 def __init__(self, env, test_instance, tests, retries=3, timeout=None): |
| 265 super(HostTestShard, self).__init__( | |
| 266 env, test_instance, tests, retries, timeout) | |
| 267 | |
| 268 @local_device_environment.handle_shard_failures | |
| 269 def RunTestsOnShard(self): | |
| 270 results = base_test_result.TestRunResults() | |
| 271 for test in self._tests: | |
| 272 tries_left = self._retries | |
| 273 result_type = None | |
| 274 while (result_type != base_test_result.ResultType.PASS | |
| 275 and tries_left > 0): | |
| 276 try: | |
| 277 self._TestSetUp(test) | |
| 278 result_type = self._RunSingleTest(test) | |
| 279 finally: | |
| 280 self._TestTearDown() | |
| 281 results.AddResult(base_test_result.BaseTestResult(test, result_type)) | |
| 282 return results | |
| 283 | |
| 284 def _TestSetUp(self, test): | |
| 285 if (self._test_instance.collect_chartjson_data | |
| 286 or self._tests[test].get('archive_output_dir')): | |
| 287 self._output_dir = tempfile.mkdtemp() | |
| 288 self._current_test = test | |
| 289 self._heart_beat.Start() | |
| 290 | |
| 291 def _TestTearDown(self): | |
| 292 if self._output_dir: | |
| 293 shutil.rmtree(self._output_dir, ignore_errors=True) | |
| 294 self._output_dir = None | |
| 295 self._heart_beat.Stop() | |
| 296 self._current_test = None | |
| 297 | |
| 252 | 298 |
| 253 class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun): | 299 class LocalDevicePerfTestRun(local_device_test_run.LocalDeviceTestRun): |
| 254 | 300 |
| 255 _DEFAULT_TIMEOUT = 60 * 60 | 301 _DEFAULT_TIMEOUT = 60 * 60 |
| 256 _CONFIG_VERSION = 1 | 302 _CONFIG_VERSION = 1 |
| 257 | 303 |
| 258 def __init__(self, env, test_instance): | 304 def __init__(self, env, test_instance): |
| 259 super(LocalDevicePerfTestRun, self).__init__(env, test_instance) | 305 super(LocalDevicePerfTestRun, self).__init__(env, test_instance) |
| 260 self._devices = None | 306 self._devices = None |
| 261 self._env = env | 307 self._env = env |
| 308 self._no_device_tests = {} | |
| 262 self._test_buckets = [] | 309 self._test_buckets = [] |
| 263 self._test_instance = test_instance | 310 self._test_instance = test_instance |
| 264 self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT | 311 self._timeout = None if test_instance.no_timeout else self._DEFAULT_TIMEOUT |
| 265 | 312 |
| 266 def SetUp(self): | 313 def SetUp(self): |
| 267 self._devices = self._GetAllDevices(self._env.devices, | 314 self._devices = self._GetAllDevices(self._env.devices, |
| 268 self._test_instance.known_devices_file) | 315 self._test_instance.known_devices_file) |
| 269 | 316 |
| 270 if os.path.exists(constants.PERF_OUTPUT_DIR): | 317 if os.path.exists(constants.PERF_OUTPUT_DIR): |
| 271 shutil.rmtree(constants.PERF_OUTPUT_DIR) | 318 shutil.rmtree(constants.PERF_OUTPUT_DIR) |
| (...skipping 26 matching lines...) | |
| 298 'Neither single_step or steps set in test_instance.') | 345 'Neither single_step or steps set in test_instance.') |
| 299 | 346 |
| 300 def _SplitTestsByAffinity(self): | 347 def _SplitTestsByAffinity(self): |
| 301 # This splits tests by their device affinity so that the same tests always | 348 # This splits tests by their device affinity so that the same tests always |
| 302 # run on the same devices. This is important for perf tests since different | 349 # run on the same devices. This is important for perf tests since different |
| 303 # devices might yield slightly different performance results. | 350 # devices might yield slightly different performance results. |
| 304 test_dict = self._GetStepsFromDict() | 351 test_dict = self._GetStepsFromDict() |
| 305 for test, test_config in test_dict['steps'].iteritems(): | 352 for test, test_config in test_dict['steps'].iteritems(): |
| 306 try: | 353 try: |
| 307 affinity = test_config['device_affinity'] | 354 affinity = test_config['device_affinity'] |
| 308 if len(self._test_buckets) < affinity + 1: | 355 if affinity == -1: |

jbudorick (2016/08/02 18:51:50): Could this allow for multiple host test shards by…
rnephew (Reviews Here) (2016/08/02 19:03:53): Probably, but I'm not sure there is much advantage.
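jbudorick's truncated question above asks whether the `-1` sentinel could be extended to multiple host test shards. Here is a hedged sketch of one way that might look, assuming the CL's step-config shape (`{test: {'device_affinity': N, ...}}`); the `SplitByAffinity` helper and the negative-affinity convention are hypothetical, not in the CL:

```python
import collections

def SplitByAffinity(steps):
  """Hypothetical generalization: each negative affinity is its own host shard."""
  device_buckets = collections.defaultdict(dict)  # affinity -> {test: config}
  host_buckets = collections.defaultdict(dict)    # -1, -2, ... -> {test: config}
  for test, config in steps.iteritems():  # Python 2, matching the CL
    bucket = host_buckets if config['device_affinity'] < 0 else device_buckets
    bucket[config['device_affinity']][test] = config
  return device_buckets, host_buckets
```

As rnephew notes, the advantage is unclear, and the CL keeps a single `_no_device_tests` dict instead.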
| 309 while len(self._test_buckets) != affinity + 1: | 356 self._no_device_tests[test] = test_config |
| 310 self._test_buckets.append({}) | 357 else: |
| 311 self._test_buckets[affinity][test] = test_config | 358 if len(self._test_buckets) < affinity + 1: |
| 359 while len(self._test_buckets) != affinity + 1: | |
| 360 self._test_buckets.append({}) | |
| 361 self._test_buckets[affinity][test] = test_config | |
| 312 except KeyError: | 362 except KeyError: |
| 313 logging.exception( | 363 logging.exception( |
| 314 'Test config for %s is bad.\n Config:%s', test, str(test_config)) | 364 'Test config for %s is bad.\n Config:%s', test, str(test_config)) |
| 315 | 365 |
| 316 @staticmethod | 366 @staticmethod |
| 317 def _GetAllDevices(active_devices, devices_path): | 367 def _GetAllDevices(active_devices, devices_path): |
| 318 try: | 368 try: |
| 319 if devices_path: | 369 if devices_path: |
| 320 devices = [device_utils.DeviceUtils(s) | 370 devices = [device_utils.DeviceUtils(s) |
| 321 for s in device_list.GetPersistentDeviceList(devices_path)] | 371 for s in device_list.GetPersistentDeviceList(devices_path)] |
| 322 if not devices and active_devices: | 372 if not devices and active_devices: |
| 323 logging.warning('%s is empty. Falling back to active devices.', | 373 logging.warning('%s is empty. Falling back to active devices.', |
| 324 devices_path) | 374 devices_path) |
| 325 devices = active_devices | 375 devices = active_devices |
| 326 else: | 376 else: |
| 327 logging.warning('Known devices file path not being passed. For device ' | 377 logging.warning('Known devices file path not being passed. For device ' |
| 328 'affinity to work properly, it must be passed.') | 378 'affinity to work properly, it must be passed.') |
| 329 devices = active_devices | 379 devices = active_devices |
| 330 except IOError as e: | 380 except IOError as e: |
| 331 logging.error('Unable to find %s [%s]', devices_path, e) | 381 logging.error('Unable to find %s [%s]', devices_path, e) |
| 332 devices = active_devices | 382 devices = active_devices |
| 333 return sorted(devices) | 383 return sorted(devices) |
| 334 | 384 |
| 335 #override | 385 #override |
| 336 def RunTests(self): | 386 def RunTests(self): |
| 337 # Affinitize the tests. | 387 # Affinitize the tests. |
| 338 self._SplitTestsByAffinity() | 388 self._SplitTestsByAffinity() |
| 339 if not self._test_buckets: | 389 if not self._test_buckets and not self._no_device_tests: |
| 340 raise local_device_test_run.NoTestsError() | 390 raise local_device_test_run.NoTestsError() |
| 341 | 391 |
| 342 def run_perf_tests(shard_id): | 392 def run_perf_tests(shard_id): |
| 393 if shard_id == -1: | |

jbudorick (2016/08/02 18:51:50): nit: I'd prefer

    if shard_id == -1:
      s = Host…

rnephew (Reviews Here) (2016/08/02 19:03:53): Done.
| 394 s = HostTestShard(self._env, self._test_instance, | |
| 395 self._no_device_tests, retries=3, timeout=self._timeout) | |

jbudorick (2016/08/02 18:51:50): nit: indentation is off
rnephew (Reviews Here) (2016/08/02 19:03:52): Done.
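jbudorick's first nit above is cut off in the review. Based on the final shape of `run_perf_tests` in this patchset (new lines 392-406), the suggested structure is plausibly the following; this is a reconstruction, not the reviewer's literal snippet, written as the closure inside `LocalDevicePerfTestRun.RunTests` where `self` is in scope:

```python
def run_perf_tests(shard_id):
  # Shard id -1 is the sentinel for tests with no device affinity.
  if shard_id == -1:
    s = HostTestShard(self._env, self._test_instance, self._no_device_tests,
                      retries=3, timeout=self._timeout)
  else:
    if device_status.IsBlacklisted(
        str(self._devices[shard_id]), self._env.blacklist):
      logging.warning('Device %s is not active. Will not create shard %s.',
                      str(self._devices[shard_id]), shard_id)
      return None
    s = DeviceTestShard(self._env, self._test_instance,
                        self._devices[shard_id], shard_id,
                        self._test_buckets[shard_id],
                        retries=self._env.max_tries, timeout=self._timeout)
  return s.RunTestsOnShard()
```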
| 396 return s.RunTestsOnShard() | |
| 343 if device_status.IsBlacklisted( | 397 if device_status.IsBlacklisted( |
| 344 str(self._devices[shard_id]), self._env.blacklist): | 398 str(self._devices[shard_id]), self._env.blacklist): |
| 345 logging.warning('Device %s is not active. Will not create shard %s.', | 399 logging.warning('Device %s is not active. Will not create shard %s.', |
| 346 str(self._devices[shard_id]), shard_id) | 400 str(self._devices[shard_id]), shard_id) |
| 347 return None | 401 return None |
| 348 s = TestShard(self._env, self._test_instance, self._devices[shard_id], | 402 s = DeviceTestShard(self._env, self._test_instance, |
| 349 shard_id, self._test_buckets[shard_id], | 403 self._devices[shard_id], shard_id, |
| 350 retries=self._env.max_tries, timeout=self._timeout) | 404 self._test_buckets[shard_id], |
| 405 retries=self._env.max_tries, timeout=self._timeout) | |
| 351 return s.RunTestsOnShard() | 406 return s.RunTestsOnShard() |
| 352 | 407 |
| 353 device_indices = range(min(len(self._devices), len(self._test_buckets))) | 408 device_indices = range(min(len(self._devices), len(self._test_buckets))) |
| 409 if self._no_device_tests: | |
| 410 device_indices.append(-1) | |
| 354 shards = parallelizer.Parallelizer(device_indices).pMap(run_perf_tests) | 411 shards = parallelizer.Parallelizer(device_indices).pMap(run_perf_tests) |
| 355 return [x for x in shards.pGet(self._timeout) if x is not None] | 412 return [x for x in shards.pGet(self._timeout) if x is not None] |
| 356 | 413 |
| 357 # override | 414 # override |
| 358 def TestPackage(self): | 415 def TestPackage(self): |
| 359 return 'perf' | 416 return 'perf' |
| 360 | 417 |
| 361 # override | 418 # override |
| 362 def _CreateShards(self, _tests): | 419 def _CreateShards(self, _tests): |
| 363 raise NotImplementedError | 420 raise NotImplementedError |
| (...skipping 53 matching lines...) | |
| 417 # override | 474 # override |
| 418 def _RunTest(self, _device, _test): | 475 def _RunTest(self, _device, _test): |
| 419 raise NotImplementedError | 476 raise NotImplementedError |
| 420 | 477 |
| 421 | 478 |
| 422 class TestDictVersionError(Exception): | 479 class TestDictVersionError(Exception): |
| 423 pass | 480 pass |
| 424 | 481 |
| 425 class PerfTestRunGetStepsError(Exception): | 482 class PerfTestRunGetStepsError(Exception): |
| 426 pass | 483 pass |