Chromium Code Reviews

| Index: build/android/pylib/perf/test_runner.py |
| diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py |
| index 71a613e93c0e1c5903c9fc7a87ed4d6b1bd513ae..806289788cf15beb862d14644bcc37e8199fb123 100644 |
| --- a/build/android/pylib/perf/test_runner.py |
| +++ b/build/android/pylib/perf/test_runner.py |
| @@ -20,8 +20,22 @@ graph data. |
| with the step results previously saved. The buildbot will then process the graph |
| data accordingly. |
| - |
| The JSON steps file contains a dictionary in the format: |
| +{ "version": int, |
| + "steps": { |
| + "foo": { |
| + "device_affinity": int, |
| + "cmd": "script_to_execute foo" |
| + }, |
| + "bar": { |
| + "device_affinity": int, |
| + "cmd": "script_to_execute bar" |
| + } |
| + } |
| +} |
| + |
| +# TODO(bulach): remove once it rolls downstream, crbug.com/378862. |
| +The OLD JSON steps file contains a dictionary in the format: |
| [ |
| ["step_name_foo", "script_to_execute foo"], |
| ["step_name_bar", "script_to_execute bar"] |
| @@ -41,6 +55,7 @@ option: |
| --device: the serial number to be passed to all adb commands. |
| """ |
| +import collections |
| import datetime |
| import logging |
| import os |
| @@ -85,6 +100,7 @@ def PrintSummary(test_names): |
| logging.info('*' * 80) |
| logging.info('Sharding summary') |
| total_time = 0 |
| + device_total_time = collections.defaultdict(int) |
| for test_name in test_names: |
| file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name) |
| if not os.path.exists(file_name): |
| @@ -96,6 +112,9 @@ def PrintSummary(test_names): |
| result['name'], result['exit_code'], result['total_time'], |
| result['device']) |
| total_time += result['total_time'] |
|
jbudorick
2014/06/03 13:52:52
nit: you don't really need a separate total_time a
bulach
2014/06/03 14:25:52
Done.
|
| + device_total_time[result['device']] += result['total_time'] |
| + for device, device_time in device_total_time.iteritems(): |
| + logging.info('Total for device %s : %d secs', device, device_time) |
| logging.info('Total steps time: %d secs', total_time) |
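
For illustration, a minimal sketch (not part of this CL) of the per-device accumulation pattern PrintSummary now uses; the result dicts below are made up stand-ins for the pickled step results:

import collections

# Made-up per-step results, shaped like the dicts PrintSummary reads back
# from constants.PERF_OUTPUT_DIR.
results = [
    {'name': 'step_name_foo', 'device': '0123456789abcdef', 'total_time': 120},
    {'name': 'step_name_bar', 'device': '0123456789abcdef', 'total_time': 300},
    {'name': 'step_name_baz', 'device': 'fedcba9876543210', 'total_time': 180},
]

total_time = 0
device_total_time = collections.defaultdict(int)  # missing devices start at 0
for result in results:
    total_time += result['total_time']
    device_total_time[result['device']] += result['total_time']

for device, device_time in sorted(device_total_time.items()):
    print('Total for device %s : %d secs' % (device, device_time))
print('Total steps time: %d secs' % total_time)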
| @@ -131,17 +150,22 @@ class _HeartBeatLogger(object): |
| class TestRunner(base_test_runner.BaseTestRunner): |
| - def __init__(self, test_options, device, tests, flaky_tests): |
| + def __init__(self, test_options, device, shard_index, max_shard, tests, |
| + flaky_tests): |
| """A TestRunner instance runs a perf test on a single device. |
| Args: |
| test_options: A PerfOptions object. |
| device: Device to run the tests. |
| + shard_index: the index of the shard assigned to this device. |
| + max_shard: the total number of shards. |
| tests: a dict mapping test_name to command. |
| flaky_tests: a list of flaky test_name. |
| """ |
| super(TestRunner, self).__init__(device, None, 'Release') |
| self._options = test_options |
| + self._shard_index = shard_index |
| + self._max_shard = max_shard |
| self._tests = tests |
| self._flaky_tests = flaky_tests |
| @@ -164,6 +188,16 @@ class TestRunner(base_test_runner.BaseTestRunner): |
| result['name']), 'w') as f: |
| f.write(pickle.dumps(result)) |
| + def _CheckDeviceAffinity(self, test_name): |
| + """Returns True if test_name has affinity for this shard.""" |
| + affinity = (self._tests['steps'][test_name]['device_affinity'] % |
|
jbudorick
2014/06/03 13:52:52
My point with the shard index vs the serial was th
bulach
2014/06/03 14:25:52
let's split this:
1) this is using the persistent
|
| + self._max_shard) |
| + if self._shard_index == affinity: |
| + return True |
| + logging.info('Skipping %s on %s (affinity is %s, shard index is %s)', |
| + test_name, self.device_serial, affinity, self._shard_index) |
| + return False |
| + |
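
For illustration, a standalone sketch (not part of this CL) of the affinity check: a step runs on a shard only when its device_affinity, taken modulo the shard count, matches that shard's index. The shard counts and affinity values below are made up:

def has_affinity(device_affinity, shard_index, max_shard):
    # A step belongs to this shard iff its affinity, wrapped around by the
    # number of shards, equals this device's shard index.
    return device_affinity % max_shard == shard_index

# With three attached devices (shard indices 0, 1, 2), a step whose
# device_affinity is 7 always lands on shard 1 (7 % 3 == 1).
assert has_affinity(7, shard_index=1, max_shard=3)
assert not has_affinity(7, shard_index=0, max_shard=3)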
| def _LaunchPerfTest(self, test_name): |
| """Runs a perf test. |
| @@ -173,6 +207,9 @@ class TestRunner(base_test_runner.BaseTestRunner): |
| Returns: |
| A tuple containing (Output, base_test_result.ResultType) |
| """ |
| + if not self._CheckDeviceAffinity(test_name): |
| + return '', base_test_result.ResultType.PASS |
| + |
| try: |
| logging.warning('Unmapping device ports') |
| forwarder.Forwarder.UnmapAllDevicePorts(self.device) |
| @@ -181,7 +218,8 @@ class TestRunner(base_test_runner.BaseTestRunner): |
| logging.error('Exception when tearing down device %s', e) |
| cmd = ('%s --device %s' % |
| - (self._tests[test_name], self.device.old_interface.GetDevice())) |
| + (self._tests['steps'][test_name]['cmd'], |
| + self.device_serial)) |
| logging.info('%s : %s', test_name, cmd) |
| start_time = datetime.datetime.now() |
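
For illustration, with made-up values, the command string _LaunchPerfTest builds from the step's cmd and the device serial looks like this:

# Hypothetical values; not taken from a real steps file or device.
step_cmd = 'script_to_execute foo'       # steps['steps'][test_name]['cmd']
device_serial = '0123456789abcdef'       # self.device_serial
cmd = '%s --device %s' % (step_cmd, device_serial)
assert cmd == 'script_to_execute foo --device 0123456789abcdef'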
| @@ -211,7 +249,7 @@ class TestRunner(base_test_runner.BaseTestRunner): |
| exit_code = -1 |
| logging.info('%s : exit_code=%d in %d secs at %s', |
| test_name, exit_code, (end_time - start_time).seconds, |
| - self.device.old_interface.GetDevice()) |
| + self.device_serial) |
|
jbudorick
2014/06/03 13:52:52
Sneaking in ahead of me here, I see.
(In the futu
bulach
2014/06/03 14:25:52
want me to keep the old way? happy either way :)
|
| result_type = base_test_result.ResultType.FAIL |
| if exit_code == 0: |
| result_type = base_test_result.ResultType.PASS |
| @@ -230,7 +268,7 @@ class TestRunner(base_test_runner.BaseTestRunner): |
| 'actual_exit_code': actual_exit_code, |
| 'result_type': result_type, |
| 'total_time': (end_time - start_time).seconds, |
| - 'device': self.device.old_interface.GetDevice(), |
| + 'device': self.device_serial, |
| 'cmd': cmd, |
| } |
| self._SaveResult(persisted_result) |
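
For illustration, a self-contained sketch (not part of this CL) of the save/load round trip: _SaveResult pickles a dict like persisted_result into a file named after the step, and PrintSummary later unpickles it to report timings. The directory, serial, and values below are made up:

import os
import pickle

PERF_OUTPUT_DIR = '/tmp/perf_output'  # stand-in for constants.PERF_OUTPUT_DIR

persisted_result = {
    'name': 'step_name_foo',            # also used as the output file name
    'output': '...benchmark stdout...',
    'exit_code': 0,
    'actual_exit_code': 0,
    'result_type': 'PASS',              # stand-in for base_test_result.ResultType.PASS
    'total_time': 300,                  # seconds
    'device': '0123456789abcdef',       # device serial
    'cmd': 'script_to_execute foo --device 0123456789abcdef',
}

if not os.path.isdir(PERF_OUTPUT_DIR):
    os.makedirs(PERF_OUTPUT_DIR)

# What _SaveResult does: pickle the dict into a file named after the step.
file_name = os.path.join(PERF_OUTPUT_DIR, persisted_result['name'])
with open(file_name, 'wb') as f:
    f.write(pickle.dumps(persisted_result))

# What PrintSummary does later: read the pickle back and report the timings.
with open(file_name, 'rb') as f:
    loaded = pickle.loads(f.read())
assert loaded['device'] == persisted_result['device']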