| Index: Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py
| diff --git a/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py b/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py
| index 86e1a0911b997943e77c43115fac4fe278b9c539..4ed9178d4ccf739f7f2bdbd84ffcc1a053d81cde 100644
| --- a/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py
| +++ b/Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner.py
| @@ -50,7 +50,9 @@ WorkerException = message_pool.WorkerException
|
|
| class TestRunInterruptedException(Exception):
| +
| """Raised when a test run should be stopped immediately."""
| +
| def __init__(self, reason):
| Exception.__init__(self)
| self.reason = reason
| @@ -61,6 +63,7 @@ class TestRunInterruptedException(Exception):
|
|
| class LayoutTestRunner(object):
| +
| def __init__(self, options, port, printer, results_directory, test_is_slow_fn):
| self._options = options
| self._port = port
| @@ -98,8 +101,8 @@ class LayoutTestRunner(object):
|
| self._printer.write_update('Sharding tests ...')
| locked_shards, unlocked_shards = self._sharder.shard_tests(test_inputs,
| - int(self._options.child_processes), self._options.fully_parallel,
| - self._options.run_singly or (self._options.batch_size == 1))
| + int(self._options.child_processes), self._options.fully_parallel,
| + self._options.run_singly or (self._options.batch_size == 1))
|
| # We don't have a good way to coordinate the workers so that they don't
| # try to run the shards that need a lock. The easiest solution is to
| @@ -123,14 +126,14 @@ class LayoutTestRunner(object):
| if num_workers > 0:
| with message_pool.get(self, self._worker_factory, num_workers, self._port.host) as pool:
| pool.run(('test_list', shard.name, shard.test_inputs) for shard in self._shards_to_redo)
| - except TestRunInterruptedException, e:
| + except TestRunInterruptedException as e:
| _log.warning(e.reason)
| run_results.interrupted = True
| except KeyboardInterrupt:
| self._printer.flush()
| self._printer.writeln('Interrupted, exiting ...')
| run_results.keyboard_interrupted = True
| - except Exception, e:
| + except Exception as e:
| _log.debug('%s("%s") raised, exiting' % (e.__class__.__name__, str(e)))
| raise
| finally:
| @@ -159,7 +162,7 @@ class LayoutTestRunner(object):
| # so that existing buildbot grep rules work.
| def interrupt_if_at_failure_limit(limit, failure_count, run_results, message):
| if limit and failure_count >= limit:
| - message += " %d tests run." % (run_results.expected + run_results.unexpected)
| + message += ' %d tests run.' % (run_results.expected + run_results.unexpected)
| self._mark_interrupted_tests_as_skipped(run_results)
| raise TestRunInterruptedException(message)
|
| @@ -167,21 +170,25 @@ class LayoutTestRunner(object):
| self._options.exit_after_n_failures,
| run_results.unexpected_failures,
| run_results,
| - "Exiting early after %d failures." % run_results.unexpected_failures)
| + 'Exiting early after %d failures.' % run_results.unexpected_failures)
| interrupt_if_at_failure_limit(
| self._options.exit_after_n_crashes_or_timeouts,
| run_results.unexpected_crashes + run_results.unexpected_timeouts,
| run_results,
| # This differs from ORWT because it does not include WebProcess crashes.
| - "Exiting early after %d crashes and %d timeouts." % (run_results.unexpected_crashes, run_results.unexpected_timeouts))
| + 'Exiting early after %d crashes and %d timeouts.' % (run_results.unexpected_crashes, run_results.unexpected_timeouts))
|
| def _update_summary_with_result(self, run_results, result):
| - expected = self._expectations.matches_an_expected_result(result.test_name, result.type, self._options.pixel_tests or result.reftest_type, self._options.enable_sanitizer)
| + expected = self._expectations.matches_an_expected_result(
| + result.test_name,
| + result.type,
| + self._options.pixel_tests or result.reftest_type,
| + self._options.enable_sanitizer)
| exp_str = self._expectations.get_expectations_string(result.test_name)
| got_str = self._expectations.expectation_to_string(result.type)
|
| if result.device_failed:
| - self._printer.print_finished_test(result, False, exp_str, "Aborted")
| + self._printer.print_finished_test(result, False, exp_str, 'Aborted')
| return
|
| run_results.add(result, expected, self._test_is_slow(result.test_name))
| @@ -204,11 +211,13 @@ class LayoutTestRunner(object):
| self._update_summary_with_result(self._current_run_results, result)
|
| def _handle_device_failed(self, worker_name, list_name, remaining_tests):
| - _log.warning("%s has failed" % worker_name)
| + _log.warning('%s has failed' % worker_name)
| if remaining_tests:
| self._shards_to_redo.append(TestShard(list_name, remaining_tests))
|
| +
| class Worker(object):
| +
| def __init__(self, caller, results_directory, options):
| self._caller = caller
| self._worker_number = caller.worker_number
| @@ -285,7 +294,7 @@ class Worker(object):
|
| self._caller.post('started_test', test_input, test_timeout_sec)
| result = single_test_runner.run_single_test(self._port, self._options, self._results_directory,
| - self._name, self._driver, test_input, stop_when_done)
| + self._name, self._driver, test_input, stop_when_done)
|
| result.shard_name = shard_name
| result.worker_name = self._name
| @@ -297,7 +306,7 @@ class Worker(object):
| return result.device_failed
|
| def stop(self):
| - _log.debug("%s cleaning up" % self._name)
| + _log.debug('%s cleaning up' % self._name)
| self._kill_driver()
|
| def _timeout(self, test_input):
| @@ -318,10 +327,9 @@ class Worker(object):
| driver = self._driver
| self._driver = None
| if driver:
| - _log.debug("%s killing driver" % self._name)
| + _log.debug('%s killing driver' % self._name)
| driver.stop()
|
| -
| def _clean_up_after_test(self, test_input, result):
| test_name = test_input.test_name
|
| @@ -333,16 +341,17 @@ class Worker(object):
| self._batch_count = 0
|
| # Print the error message(s).
| - _log.debug("%s %s failed:" % (self._name, test_name))
| + _log.debug('%s %s failed:' % (self._name, test_name))
| for f in result.failures:
| - _log.debug("%s %s" % (self._name, f.message()))
| + _log.debug('%s %s' % (self._name, f.message()))
| elif result.type == test_expectations.SKIP:
| - _log.debug("%s %s skipped" % (self._name, test_name))
| + _log.debug('%s %s skipped' % (self._name, test_name))
| else:
| - _log.debug("%s %s passed" % (self._name, test_name))
| + _log.debug('%s %s passed' % (self._name, test_name))
|
|
| class TestShard(object):
| +
| """A test shard is a named list of TestInputs."""
|
| def __init__(self, name, test_inputs):
| @@ -358,6 +367,7 @@ class TestShard(object):
|
|
| class Sharder(object):
| +
| def __init__(self, test_split_fn, max_locked_shards):
| self._split = test_split_fn
| self._max_locked_shards = max_locked_shards
| @@ -429,7 +439,7 @@ class Sharder(object):
| # The locked shards still need to be limited to self._max_locked_shards in order to not
| # overload the http server for the http tests.
| return (self._resize_shards(locked_virtual_shards + locked_shards, self._max_locked_shards, 'locked_shard'),
| - unlocked_virtual_shards + unlocked_shards)
| + unlocked_virtual_shards + unlocked_shards)
|
| def _shard_by_directory(self, test_inputs):
| """Returns two lists of shards, each shard containing all the files in a directory.
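
The one behavioral-syntax change that repeats across this patch is the move from the Python 2-only "except ExceptionType, e:" spelling to "except ExceptionType as e:", which Python 2.6+ and Python 3 both accept; the remaining hunks are quote-style and continuation-line indentation cleanups. A minimal, self-contained sketch of the new spelling, assuming only what the patch itself shows about TestRunInterruptedException (the reason string below is illustrative, not taken from the patch):

    class TestRunInterruptedException(Exception):
        """Raised when a test run should be stopped immediately."""

        def __init__(self, reason):
            Exception.__init__(self)
            self.reason = reason


    try:
        # Illustrative reason text, modeled on the messages the runner builds.
        raise TestRunInterruptedException('Exiting early after 3 failures. 120 tests run.')
    except TestRunInterruptedException as e:
        # Portable form used by the patch; the pre-patch spelling was
        # "except TestRunInterruptedException, e:", which Python 3 rejects.
        print(e.reason)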