Chromium Code Reviews| Index: tools/testing/dart/test_runner.dart |
| diff --git a/tools/testing/dart/test_runner.dart b/tools/testing/dart/test_runner.dart |
| index 93c38e98f844924b5a8b20a76bcdea74f4ebb52b..64b3ce47efaddc6632401d3f91f50a0f405c24f8 100644 |
| --- a/tools/testing/dart/test_runner.dart |
| +++ b/tools/testing/dart/test_runner.dart |
| @@ -24,6 +24,7 @@ import "status_file_parser.dart"; |
| import "test_progress.dart"; |
| import "test_suite.dart"; |
| import "utils.dart"; |
| +import 'record_and_replay.dart'; |
| const int NO_TIMEOUT = 0; |
| const int SLOW_TIMEOUT_MULTIPLIER = 4; |
| @@ -1337,6 +1338,10 @@ class ProcessQueue { |
| int _numFailedTests = 0; |
| bool _allTestsWereEnqueued = false; |
| + // Support for recording and replaying test commands. |
| + TestCaseRecorder _testCaseRecorder; |
| + TestCaseOutputArchive _testCaseOutputArchive; |
| + |
| /** The number of tests we allow to actually fail before we stop retrying. */ |
| int _MAX_FAILED_NO_RETRY = 4; |
| bool _verbose; |
| @@ -1380,7 +1385,9 @@ class ProcessQueue { |
| this._eventListener, |
| this._allDone, |
| [bool verbose = false, |
| - bool listTests = false]) |
| + bool listTests = false, |
| + this._testCaseRecorder, |
| + this._testCaseOutputArchive]) |
| : _verbose = verbose, |
| _listTests = listTests, |
| _tests = new Queue<TestCase>(), |
| @@ -1411,6 +1418,28 @@ class ProcessQueue { |
| } |
| void _runTests(List<TestSuite> testSuites) { |
| + void addTestCase(TestCase testCase) { |
| + _tests.add(testCase); |
| + eventTestAdded(testCase); |
| + } |
| + var newTest = (TestCase testCase) { |
| + addTestCase(testCase); |
| + _runTest(testCase); |
| + }; |
| + if (_testCaseRecorder != null) { |
| + // TODO: do we want to call [addTestCase]??? |
|
ricow1
2013/05/27 09:05:17
FIXME -> TODO
I would probably organize this a little differently
kustermann
2013/05/27 11:11:37
Done.
|
| + newTest = _testCaseRecorder.nextTestCase; |
| + } else if (_testCaseOutputArchive != null) { |
| + newTest = (TestCase testCase) { |
| + addTestCase(testCase); |
| + Timer.run(() { |
|
ricow1
2013/05/27 09:05:17
add comment why we do this
|
| + var output = _testCaseOutputArchive.outputOf(testCase); |
| + testCase.completed(); |
| + eventFinishedTestCase(testCase); |
| + }); |
| + }; |
| + } |
| + |
| + // TODO: For some reason we cannot call this method on all test suites |
| // in parallel. |
| // If we do, not all tests get enqueued (if --arch=all was specified, |
| @@ -1421,9 +1450,12 @@ class ProcessQueue { |
| if (!iterator.moveNext()) { |
| _allTestsWereEnqueued = true; |
| eventAllTestsKnown(); |
| + if (_testCaseRecorder != null) { |
| + _testCaseRecorder.finish(); |
| + } |
| _checkDone(); |
| } else { |
| - iterator.current.forEachTest(_runTest, _testCache, enqueueNextSuite); |
| + iterator.current.forEachTest(newTest, _testCache, enqueueNextSuite); |
| } |
| } |
| enqueueNextSuite(); |
| @@ -1495,8 +1527,6 @@ class ProcessQueue { |
| browserUsed = test.configuration['runtime']; |
| if (_needsSelenium) _ensureSeleniumServerRunning(); |
| } |
| - eventTestAdded(test); |
| - _tests.add(test); |
| _tryRunTest(); |
| } |