Chromium Code Reviews

Unified Diff: tools/testing/dart/lib/test_case.dart

Issue 841193003: cleanup to tools/testing/dart (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: one last bit Created 5 years, 11 months ago
Index: tools/testing/dart/lib/test_case.dart
diff --git a/tools/testing/dart/lib/test_case.dart b/tools/testing/dart/lib/test_case.dart
new file mode 100644
index 0000000000000000000000000000000000000000..abdb0a1ba41fe8f7c97bd0c69858aa53b6dec855
--- /dev/null
+++ b/tools/testing/dart/lib/test_case.dart
@@ -0,0 +1,177 @@
+library test_case;
+
+import 'command.dart';
+import 'status_file_parser.dart';
+import 'test_information.dart';
+import 'test_utils.dart';
+import 'utils.dart';
+
+const int SLOW_TIMEOUT_MULTIPLIER = 4;
+
+typedef void TestCaseEvent(TestCase testCase);
+
+/**
+ * TestCase contains all the information needed to run a test and evaluate
+ * its output. Running a test involves starting a separate process with
+ * the executable and arguments given by the TestCase, and recording its
+ * stdout and stderr output streams and its exit code. TestCase only
+ * contains static information about the test; actually running the test is
+ * performed by [ProcessQueue] using a [RunningProcess] object.
+ *
+ * The output information is stored in a [CommandOutput] instance contained
+ * in TestCase.commandOutputs. The last CommandOutput instance is responsible
+ * for evaluating whether the test has passed, failed, crashed, or timed out,
+ * and the TestCase has information about what the expected result of the
+ * test should be.
+ *
+ * The TestCase has a callback function, [completedHandler], that is run when
+ * the test is completed.
+ */
+class TestCase extends UniqueObject {
+ // Flags set in _expectations from the optional argument info.
+ static final int IS_NEGATIVE = 1 << 0;
+ static final int HAS_RUNTIME_ERROR = 1 << 1;
+ static final int HAS_STATIC_WARNING = 1 << 2;
+ static final int IS_NEGATIVE_IF_CHECKED = 1 << 3;
+ static final int HAS_COMPILE_ERROR = 1 << 4;
+ static final int HAS_COMPILE_ERROR_IF_CHECKED = 1 << 5;
+ static final int EXPECT_COMPILE_ERROR = 1 << 6;
+ /**
+ * A list of commands to execute. Most test cases have a single command.
+ * Dart2js tests have two commands, one to compile the source and another
+ * to execute it. Some isolate tests might even have three, if they require
+ * compiling multiple sources that are run in isolation.
+ */
+ List<Command> commands;
+ Map<Command, CommandOutput> commandOutputs = new Map<Command,CommandOutput>();
+
+ Map configuration;
+ String displayName;
+ int _expectations = 0;
+ int hash = 0;
+ Set<Expectation> expectedOutcomes;
+
+ TestCase(this.displayName,
+ this.commands,
+ this.configuration,
+ this.expectedOutcomes,
+ {isNegative: false,
+ TestInformation info: null}) {
+ if (isNegative || displayName.contains("negative_test")) {
+ _expectations |= IS_NEGATIVE;
+ }
+ if (info != null) {
+ _setExpectations(info);
+ hash = info.originTestPath.relativeTo(TestUtils.dartDir)
+ .toString().hashCode;
+ }
+ }
+
+ void _setExpectations(TestInformation info) {
+ // We don't want to keep the entire (large) TestInformation structure,
+ // so we copy the needed bools into flags set in a single integer.
+ if (info.hasRuntimeError) _expectations |= HAS_RUNTIME_ERROR;
+ if (info.hasStaticWarning) _expectations |= HAS_STATIC_WARNING;
+ if (info.isNegativeIfChecked) _expectations |= IS_NEGATIVE_IF_CHECKED;
+ if (info.hasCompileError) _expectations |= HAS_COMPILE_ERROR;
+ if (info.hasCompileErrorIfChecked) {
+ _expectations |= HAS_COMPILE_ERROR_IF_CHECKED;
+ }
+ if (info.hasCompileError ||
+ (configuration['checked'] && info.hasCompileErrorIfChecked)) {
+ _expectations |= EXPECT_COMPILE_ERROR;
+ }
+ }
+
+ bool get isNegative => _expectations & IS_NEGATIVE != 0;
+ bool get hasRuntimeError => _expectations & HAS_RUNTIME_ERROR != 0;
+ bool get hasStaticWarning => _expectations & HAS_STATIC_WARNING != 0;
+ bool get isNegativeIfChecked => _expectations & IS_NEGATIVE_IF_CHECKED != 0;
+ bool get hasCompileError => _expectations & HAS_COMPILE_ERROR != 0;
+ bool get hasCompileErrorIfChecked =>
+ _expectations & HAS_COMPILE_ERROR_IF_CHECKED != 0;
+ bool get expectCompileError => _expectations & EXPECT_COMPILE_ERROR != 0;
+
+ bool get unexpectedOutput {
+ var outcome = lastCommandOutput.result(this);
+ return !expectedOutcomes.any((expectation) {
+ return outcome.canBeOutcomeOf(expectation);
+ });
+ }
+
+ Expectation get result => lastCommandOutput.result(this);
+
+ CommandOutput get lastCommandOutput {
+ if (commandOutputs.length == 0) {
+ throw new Exception("CommandOutputs is empty, maybe no command was run? ("
+ "displayName: '$displayName', "
+ "configurationString: '$configurationString')");
+ }
+ return commandOutputs[commands[commandOutputs.length - 1]];
+ }
+
+ Command get lastCommandExecuted {
+ if (commandOutputs.length == 0) {
+ throw new Exception("CommandOutputs is empty, maybe no command was run? ("
+ "displayName: '$displayName', "
+ "configurationString: '$configurationString')");
+ }
+ return commands[commandOutputs.length - 1];
+ }
+
+ int get timeout {
+ if (expectedOutcomes.contains(Expectation.SLOW)) {
+ return configuration['timeout'] * SLOW_TIMEOUT_MULTIPLIER;
+ } else {
+ return configuration['timeout'];
+ }
+ }
+
+ String get configurationString {
+ final compiler = configuration['compiler'];
+ final runtime = configuration['runtime'];
+ final mode = configuration['mode'];
+ final arch = configuration['arch'];
+ final checked = configuration['checked'] ? '-checked' : '';
+ return "$compiler-$runtime$checked ${mode}_$arch";
+ }
+
+ List<String> get batchTestArguments {
+ assert(commands.last is ProcessCommand);
+ return (commands.last as ProcessCommand).arguments;
+ }
+
+ bool get isFlaky {
+ if (expectedOutcomes.contains(Expectation.SKIP) ||
+ expectedOutcomes.contains(Expectation.SKIP_BY_DESIGN)) {
+ return false;
+ }
+
+ return expectedOutcomes
+ .where((expectation) => !expectation.isMetaExpectation).length > 1;
+ }
+
+ bool get isFinished {
+ return commandOutputs.length > 0 &&
+ (!lastCommandOutput.successful ||
+ commands.length == commandOutputs.length);
+ }
+}
+
+
+/**
+ * BrowserTestCase has an extra compilation command that is run in a separate
+ * process, before the regular test is run as in the base class [TestCase].
+ * If the compilation command fails, then the rest of the test is not run.
+ */
+class BrowserTestCase extends TestCase {
+
+ BrowserTestCase(displayName, commands, configuration,
+ expectedOutcomes, info, isNegative, this._testingUrl)
+ : super(displayName, commands, configuration,
+ expectedOutcomes, isNegative: isNegative, info: info);
+
+ String _testingUrl;
+
+ String get testingUrl => _testingUrl;
+}
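
A note on the expectation flags: rather than holding on to the full TestInformation object, TestCase._setExpectations copies the booleans it needs into individual bits of the _expectations integer, and the getters read them back with bitwise AND. The following standalone Dart sketch shows the same technique with hypothetical class and flag names; it is illustrative only and not part of the patch.

// Standalone sketch of the bit-flag packing used by TestCase._expectations.
// The Expectations class and flag names here are hypothetical stand-ins.
class Expectations {
  static const int hasRuntimeError = 1 << 0;
  static const int hasStaticWarning = 1 << 1;
  static const int hasCompileError = 1 << 2;

  int _flags = 0;

  // Record a flag with bitwise OR.
  void mark(int flag) {
    _flags |= flag;
  }

  // Test a flag with bitwise AND (in Dart, & binds tighter than !=).
  bool isSet(int flag) => _flags & flag != 0;
}

void main() {
  var expectations = new Expectations();
  expectations.mark(Expectations.hasRuntimeError);
  print(expectations.isSet(Expectations.hasRuntimeError)); // true
  print(expectations.isSet(Expectations.hasCompileError)); // false
}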
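The commandOutputs map is keyed by Command, so the command whose output arrived last is commands[commandOutputs.length - 1]; isFinished then reports a test as done as soon as a command fails or all commands have produced output. Below is a self-contained sketch of that bookkeeping, using simplified stand-in Command and CommandOutput classes rather than the real ones from command.dart.

// Simplified stand-ins for Command and CommandOutput; only the
// progress-tracking logic mirrors TestCase.
class Command {
  final String name;
  Command(this.name);
}

class CommandOutput {
  final bool successful;
  CommandOutput(this.successful);
}

class MiniTestCase {
  final List<Command> commands;
  final Map<Command, CommandOutput> commandOutputs = {};
  MiniTestCase(this.commands);

  // The last executed command is the one at index commandOutputs.length - 1.
  Command get lastCommandExecuted => commands[commandOutputs.length - 1];

  CommandOutput get lastCommandOutput => commandOutputs[lastCommandExecuted]!;

  // Finished when a command has failed or every command has produced output.
  bool get isFinished =>
      commandOutputs.length > 0 &&
      (!lastCommandOutput.successful ||
          commands.length == commandOutputs.length);
}

void main() {
  var compile = new Command('compile');
  var run = new Command('run');
  var testCase = new MiniTestCase([compile, run]);

  testCase.commandOutputs[compile] = new CommandOutput(true);
  print(testCase.isFinished); // false: compile succeeded, run not executed yet

  testCase.commandOutputs[run] = new CommandOutput(true);
  print(testCase.isFinished); // true: all commands have run
}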
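Tests marked Expectation.SLOW get the configured timeout scaled by SLOW_TIMEOUT_MULTIPLIER (4). For example, with a configured timeout of 60 seconds, a slow test is allowed 240 seconds. A minimal sketch of that calculation (the 60-second base value is illustrative):

const int SLOW_TIMEOUT_MULTIPLIER = 4;

// Mirrors the arithmetic in TestCase.timeout; isSlow stands in for
// expectedOutcomes.contains(Expectation.SLOW).
int timeoutFor(int baseTimeout, bool isSlow) =>
    isSlow ? baseTimeout * SLOW_TIMEOUT_MULTIPLIER : baseTimeout;

void main() {
  print(timeoutFor(60, false)); // 60
  print(timeoutFor(60, true)); // 240
}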
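The configurationString getter combines the compiler, runtime, checked flag, mode, and arch into a single label. With illustrative configuration values (not taken from the patch), the result looks like this:

// Hypothetical configuration values; the real map is built by the test
// scripts, not by this sketch.
void main() {
  var configuration = {
    'compiler': 'dart2js',
    'runtime': 'chrome',
    'mode': 'release',
    'arch': 'x64',
    'checked': true,
  };
  var checked = configuration['checked'] == true ? '-checked' : '';
  var label = "${configuration['compiler']}-${configuration['runtime']}"
      "$checked ${configuration['mode']}_${configuration['arch']}";
  print(label); // dart2js-chrome-checked release_x64
}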