Index: tools/testing/dart/test_configurations.dart
diff --git a/tools/testing/dart/test_configurations.dart b/tools/testing/dart/test_configurations.dart
index 6c8a6157d74c659757a2122ce2e9c768f29b3582..20e5f654867a5fb12e4f9d0d3657a3c47ebcd180 100644
--- a/tools/testing/dart/test_configurations.dart
+++ b/tools/testing/dart/test_configurations.dart
@@ -2,23 +2,20 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
-library test_configurations;
-
-import "dart:async";
-import 'dart:convert';
+import 'dart:async';
import 'dart:io';
-import "dart:math" as math;
+import 'dart:math' as math;
import 'android.dart';
-import "browser_controller.dart";
-import "co19_test_config.dart";
-import "http_server.dart";
-import "path.dart";
-import "test_progress.dart";
-import "test_runner.dart";
-import "test_suite.dart";
-import "utils.dart";
-import "vm_test_config.dart";
+import 'browser_controller.dart';
+import 'co19_test_config.dart';
+import 'configuration.dart';
+import 'path.dart';
+import 'test_progress.dart';
+import 'test_runner.dart';
+import 'test_suite.dart';
+import 'utils.dart';
+import 'vm_test_config.dart';
/**
* The directories that contain test suites which follow the conventions
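
Note: throughout this file the string-keyed `Map<String, dynamic>` configuration is replaced by a typed `Configuration` object imported from the new `configuration.dart`, which is not part of this diff. The sketch below is only an assumption of its shape, reconstructed from the getters this file uses; the real class may differ.

// Hypothetical sketch only: the real class is defined in configuration.dart
// (not shown in this CL). Field names are inferred from the getters used in
// this file; the old string keys are noted in comments.
class Configuration {
  Configuration({
    this.taskCount,      // was conf['tasks']
    this.stepName,       // was conf['step_name']
    this.isVerbose,      // was conf['verbose']
    this.printTiming,    // was conf['time']
    this.listTests,      // was conf['list']
    this.reportInJson,   // was conf['report_in_json']
    this.recordingPath,  // was conf['record_to_file']
    this.replayPath,     // was conf['replay_from_file']
    this.appendLogs,     // was conf['append_logs']
  });

  final int taskCount;
  final String stepName;
  final bool isVerbose;
  final bool printTiming;
  final bool listTests;
  final bool reportInJson;
  final String recordingPath;
  final String replayPath;
  final bool appendLogs;

  // The compiler, runtime, mode, architecture, progress, and system getters
  // used below appear to return enum-like objects; see the Runtime and
  // Progress sketches further down.
}
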
@@ -58,32 +55,30 @@ final TEST_SUITE_DIRECTORIES = [
// This file is created by gclient runhooks.
final VS_TOOLCHAIN_FILE = new Path("build/win_toolchain.json");
-Future testConfigurations(List<Map<String, dynamic>> configurations) async {
+Future testConfigurations(List<Configuration> configurations) async {
var startTime = new DateTime.now();
// Extract global options from first configuration.
var firstConf = configurations[0];
- var maxProcesses = firstConf['tasks'] as int;
- var progressIndicator = firstConf['progress'] as String;
- BuildbotProgressIndicator.stepName = firstConf['step_name'] as String;
- var verbose = firstConf['verbose'] as bool;
- var printTiming = firstConf['time'] as bool;
- var listTests = firstConf['list'] as bool;
-
- var reportInJson = firstConf['report_in_json'] as bool;
+ var maxProcesses = firstConf.taskCount;
+ var progressIndicator = firstConf.progress;
+ BuildbotProgressIndicator.stepName = firstConf.stepName;
+ var verbose = firstConf.isVerbose;
+ var printTiming = firstConf.printTiming;
+ var listTests = firstConf.listTests;
- var recordingPath = firstConf['record_to_file'] as String;
- var recordingOutputPath = firstConf['replay_from_file'] as String;
+ var reportInJson = firstConf.reportInJson;
+ var recordingPath = firstConf.recordingPath;
+ var replayPath = firstConf.replayPath;
- Browser.resetBrowserConfiguration =
- firstConf['reset_browser_configuration'] as bool;
+ Browser.resetBrowserConfiguration = firstConf.resetBrowser;
- if (recordingPath != null && recordingOutputPath != null) {
+ if (recordingPath != null && replayPath != null) {
print("Fatal: Can't have the '--record_to_file' and '--replay_from_file'"
"at the same time. Exiting ...");
exit(1);
}
- if (!(firstConf['append_logs'] as bool)) {
+ if (!firstConf.appendLogs) {
var files = [
new File(TestUtils.flakyFileName),
new File(TestUtils.testOutcomeFileName)
@@ -95,70 +90,51 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
}
}
- DebugLogger.init(
- firstConf['write_debug_log'] as bool ? TestUtils.debugLogFilePath : null,
- append: firstConf['append_logs'] as bool);
+ DebugLogger.init(firstConf.writeDebugLog ? TestUtils.debugLogFilePath : null,
+ append: firstConf.appendLogs);
// Print the configurations being run by this execution of
// test.dart. However, don't do it if the silent progress indicator
// is used. This is only needed because of the junit tests.
- if (progressIndicator != 'silent') {
+ if (progressIndicator != Progress.silent) {
var outputWords = configurations.length > 1
? ['Test configurations:']
: ['Test configuration:'];
- for (Map conf in configurations) {
- List settings = ['compiler', 'runtime', 'mode', 'arch']
- .map((name) => conf[name])
- .toList();
- if (conf['checked'] as bool) settings.add('checked');
- if (conf['strong'] as bool) settings.add('strong');
+
+ for (var configuration in configurations) {
+ var settings = [
+ configuration.compiler.name,
+ configuration.runtime.name,
+ configuration.mode.name,
+ configuration.architecture.name
+ ];
+ if (configuration.isChecked) settings.add('checked');
+ if (configuration.isStrong) settings.add('strong');
Bill Hesse  2017/05/29 13:08:28
Please add printing of "fast-startup" if fast startup is enabled.

Bob Nystrom  2017/05/30 23:29:31
Done.
outputWords.add(settings.join('_'));
}
print(outputWords.join(' '));
}
- var runningBrowserTests = configurations.any((config) {
- return TestUtils.isBrowserRuntime(config['runtime'] as String);
- });
+ var runningBrowserTests =
+ configurations.any((config) => config.runtime.isBrowser);
- List<Future> serverFutures = [];
+ var serverFutures = <Future>[];
var testSuites = <TestSuite>[];
var maxBrowserProcesses = maxProcesses;
if (configurations.length > 1 &&
- (configurations[0]['test_server_port'] != 0 ||
- (configurations[0]['test_server_cross_origin_port'] != 0)) {
+ (configurations[0].testServerPort != 0 ||
+ configurations[0].testServerCrossOriginPort != 0)) {
print("If the http server ports are specified, only one configuration"
" may be run at a time");
exit(1);
}
- for (var conf in configurations) {
- var selectors = conf['selectors'] as Map<String, RegExp>;
- var useContentSecurityPolicy = conf['csp'] as bool;
+
+ for (var configuration in configurations) {
if (!listTests && runningBrowserTests) {
- // Start global http servers that serve the entire dart repo.
- // The http server is available on window.location.port, and a second
- // server for cross-domain tests can be found by calling
- // getCrossOriginPortNumber().
- var servers = new TestingServers(
- TestUtils.buildDir(conf),
- useContentSecurityPolicy,
- conf['runtime'] as String,
- null,
- conf['package_root'] as String,
- conf['packages'] as String);
- serverFutures.add(servers.startServers(conf['local_ip'] as String,
- port: conf['test_server_port'] as int,
- crossOriginPort: conf['test_server_cross_origin_port'] as int));
- conf['_servers_'] = servers;
- if (verbose) {
- serverFutures.last.then((_) {
- var commandline = servers.httpServerCommandLine();
- print('Started HttpServers: $commandline');
- });
- }
+ serverFutures.add(configuration.startServers());
}
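
The inline `TestingServers` setup deleted above becomes a single call to `configuration.startServers()`. That method is defined in `configuration.dart` and is not part of this diff; the sketch below only assumes it absorbs the removed logic. Names such as `_servers`, `packageRoot`, `packages`, and `localIP` are hypothetical, and `TestingServers` may have been updated to accept a `Runtime` rather than a string.

// Hypothetical sketch, assumed to live in the Configuration class and to reuse
// TestingServers from http_server.dart exactly as the removed code did.
Future startServers() {
  // Start global HTTP servers that serve the entire Dart repo; a second
  // server handles cross-origin tests (see the comment removed above).
  var servers = new TestingServers(TestUtils.buildDir(this),  // buildDir lookup is a guess
      useContentSecurityPolicy, runtime, null, packageRoot, packages);
  _servers = servers;

  var future = servers.startServers(localIP,
      port: testServerPort, crossOriginPort: testServerCrossOriginPort);
  if (isVerbose) {
    future = future.then((_) {
      print('Started HttpServers: ${servers.httpServerCommandLine()}');
    });
  }
  return future;
}
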
- if ((conf['runtime'] as String).startsWith('ie')) {
+ if (configuration.runtime.isIE) {
// NOTE: We've experienced random timeouts of tests on ie9/ie10. The
// underlying issue has not been determined yet. Our current hypothesis
// is that windows does not handle the IE processes independently.
@@ -166,18 +142,18 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
// issues with starting up a new browser just after killing the hanging
// browser.
maxBrowserProcesses = 1;
- } else if ((conf['runtime'] as String).startsWith('safari')) {
+ } else if (configuration.runtime.isSafari) {
// Safari does not allow us to run from a fresh profile, so we can only
// use one browser. Additionally, you can not start two simulators
// for mobile safari simultainiously.
maxBrowserProcesses = 1;
- } else if ((conf['runtime'] as String) == 'chrome' &&
+ } else if (configuration.runtime == Runtime.chrome &&
Platform.operatingSystem == 'macos') {
// Chrome on mac results in random timeouts.
// Issue: https://github.com/dart-lang/sdk/issues/23891
// This change does not fix the problem.
maxBrowserProcesses = math.max(1, maxBrowserProcesses ~/ 2);
- } else if ((conf['runtime'] as String) != 'drt') {
+ } else if (configuration.runtime != Runtime.drt) {
// Even on machines with more than 16 processors, don't open more
// than 15 browser instances, to avoid overloading the machine.
// This is especially important when running locally on powerful
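
The string checks (`startsWith('ie')`, `startsWith('safari')`, `== 'chrome'`, `!= 'drt'`) become getters and identity comparisons on a `Runtime` value. `Runtime` is defined in `configuration.dart`, not in this CL; the sketch below is a guess at the minimal shape these call sites imply, with other runtimes omitted.

// Hypothetical sketch only; the real class lives in configuration.dart.
class Runtime {
  static const vm = const Runtime._('vm');
  static const drt = const Runtime._('drt');
  static const chrome = const Runtime._('chrome');
  static const safari = const Runtime._('safari');
  static const ie9 = const Runtime._('ie9');
  static const ie10 = const Runtime._('ie10');
  static const ie11 = const Runtime._('ie11');
  static const dartPrecompiled = const Runtime._('dart_precompiled');
  // ...other runtimes omitted.

  final String name;
  const Runtime._(this.name);

  // These getters mirror the string tests deleted in this hunk.
  bool get isIE => name.startsWith('ie');
  bool get isSafari => name.startsWith('safari');

  // Replaces TestUtils.isBrowserRuntime(); the exact browser set is a guess.
  bool get isBrowser => this == drt || this == chrome || isIE || isSafari;
}

Compiler, System, Mode, and Architecture presumably follow the same enum-like pattern, which is why comparisons such as `configuration.compiler == Compiler.none` and `conf.system == System.android` later in this file can use `==` directly.
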
@@ -186,29 +162,30 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
}
// If we specifically pass in a suite only run that.
- if (conf['suite_dir'] != null) {
- var suite_path = new Path(conf['suite_dir'] as String);
- testSuites.add(new PKGTestSuite(conf, suite_path));
+ if (configuration.suiteDirectory != null) {
+ var suitePath = new Path(configuration.suiteDirectory);
+ testSuites.add(new PKGTestSuite(configuration, suitePath));
} else {
- for (final testSuiteDir in TEST_SUITE_DIRECTORIES) {
- final name = testSuiteDir.filename;
- if (selectors.containsKey(name)) {
- testSuites
- .add(new StandardTestSuite.forDirectory(conf, testSuiteDir));
+ for (var testSuiteDir in TEST_SUITE_DIRECTORIES) {
+ var name = testSuiteDir.filename;
+ if (configuration.selectors.containsKey(name)) {
+ testSuites.add(
+ new StandardTestSuite.forDirectory(configuration, testSuiteDir));
}
}
- for (String key in selectors.keys) {
+
+ for (var key in configuration.selectors.keys) {
if (key == 'co19') {
- testSuites.add(new Co19TestSuite(conf));
- } else if (conf['compiler'] == 'none' &&
- conf['runtime'] == 'vm' &&
+ testSuites.add(new Co19TestSuite(configuration));
+ } else if (configuration.compiler == Compiler.none &&
+ configuration.runtime == Runtime.vm &&
key == 'vm') {
// vm tests contain both cc tests (added here) and dart tests (added
// in [TEST_SUITE_DIRECTORIES]).
- testSuites.add(new VMTestSuite(conf));
- } else if (conf['analyzer'] as bool) {
+ testSuites.add(new VMTestSuite(configuration));
+ } else if (configuration.compiler == Compiler.dart2analyzer) {
if (key == 'analyze_library') {
- testSuites.add(new AnalyzeLibraryTestSuite(conf));
+ testSuites.add(new AnalyzeLibraryTestSuite(configuration));
}
}
}
@@ -216,11 +193,10 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
}
void allTestsFinished() {
- for (var conf in configurations) {
- if (conf.containsKey('_servers_')) {
- conf['_servers_'].stopServers();
- }
+ for (var configuration in configurations) {
+ configuration.stopServers();
}
+
DebugLogger.close();
TestUtils.deleteTempSnapshotDirectory(configurations[0]);
}
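
The old map-based code only stopped servers when the `'_servers_'` key had been set; the new code calls `configuration.stopServers()` unconditionally, so the method is presumably a no-op when no servers were started. A minimal sketch, using the hypothetical `_servers` field from the earlier `startServers()` sketch:

// Hypothetical sketch, assumed to live next to startServers() in Configuration.
void stopServers() {
  // Safe to call even when startServers() was never invoked.
  if (_servers != null) _servers.stopServers();
}
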
@@ -228,15 +204,15 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
var eventListener = <EventListener>[];
// We don't print progress if we list tests.
- if (progressIndicator != 'silent' && !listTests) {
+ if (progressIndicator != Progress.silent && !listTests) {
var printFailures = true;
var formatter = Formatter.normal;
- if (progressIndicator == 'color') {
- progressIndicator = 'compact';
+ if (progressIndicator == Progress.color) {
+ progressIndicator = Progress.compact;
formatter = Formatter.color;
}
- if (progressIndicator == 'diff') {
- progressIndicator = 'compact';
+ if (progressIndicator == Progress.diff) {
+ progressIndicator = Progress.compact;
formatter = Formatter.color;
printFailures = false;
eventListener.add(new StatusFileUpdatePrinter());
@@ -246,20 +222,22 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
if (printFailures) {
// The buildbot has it's own failure summary since it needs to wrap it
// into '@@@'-annotated sections.
- var printFailureSummary = progressIndicator != 'buildbot';
+ var printFailureSummary = progressIndicator != Progress.buildbot;
eventListener.add(new TestFailurePrinter(printFailureSummary, formatter));
}
- eventListener.add(
- ProgressIndicator.fromName(progressIndicator, startTime, formatter));
+ eventListener.add(ProgressIndicator.fromProgress(
+ progressIndicator, startTime, formatter));
if (printTiming) {
eventListener.add(new TimingPrinter(startTime));
}
eventListener.add(new SkippedCompilationsPrinter());
}
- if (firstConf['write_test_outcome_log'] as bool) {
+
+ if (firstConf.writeTestOutcomeLog) {
eventListener.add(new TestOutcomeLogWriter());
}
- if (firstConf['copy_coredumps'] as bool) {
+
+ if (firstConf.copyCoreDumps) {
eventListener.add(new UnexpectedCrashLogger());
}
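
`Progress` replaces the progress-indicator strings ('silent', 'color', 'diff', 'compact', 'buildbot'), and `ProgressIndicator.fromName()` becomes `fromProgress()`. Neither definition is in this diff; below is a minimal sketch of what the call sites above imply. `fromProgress()` presumably switches on the typed value to pick an indicator the way `fromName()` switched on strings.

// Hypothetical sketch only; the real class lives in configuration.dart.
class Progress {
  static const compact = const Progress._('compact');
  static const color = const Progress._('color');
  static const diff = const Progress._('diff');
  static const silent = const Progress._('silent');
  static const buildbot = const Progress._('buildbot');
  // ...other progress modes omitted.

  final String name;
  const Progress._(this.name);
}
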
@@ -275,8 +253,9 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
// If any of the configurations need to access android devices we'll first
// make a pool of all available adb devices.
AdbDevicePool adbDevicePool;
- var needsAdbDevicePool = configurations.any((Map conf) {
- return conf['runtime'] == 'dart_precompiled' && conf['system'] == 'android';
+ var needsAdbDevicePool = configurations.any((conf) {
+ return conf.runtime == Runtime.dartPrecompiled &&
+ conf.system == System.android;
});
if (needsAdbDevicePool) {
adbDevicePool = await AdbDevicePool.create();
@@ -287,19 +266,6 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
await Future.wait(serverFutures);
}
- if (Platform.isWindows) {
- // When running tests on Windows, use cdb from depot_tools to dump
- // stack traces of tests timing out.
- try {
- var text =
- await new File(VS_TOOLCHAIN_FILE.toNativePath()).readAsString();
- firstConf['win_sdk_path'] = JSON.decode(text)['win_sdk'];
- } on dynamic {
- // Ignore errors here. If win_sdk is not found, stack trace dumping
- // for timeouts won't work.
- }
- }
-
// [firstConf] is needed here, since the ProcessQueue needs to know the
// settings of 'noBatch' and 'local_ip'
new ProcessQueue(
@@ -312,6 +278,6 @@ Future testConfigurations(List<Map<String, dynamic>> configurations) async {
allTestsFinished,
verbose,
recordingPath,
- recordingOutputPath,
+ replayPath,
adbDevicePool);
}