OLD | NEW |
(Empty) | |
| 1 library test_pub; |
| 2 import 'dart:async'; |
| 3 import 'dart:convert'; |
| 4 import 'dart:io'; |
| 5 import 'dart:math'; |
| 6 import 'package:http/testing.dart'; |
| 7 import 'package:path/path.dart' as p; |
| 8 import 'package:scheduled_test/scheduled_process.dart'; |
| 9 import 'package:scheduled_test/scheduled_server.dart'; |
| 10 import 'package:scheduled_test/scheduled_stream.dart'; |
| 11 import 'package:scheduled_test/scheduled_test.dart' hide fail; |
| 12 import 'package:shelf/shelf.dart' as shelf; |
| 13 import 'package:shelf/shelf_io.dart' as shelf_io; |
| 14 import 'package:unittest/compact_vm_config.dart'; |
| 15 import 'package:yaml/yaml.dart'; |
| 16 import '../lib/src/entrypoint.dart'; |
| 17 import '../lib/src/exit_codes.dart' as exit_codes; |
| 18 import '../lib/src/git.dart' as gitlib; |
| 19 import '../lib/src/http.dart'; |
| 20 import '../lib/src/io.dart'; |
| 21 import '../lib/src/lock_file.dart'; |
| 22 import '../lib/src/log.dart' as log; |
| 23 import '../lib/src/package.dart'; |
| 24 import '../lib/src/pubspec.dart'; |
| 25 import '../lib/src/source/hosted.dart'; |
| 26 import '../lib/src/source/path.dart'; |
| 27 import '../lib/src/source_registry.dart'; |
| 28 import '../lib/src/system_cache.dart'; |
| 29 import '../lib/src/utils.dart'; |
| 30 import '../lib/src/validator.dart'; |
| 31 import '../lib/src/version.dart'; |
| 32 import 'descriptor.dart' as d; |
| 33 import 'serve_packages.dart'; |
| 34 export 'serve_packages.dart'; |
/// Sets up the unittest configuration shared by all pub tests.
initConfig() {
  useCompactVMConfiguration();
  filterStacks = true;
  // Disable unittest's own timeout; scheduled_test manages timeouts
  // per-schedule (see _integration).
  unittestConfiguration.timeout = null;
}
/// The currently-running test package server, if any (see [serve]).
var _server;

/// The paths that have been requested from [_server] since the last call
/// to [getRequestedPaths].
final _requestedPaths = <String>[];

/// Completed with [_server]'s port once the server is bound; reset when
/// the schedule completes (see [_portCompleter]).
Completer<int> _portCompleterCache;

/// Matches dart2js output that has been minified (minification strips
/// the hooks comment).
Matcher isMinifiedDart2JSOutput =
    isNot(contains("// The code supports the following hooks"));

/// Matches dart2js output that has not been minified.
Matcher isUnminifiedDart2JSOutput =
    contains("// The code supports the following hooks");

/// Maps package names to paths that override where they're loaded from.
///
/// Only non-null inside a [withBarbackVersions] block.
Map<String, String> _packageOverrides;

/// Every barback checkout found in the repo, keyed by its version.
final _barbackVersions = _findBarbackVersions();

/// Extra dependency overrides required for specific barback version
/// ranges.
final _barbackDeps = {
  new VersionConstraint.parse("<0.15.0"): {
    "source_maps": "0.9.4"
  }
};
/// Locates every barback checkout in the repo, keyed by version.
///
/// Includes the development copy under pkg/barback and each versioned
/// "barback-<version>" checkout under third_party/pkg.
Map<Version, String> _findBarbackVersions() {
  var result = {};
  var repoBarback = p.join(repoRoot, 'pkg', 'barback');
  result[new Pubspec.load(repoBarback, new SourceRegistry()).version] =
      repoBarback;
  var checkouts = listDir(p.join(repoRoot, 'third_party', 'pkg'))
      .where((dir) => p.basename(dir).startsWith('barback'));
  for (var dir in checkouts) {
    // The directory name is "barback-<version>".
    result[new Version.parse(split1(p.basename(dir), '-').last)] = dir;
  }
  return result;
}
/// Runs the tests defined by [callback] once for each barback checkout
/// whose version matches [versionConstraint], overriding the "barback"
/// package (plus any extra deps from [_barbackDeps]) to that checkout.
void withBarbackVersions(String versionConstraint, void callback()) {
  var constraint = new VersionConstraint.parse(versionConstraint);
  var validVersions = _barbackVersions.keys.where(constraint.allows);
  if (validVersions.isEmpty) {
    throw new ArgumentError(
        'No available barback version matches "$versionConstraint".');
  }
  for (var version in validVersions) {
    group("with barback $version", () {
      setUp(() {
        _packageOverrides = {};
        _packageOverrides['barback'] = _barbackVersions[version];
        // This `constraint` shadows the outer one: it's the barback range
        // each dependency override applies to.
        _barbackDeps.forEach((constraint, deps) {
          if (!constraint.allows(version)) return;
          // Here `version` shadows the barback version: it's the
          // dependency's own version string.
          deps.forEach((packageName, version) {
            _packageOverrides[packageName] =
                p.join(repoRoot, 'third_party', 'pkg', '$packageName-$version');
          });
        });
        // Clear the overrides so they don't leak into other tests.
        currentSchedule.onComplete.schedule(() {
          _packageOverrides = null;
        });
      });
      callback();
    });
  }
}
/// The completer for [port], created lazily and cleared when the current
/// schedule finishes.
Completer<int> get _portCompleter {
  if (_portCompleterCache == null) {
    _portCompleterCache = new Completer<int>();
    currentSchedule.onComplete.schedule(() {
      _portCompleterCache = null;
    }, 'clearing the port completer');
  }
  return _portCompleterCache;
}
| 101 Future<int> get port => _portCompleter.future; |
/// Schedules returning (and clearing) the list of paths requested from
/// the test server since the last call.
Future<List<String>> getRequestedPaths() {
  return schedule(() {
    var requested = new List<String>.from(_requestedPaths);
    _requestedPaths.clear();
    return requested;
  }, "get previous network requests");
}
/// Schedules starting an HTTP server that serves [contents] as static
/// files, recording each requested path in [_requestedPaths].
///
/// NOTE(review): the body below is machine-generated async code (manual
/// Completer/try-catch chains produced by an async-to-sync compiler);
/// its exact statement order is load-bearing, so don't hand-edit it.
void serve([List<d.Descriptor> contents]) {
  var baseDir = d.dir("serve-dir", contents);
  _hasServer = true;
  schedule(() {
    final completer0 = new Completer();
    scheduleMicrotask(() {
      try {
        // Shut down any server left over from a previous serve() call.
        _closeServer().then((x0) {
          try {
            x0;
            shelf_io.serve(((request) {
              currentSchedule.heartbeat();
              var path =
                  p.posix.fromUri(request.url.path.replaceFirst("/", ""));
              _requestedPaths.add(path);
              // Serve the descriptor's contents, or 404 if loading fails.
              return validateStream(
                  baseDir.load(
                      path)).then((stream) => new shelf.Response.ok(stream)).catchError((error) {
                return new shelf.Response.notFound('File "$path" not found.');
              });
            }), 'localhost', 0).then((x1) {
              try {
                var server = x1;
                _server = server;
                // Let waiting code (e.g. startPub) discover the port.
                _portCompleter.complete(_server.port);
                currentSchedule.onComplete.schedule(_closeServer);
                completer0.complete(null);
              } catch (e1) {
                completer0.completeError(e1);
              }
            }, onError: (e2) {
              completer0.completeError(e2);
            });
          } catch (e0) {
            completer0.completeError(e0);
          }
        }, onError: (e3) {
          completer0.completeError(e3);
        });
      } catch (e4) {
        completer0.completeError(e4);
      }
    });
    return completer0.future;
  }, 'starting a server serving:\n${baseDir.describe()}');
}
/// Closes the current test package server, if one is running, and resets
/// all server-related state.
Future _closeServer() {
  var server = _server;
  if (server == null) return new Future.value();
  _server = null;
  _hasServer = false;
  _portCompleterCache = null;
  return server.close();
}
/// Whether [serve] has been called (so a server is running or scheduled).
bool _hasServer = false;

/// Converts [value] to a YAML string.
///
/// JSON is a subset of YAML, so encoding as JSON yields valid YAML.
String yaml(value) => JSON.encode(value);

/// The path of the sandbox directory for the current test.
String get sandboxDir => _sandboxDir;
String _sandboxDir;

/// The absolute path of the repo's "pkg" directory, located relative to
/// the running Dart executable.
final String pkgPath =
    p.absolute(p.join(p.dirname(Platform.executable), '../../../../pkg'));

/// The pub cache directory's path within the sandbox.
final String cachePath = "cache";

/// The test application directory's path within the sandbox.
final String appPath = "myapp";

/// The "packages" directory's path within the test app.
final String packagesPath = "$appPath/packages";

// NOTE(review): this flag isn't read anywhere in this file — confirm
// it's still needed before removing.
bool _abortScheduled = false;
/// A pub command that fetches dependencies (`get`, `upgrade`, or
/// `downgrade`), paired with the output pattern indicating success.
class RunCommand {
  static final get = new RunCommand(
      'get',
      new RegExp(r'Got dependencies!|Changed \d+ dependenc(y|ies)!'));
  static final upgrade = new RunCommand(
      'upgrade',
      new RegExp(r'(No dependencies changed\.|Changed \d+ dependenc(y|ies)!)$'));
  static final downgrade = new RunCommand(
      'downgrade',
      new RegExp(r'(No dependencies changed\.|Changed \d+ dependenc(y|ies)!)$'));

  /// The name of the command as passed to pub.
  final String name;

  /// Matches output indicating the command succeeded.
  final RegExp success;

  RunCommand(this.name, this.success);
}
/// Defines the tests in [callback] twice: once in a group for `pub get`
/// and once for `pub upgrade`.
void forBothPubGetAndUpgrade(void callback(RunCommand command)) {
  for (var command in [RunCommand.get, RunCommand.upgrade]) {
    group(command.name, () => callback(command));
  }
}
/// Schedules running `pub <command>` with [args] and validating its
/// output and exit code.
///
/// [output], [error], and [warning] may be strings, [RegExp]s, or
/// [Matcher]s. Passing [error] defaults [exitCode] to 1 and suppresses
/// the default success [output]; [warning] is also matched against
/// stderr but without implying failure.
void pubCommand(RunCommand command, {Iterable<String> args, output, error,
    warning, int exitCode}) {
  if (error != null && warning != null) {
    throw new ArgumentError("Cannot pass both 'error' and 'warning'.");
  }
  var allArgs = [command.name];
  if (args != null) allArgs.addAll(args);
  // By default, expect the command's success message on stdout.
  if (output == null) output = command.success;
  // An expected error implies failure: nonzero exit and no stdout check.
  if (error != null && exitCode == null) exitCode = 1;
  if (error != null) output = null;
  // A warning is matched on stderr but keeps the success expectations;
  // this must happen after the two error checks above.
  if (warning != null) error = warning;
  schedulePub(args: allArgs, output: output, error: error, exitCode: exitCode);
}
/// Schedules running `pub get`; see [pubCommand] for the parameters.
void pubGet({Iterable<String> args, output, error, warning, int exitCode}) =>
    pubCommand(
        RunCommand.get,
        args: args,
        output: output,
        error: error,
        warning: warning,
        exitCode: exitCode);
/// Schedules running `pub upgrade`; see [pubCommand] for the parameters.
void pubUpgrade({Iterable<String> args, output, error, warning,
        int exitCode}) =>
    pubCommand(
        RunCommand.upgrade,
        args: args,
        output: output,
        error: error,
        warning: warning,
        exitCode: exitCode);
/// Schedules running `pub downgrade`; see [pubCommand] for the
/// parameters.
void pubDowngrade({Iterable<String> args, output, error, warning,
        int exitCode}) =>
    pubCommand(
        RunCommand.downgrade,
        args: args,
        output: output,
        error: error,
        warning: warning,
        exitCode: exitCode);
/// Starts a `pub run` process (or `pub global run` if [global] is true)
/// with [args] and returns it once its leading "Loading" output has been
/// consumed.
ScheduledProcess pubRun({bool global: false, Iterable<String> args}) {
  var pubArgs = global ? ["global", "run"] : ["run"];
  // [args] is optional, so guard against null rather than crashing in
  // addAll.
  if (args != null) pubArgs.addAll(args);
  var pub = startPub(args: pubArgs);
  // Consume and ignore any leading "Loading ..." lines so callers can
  // assert on the real output.
  pub.stdout.expect(consumeWhile(startsWith("Loading")));
  return pub;
}
/// Defines an integration test: a [test] whose body runs in a fresh
/// sandbox directory (see [_integration]).
void integration(String description, void body()) =>
    _integration(description, body, test);

/// Like [integration], but uses [solo_test] to run only this test.
void solo_integration(String description, void body()) =>
    _integration(description, body, solo_test);
/// Shared implementation of [integration] and [solo_integration]:
/// registers the test via [testFn], giving it a fresh sandbox directory
/// and an extended timeout.
void _integration(String description, void body(), [Function testFn]) {
  testFn(description, () {
    // Integration tests spawn real pub processes; give them extra time,
    // and twice as much again on Windows.
    currentSchedule.timeout *= 2;
    if (Platform.operatingSystem == "windows") {
      currentSchedule.timeout *= 2;
    }
    _sandboxDir = createSystemTempDir();
    d.defaultRoot = sandboxDir;
    currentSchedule.onComplete.schedule(
        () => deleteEntry(_sandboxDir),
        'deleting the sandbox directory');
    body();
  });
}
| 257 String get testDirectory => p.absolute(p.dirname(libraryPath('test_pub'))); |
/// Schedules renaming [from] to [to], both relative to the sandbox.
void scheduleRename(String from, String to) {
  schedule(() {
    return renameDir(p.join(sandboxDir, from), p.join(sandboxDir, to));
  }, 'renaming $from to $to');
}
/// Schedules creating a symlink at [symlink] pointing to [target], both
/// relative to the sandbox.
void scheduleSymlink(String target, String symlink) {
  schedule(() {
    var targetPath = p.join(sandboxDir, target);
    var symlinkPath = p.join(sandboxDir, symlink);
    return createSymlink(targetPath, symlinkPath);
  }, 'symlinking $target to $symlink');
}
/// Schedules running pub with [args] and validating its stdout ([output]
/// or, for JSON output, [outputJson]), stderr ([error]), and [exitCode].
///
/// [output] and [error] may be strings, [RegExp]s, or [Matcher]s.
void schedulePub({List args, output, error, outputJson,
    Future<Uri> tokenEndpoint, int exitCode: exit_codes.SUCCESS}) {
  // Only one form of stdout validation may be used at a time.
  assert(output == null || outputJson == null);
  var pub = startPub(args: args, tokenEndpoint: tokenEndpoint);
  pub.shouldExit(exitCode);
  var failures = [];
  // Captured by the first callback, read by the second.
  var stderr;
  expect(
      Future.wait(
          [pub.stdoutStream().toList(), pub.stderrStream().toList()]).then((results) {
    var stdout = results[0].join("\n");
    stderr = results[1].join("\n");
    if (outputJson == null) {
      _validateOutput(failures, 'stdout', output, stdout);
      return null;
    }
    // awaitObject presumably resolves Futures nested in the expected
    // JSON before comparison — NOTE(review): confirm against utils.dart.
    return awaitObject(outputJson).then((resolved) {
      _validateOutputJson(failures, 'stdout', resolved, stdout);
    });
  }).then((_) {
    _validateOutput(failures, 'stderr', error, stderr);
    if (!failures.isEmpty) throw new TestFailure(failures.join('\n'));
  }), completes);
}
/// Starts a `pub lish` process pointed at [server], passing its /token
/// URL as both the --server argument and the OAuth2 token endpoint.
ScheduledProcess startPublish(ScheduledServer server, {List args}) {
  var tokenEndpoint =
      server.url.then((url) => url.resolve('/token').toString());
  var extraArgs = args == null ? [] : args;
  var allArgs = flatten(['lish', '--server', tokenEndpoint, extraArgs]);
  return startPub(args: allArgs, tokenEndpoint: tokenEndpoint);
}
/// Handles the confirmation prompt at the start of `pub lish` for the
/// standard test package and answers "y".
void confirmPublish(ScheduledProcess pub) {
  // TODO(review): brittle — this hard-codes the test_pkg 1.0.0 file
  // listing; any change to the standard test package breaks it.
  pub.stdout.expect(startsWith('Publishing test_pkg 1.0.0 to '));
  pub.stdout.expect(
      emitsLines(
          "|-- LICENSE\n" "|-- lib\n" "| '-- test_pkg.dart\n" "'-- pubspec.yaml\n" "\n"
              "Looks great! Are you ready to upload your package (y/n)?"));
  pub.writeLine("y");
}
/// Starts a pub process running the pub snapshot with [args], using the
/// sandbox app directory as the working directory.
///
/// If [tokenEndpoint] is given, it's exposed to pub via the
/// _PUB_TEST_TOKEN_ENDPOINT environment variable.
ScheduledProcess startPub({List args, Future<Uri> tokenEndpoint}) {
  // Resolves [relPath] within the sandbox.
  String pathInSandbox(String relPath) {
    return p.join(p.absolute(sandboxDir), relPath);
  }
  ensureDir(pathInSandbox(appPath));
  // Make the Dart executable path absolute if it contains a separator,
  // since the child process runs with a different working directory.
  var dartBin = Platform.executable;
  if (dartBin.contains(Platform.pathSeparator)) {
    dartBin = p.absolute(dartBin);
  }
  // Run the pub snapshot that ships next to the Dart executable.
  var pubPath = p.join(p.dirname(dartBin), 'snapshots/pub.dart.snapshot');
  var dartArgs = [pubPath, '--verbose'];
  dartArgs.addAll(args);
  if (tokenEndpoint == null) tokenEndpoint = new Future.value();
  var environmentFuture = tokenEndpoint.then((tokenEndpoint) {
    var environment = {};
    environment['_PUB_TESTING'] = 'true';
    environment['PUB_CACHE'] = pathInSandbox(cachePath);
    // Pin the SDK version pub reports so tests are deterministic.
    environment['_PUB_TEST_SDK_VERSION'] = "0.1.2+3";
    if (tokenEndpoint != null) {
      environment['_PUB_TEST_TOKEN_ENDPOINT'] = tokenEndpoint.toString();
    }
    // If a test package server is running, point the default hosted
    // source at it instead of pub.dartlang.org.
    if (_hasServer) {
      return port.then((p) {
        environment['PUB_HOSTED_URL'] = "http://localhost:$p";
        return environment;
      });
    }
    return environment;
  });
  return new PubProcess.start(
      dartBin,
      dartArgs,
      environment: environmentFuture,
      workingDirectory: pathInSandbox(appPath),
      description: args.isEmpty ? 'pub' : 'pub ${args.first}');
}
/// A [ScheduledProcess] for pub that parses pub's --verbose log output
/// and re-splits it into stdout (MESSAGE) and stderr (ERROR/WARNING)
/// streams by log level.
class PubProcess extends ScheduledProcess {
  // Cached tee'd ends of the streams; each accessor re-tees so the
  // streams can be consumed more than once.
  Stream<Pair<log.Level, String>> _log;
  Stream<String> _stdout;
  Stream<String> _stderr;

  PubProcess.start(executable, arguments, {workingDirectory, environment,
      String description, Encoding encoding: UTF8})
      : super.start(
          executable,
          arguments,
          workingDirectory: workingDirectory,
          environment: environment,
          description: description,
          encoding: encoding);

  /// Returns a fresh view of the merged, level-tagged log stream.
  Stream<Pair<log.Level, String>> _logStream() {
    if (_log == null) {
      _log = mergeStreams(
          _outputToLog(super.stdoutStream(), log.Level.MESSAGE),
          _outputToLog(super.stderrStream(), log.Level.ERROR));
    }
    // Tee so this call and future calls each get a complete stream.
    var pair = tee(_log);
    _log = pair.first;
    return pair.last;
  }

  /// Matches a verbose-mode log line: a four-character level name,
  /// then ":" or "|", then the message.
  final _logLineRegExp = new RegExp(r"^([A-Z ]{4})[:|] (.*)$");

  /// Maps level-name prefixes to their [log.Level]s.
  final _logLevels = [
      log.Level.ERROR,
      log.Level.WARNING,
      log.Level.MESSAGE,
      log.Level.IO,
      log.Level.SOLVER,
      log.Level.FINE].fold(<String, log.Level>{}, (levels, level) {
    levels[level.name] = level;
    return levels;
  });

  /// Tags each line of [stream] with its log level.
  ///
  /// Lines without a level prefix get [defaultLevel]; lines whose prefix
  /// isn't a known level name inherit the previous line's level.
  Stream<Pair<log.Level, String>> _outputToLog(Stream<String> stream,
      log.Level defaultLevel) {
    var lastLevel;
    return stream.map((line) {
      var match = _logLineRegExp.firstMatch(line);
      if (match == null) return new Pair<log.Level, String>(defaultLevel, line);
      var level = _logLevels[match[1]];
      if (level == null) level = lastLevel;
      lastLevel = level;
      return new Pair<log.Level, String>(level, match[2]);
    });
  }

  /// Pub's "stdout": only MESSAGE-level log lines.
  Stream<String> stdoutStream() {
    if (_stdout == null) {
      _stdout = _logStream().expand((entry) {
        if (entry.first != log.Level.MESSAGE) return [];
        return [entry.last];
      });
    }
    var pair = tee(_stdout);
    _stdout = pair.first;
    return pair.last;
  }

  /// Pub's "stderr": only ERROR- and WARNING-level log lines.
  Stream<String> stderrStream() {
    if (_stderr == null) {
      _stderr = _logStream().expand((entry) {
        if (entry.first != log.Level.ERROR &&
            entry.first != log.Level.WARNING) {
          return [];
        }
        return [entry.last];
      });
    }
    var pair = tee(_stderr);
    _stderr = pair.first;
    return pair.last;
  }
}
| 415 String get _packageRoot => p.absolute(Platform.packageRoot); |
/// Fails the current test if Git isn't installed; on Windows, also sets
/// a fixed 30-second schedule timeout first.
void ensureGit() {
  if (Platform.operatingSystem == "windows") {
    currentSchedule.timeout = new Duration(seconds: 30);
  }
  if (gitlib.isInstalled) return;
  throw new Exception("Git must be installed to run this test.");
}
/// Creates a globally-activated hosted package [package]-[version] in
/// the sandbox: populates the hosted cache with [contents] and writes a
/// matching lockfile under the cache's "global_packages" directory.
///
/// [pkg] and [hosted] add extra dependencies to the lockfile (see
/// [_createLockFile]).
void makeGlobalPackage(String package, String version,
    Iterable<d.Descriptor> contents, {Iterable<String> pkg, Map<String,
    String> hosted}) {
  // Make sure the test doesn't hit the real pub server.
  serveNoPackages();
  d.hostedCache([d.dir("$package-$version", contents)]).create();
  var lockFile = _createLockFile(pkg: pkg, hosted: hosted);
  var id =
      new PackageId(package, "hosted", new Version.parse(version), package);
  lockFile.packages[package] = id;
  var sources = new SourceRegistry();
  sources.register(new HostedSource());
  sources.register(new PathSource());
  d.dir(
      cachePath,
      [
          d.dir(
              "global_packages",
              [d.file("$package.lock", lockFile.serialize(null, sources))])]).create();
}
/// Creates a "pubspec.lock" file in [package]'s directory with
/// dependencies on the given [sandbox], [pkg], and [hosted] packages
/// (see [_createLockFile]).
void createLockFile(String package, {Iterable<String> sandbox,
    Iterable<String> pkg, Map<String, String> hosted}) {
  var sources = new SourceRegistry();
  sources.register(new HostedSource());
  sources.register(new PathSource());
  var lockFile = _createLockFile(sandbox: sandbox, pkg: pkg, hosted: hosted);
  var contents = lockFile.serialize(null, sources);
  d.file(p.join(package, 'pubspec.lock'), contents).create();
}
/// Builds a [LockFile] with path dependencies on the [sandbox] and [pkg]
/// packages and hosted dependencies on the [hosted] name-to-version map.
///
/// [pkg] packages resolve to the repo's "pkg" directory (or to
/// [_packageOverrides]); their own pubspec dependencies are added
/// transitively.
LockFile _createLockFile({Iterable<String> sandbox, Iterable<String> pkg,
    Map<String, String> hosted}) {
  // Maps package names to their on-disk paths.
  var dependencies = {};
  if (sandbox != null) {
    for (var package in sandbox) {
      // Sandbox packages live as siblings of the app directory.
      dependencies[package] = '../$package';
    }
  }
  if (pkg != null) {
    // Recursively adds [package] and its pubspec dependencies.
    _addPackage(String package) {
      if (dependencies.containsKey(package)) return;
      var packagePath;
      if (package == 'barback' && _packageOverrides == null) {
        throw new StateError(
            "createLockFile() can only create a lock file "
                "with a barback dependency within a withBarbackVersions() " "block.");
      }
      // NOTE(review): this dereferences _packageOverrides even when it's
      // null (for non-barback packages outside withBarbackVersions) —
      // confirm pkg is only used where overrides are set.
      if (_packageOverrides.containsKey(package)) {
        packagePath = _packageOverrides[package];
      } else {
        packagePath = p.join(pkgPath, package);
      }
      dependencies[package] = packagePath;
      var pubspec = loadYaml(readTextFile(p.join(packagePath, 'pubspec.yaml')));
      var packageDeps = pubspec['dependencies'];
      if (packageDeps == null) return;
      packageDeps.keys.forEach(_addPackage);
    }
    pkg.forEach(_addPackage);
  }
  var lockFile = new LockFile.empty();
  dependencies.forEach((name, dependencyPath) {
    var id = new PackageId(name, 'path', new Version(0, 0, 0), {
      'path': dependencyPath,
      'relative': p.isRelative(dependencyPath)
    });
    lockFile.packages[name] = id;
  });
  if (hosted != null) {
    hosted.forEach((name, version) {
      var id = new PackageId(name, 'hosted', new Version.parse(version), name);
      lockFile.packages[name] = id;
    });
  }
  return lockFile;
}
/// Installs [client] as pub's inner HTTP client for the rest of the
/// current schedule, restoring the previous client afterwards.
void useMockClient(MockClient client) {
  var restoreClient = innerHttpClient;
  innerHttpClient = client;
  currentSchedule.onComplete.schedule(() {
    innerHttpClient = restoreClient;
  }, 'de-activating the mock client');
}
/// Builds a pubspec map for a test package with the given [name],
/// [version], and optional [dependencies].
Map packageMap(String name, String version, [Map dependencies]) {
  var pubspec = {};
  pubspec["name"] = name;
  pubspec["version"] = version;
  pubspec["author"] = "Natalie Weizenbaum <nweiz@google.com>";
  pubspec["homepage"] = "http://pub.dartlang.org";
  pubspec["description"] = "A package, I guess.";
  // Omit the key entirely when no dependencies were given.
  if (dependencies != null) pubspec["dependencies"] = dependencies;
  return pubspec;
}
/// Returns the path of [target] within this package's test "asset"
/// directory.
String testAssetPath(String target) {
  // NOTE(review): presumably maps the generated ("pub_generated") tree
  // back to the source tree, where the assets actually live — confirm.
  var libPath = libraryPath('test_pub').replaceAll('pub_generated', 'pub');
  return p.join(p.dirname(libPath), 'asset', target);
}
/// Builds the pub.dartlang.org API response map describing a single
/// package version with the given [pubspec].
///
/// If [full] is true, also includes the fields that only the full
/// version-listing response carries.
Map packageVersionApiMap(Map pubspec, {bool full: false}) {
  var name = pubspec['name'];
  var version = pubspec['version'];
  var api = {
    'pubspec': pubspec,
    'version': version,
    'url': '/api/packages/$name/versions/$version',
    'archive_url': '/packages/$name/versions/$version.tar.gz',
    'new_dartdoc_url': '/api/packages/$name/versions/$version/new_dartdoc',
    'package_url': '/api/packages/$name'
  };
  if (full) {
    api['downloads'] = 0;
    api['created'] = '2012-09-25T18:38:28.685260';
    api['libraries'] = ['$name.dart'];
    api['uploader'] = ['nweiz@google.com'];
  }
  return api;
}
/// Validates that [actual] from the [pipe] stream matches [expected],
/// which may be null (no check), a string (line-by-line diff recorded
/// into [failures]), a [RegExp], or a [Matcher].
void _validateOutput(List<String> failures, String pipe, expected,
    String actual) {
  if (expected == null) return;
  if (expected is String) {
    _validateOutputString(failures, pipe, expected, actual);
    return;
  }
  var matcher = expected is RegExp ? matches(expected) : expected;
  expect(actual, matcher);
}
/// Compares [expected] and [actual] line by line (ignoring surrounding
/// whitespace on each line) and, on mismatch, appends a readable diff to
/// [failures].
///
/// Diff markers: "|" matched, "X" wrong/extra actual line, "?" expected
/// line with no actual counterpart.
void _validateOutputString(List<String> failures, String pipe, String expected,
    String actual) {
  var actualLines = actual.split("\n");
  var expectedLines = expected.split("\n");
  // A trailing newline in the expectation is not significant.
  if (expectedLines.last.trim() == '') {
    expectedLines.removeLast();
  }
  var results = <String>[];
  var failed = false;
  var lineCount = max(expectedLines.length, actualLines.length);
  for (var i = 0; i < lineCount; i++) {
    if (i >= actualLines.length) {
      // Expected line missing from the actual output.
      failed = true;
      results.add('? ${expectedLines[i]}');
      continue;
    }
    if (i >= expectedLines.length) {
      // Extra actual line beyond the expectation.
      failed = true;
      results.add('X ${actualLines[i]}');
      continue;
    }
    var matched = expectedLines[i].trim() == actualLines[i].trim();
    if (!matched) failed = true;
    results.add('${matched ? '|' : 'X'} ${actualLines[i]}');
  }
  if (!failed) return;
  failures.add('Expected $pipe:');
  failures.addAll(expectedLines.map((line) => '| $line'));
  failures.add('Got:');
  failures.addAll(results);
}
/// Validates that [actualText] from the [pipe] stream parses as JSON
/// matching [expected].
///
/// If [actualText] isn't valid JSON, records a failure in [failures]
/// instead of throwing.
void _validateOutputJson(List<String> failures, String pipe, expected,
    String actualText) {
  var actual;
  try {
    actual = JSON.decode(actualText);
  } on FormatException {
    failures.add('Expected $pipe JSON:');
    failures.add(expected);
    failures.add('Got invalid JSON:');
    failures.add(actualText);
    // Bug fix: previously control fell through to expect() with `actual`
    // still null, which threw immediately and discarded the accumulated
    // failure message. Return so the caller reports [failures] instead.
    return;
  }
  expect(actual, expected);
}
/// A function that creates a [Validator] for the given [entrypoint].
typedef Validator ValidatorCreator(Entrypoint entrypoint);

/// Schedules running the validator created by [fn] against the sandbox
/// app; completes to a pair of the validator's (errors, warnings).
Future<Pair<List<String>, List<String>>>
    schedulePackageValidation(ValidatorCreator fn) {
  return schedule(() {
    var cache = new SystemCache.withSources(p.join(sandboxDir, cachePath));
    return syncFuture(() {
      var validator = fn(new Entrypoint(p.join(sandboxDir, appPath), cache));
      return validator.validate().then((_) {
        return new Pair(validator.errors, validator.warnings);
      });
    });
  }, "validating package");
}
/// Returns a matcher for [Pair]s whose first and last components match
/// [firstMatcher] and [lastMatcher] respectively.
Matcher pairOf(Matcher firstMatcher, Matcher lastMatcher) =>
    new _PairMatcher(firstMatcher, lastMatcher);
/// A [Matcher] that matches a [Pair] component-wise; see [pairOf].
class _PairMatcher extends Matcher {
  /// Matches the pair's first component.
  final Matcher _firstMatcher;

  /// Matches the pair's last component.
  final Matcher _lastMatcher;

  _PairMatcher(this._firstMatcher, this._lastMatcher);

  bool matches(item, Map matchState) {
    if (item is! Pair) return false;
    return _firstMatcher.matches(item.first, matchState) &&
        _lastMatcher.matches(item.last, matchState);
  }

  Description describe(Description description) {
    return description.addAll("(", ", ", ")", [_firstMatcher, _lastMatcher]);
  }
}
| 629 StreamMatcher emitsLines(String output) => inOrder(output.split("\n")); |
OLD | NEW |