OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 /// Test infrastructure for testing pub. Unlike typical unit tests, most pub | 5 /// Test infrastructure for testing pub. Unlike typical unit tests, most pub |
6 /// tests are integration tests that stage some stuff on the file system, run | 6 /// tests are integration tests that stage some stuff on the file system, run |
7 /// pub, and then validate the results. This library provides an API to build | 7 /// pub, and then validate the results. This library provides an API to build |
8 /// tests like that. | 8 /// tests like that. |
9 library test_pub; | 9 library test_pub; |
10 | 10 |
| 11 import 'dart:async'; |
11 import 'dart:io'; | 12 import 'dart:io'; |
12 import 'dart:isolate'; | 13 import 'dart:json' as json; |
13 import 'dart:json'; | |
14 import 'dart:math'; | 14 import 'dart:math'; |
15 import 'dart:uri'; | 15 import 'dart:uri'; |
16 | 16 |
17 import '../../../pkg/oauth2/lib/oauth2.dart' as oauth2; | 17 import '../../../pkg/oauth2/lib/oauth2.dart' as oauth2; |
18 import '../../../pkg/path/lib/path.dart' as path; | 18 import '../../../pkg/path/lib/path.dart' as path; |
19 import '../../../pkg/unittest/lib/unittest.dart'; | 19 import '../../../pkg/unittest/lib/unittest.dart'; |
20 import '../../../pkg/http/lib/testing.dart'; | 20 import '../../../pkg/http/lib/testing.dart'; |
21 import '../../lib/file_system.dart' as fs; | 21 import '../../lib/file_system.dart' as fs; |
22 import '../../pub/entrypoint.dart'; | 22 import '../../pub/entrypoint.dart'; |
23 import '../../pub/git_source.dart'; | 23 import '../../pub/git_source.dart'; |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
73 Future<int> get port => _portCompleter.future; | 73 Future<int> get port => _portCompleter.future; |
74 | 74 |
75 /// Creates an HTTP server to serve [contents] as static files. This server will | 75 /// Creates an HTTP server to serve [contents] as static files. This server will |
76 /// exist only for the duration of the pub run. | 76 /// exist only for the duration of the pub run. |
77 /// | 77 /// |
78 /// Subsequent calls to [serve] will replace the previous server. | 78 /// Subsequent calls to [serve] will replace the previous server. |
79 void serve([List<Descriptor> contents]) { | 79 void serve([List<Descriptor> contents]) { |
80 var baseDir = dir("serve-dir", contents); | 80 var baseDir = dir("serve-dir", contents); |
81 | 81 |
82 _schedule((_) { | 82 _schedule((_) { |
83 return _closeServer().transform((_) { | 83 return _closeServer().then((_) { |
84 _server = new HttpServer(); | 84 _server = new HttpServer(); |
85 _server.defaultRequestHandler = (request, response) { | 85 _server.defaultRequestHandler = (request, response) { |
86 var path = request.uri.replaceFirst("/", "").split("/"); | 86 var path = request.uri.replaceFirst("/", "").split("/"); |
87 response.persistentConnection = false; | 87 response.persistentConnection = false; |
88 var stream; | 88 var stream; |
89 try { | 89 try { |
90 stream = baseDir.load(path); | 90 stream = baseDir.load(path); |
91 } catch (e) { | 91 } catch (e) { |
92 response.statusCode = 404; | 92 response.statusCode = 404; |
93 response.contentLength = 0; | 93 response.contentLength = 0; |
94 response.outputStream.close(); | 94 response.outputStream.close(); |
95 return; | 95 return; |
96 } | 96 } |
97 | 97 |
98 var future = consumeInputStream(stream); | 98 var future = consumeInputStream(stream); |
99 future.then((data) { | 99 future.then((data) { |
100 response.statusCode = 200; | 100 response.statusCode = 200; |
101 response.contentLength = data.length; | 101 response.contentLength = data.length; |
102 response.outputStream.write(data); | 102 response.outputStream.write(data); |
103 response.outputStream.close(); | 103 response.outputStream.close(); |
104 }); | 104 }).catchError((e) { |
105 | |
106 future.handleException((e) { | |
107 print("Exception while handling ${request.uri}: $e"); | 105 print("Exception while handling ${request.uri}: $e"); |
108 response.statusCode = 500; | 106 response.statusCode = 500; |
109 response.reasonPhrase = e.message; | 107 response.reasonPhrase = e.message; |
110 response.outputStream.close(); | 108 response.outputStream.close(); |
111 }); | 109 }); |
112 }; | 110 }; |
113 _server.listen("127.0.0.1", 0); | 111 _server.listen("127.0.0.1", 0); |
114 _portCompleter.complete(_server.port); | 112 _portCompleter.complete(_server.port); |
115 _scheduleCleanup((_) => _closeServer()); | 113 _scheduleCleanup((_) => _closeServer()); |
116 return null; | 114 return null; |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
154 _servedPackageDir = dir('packages', []); | 152 _servedPackageDir = dir('packages', []); |
155 serve([_servedPackageDir]); | 153 serve([_servedPackageDir]); |
156 | 154 |
157 _scheduleCleanup((_) { | 155 _scheduleCleanup((_) { |
158 _servedPackages = null; | 156 _servedPackages = null; |
159 _servedPackageDir = null; | 157 _servedPackageDir = null; |
160 }); | 158 }); |
161 } | 159 } |
162 | 160 |
163 _schedule((_) { | 161 _schedule((_) { |
164 return _awaitObject(pubspecs).transform((resolvedPubspecs) { | 162 return _awaitObject(pubspecs).then((resolvedPubspecs) { |
165 for (var spec in resolvedPubspecs) { | 163 for (var spec in resolvedPubspecs) { |
166 var name = spec['name']; | 164 var name = spec['name']; |
167 var version = spec['version']; | 165 var version = spec['version']; |
168 var versions = _servedPackages.putIfAbsent( | 166 var versions = _servedPackages.putIfAbsent( |
169 name, () => <String, String>{}); | 167 name, () => <String, String>{}); |
170 versions[version] = yaml(spec); | 168 versions[version] = yaml(spec); |
171 } | 169 } |
172 | 170 |
173 _servedPackageDir.contents.clear(); | 171 _servedPackageDir.contents.clear(); |
174 for (var name in _servedPackages.keys) { | 172 for (var name in _servedPackages.keys) { |
175 var versions = _servedPackages[name].keys; | 173 var versions = _servedPackages[name].keys.toList(); |
176 _servedPackageDir.contents.addAll([ | 174 _servedPackageDir.contents.addAll([ |
177 file('$name.json', | 175 file('$name.json', |
178 JSON.stringify({'versions': versions})), | 176 json.stringify({'versions': versions})), |
179 dir(name, [ | 177 dir(name, [ |
180 dir('versions', flatten(versions.map((version) { | 178 dir('versions', flatten(versions.mappedBy((version) { |
181 return [ | 179 return [ |
182 file('$version.yaml', _servedPackages[name][version]), | 180 file('$version.yaml', _servedPackages[name][version]), |
183 tar('$version.tar.gz', [ | 181 tar('$version.tar.gz', [ |
184 file('pubspec.yaml', _servedPackages[name][version]), | 182 file('pubspec.yaml', _servedPackages[name][version]), |
185 libDir(name, '$name $version') | 183 libDir(name, '$name $version') |
186 ]) | 184 ]) |
187 ]; | 185 ]; |
188 }))) | 186 }))) |
189 ]) | 187 ]) |
190 ]); | 188 ]); |
191 } | 189 } |
192 }); | 190 }); |
193 }); | 191 }); |
194 } | 192 } |
195 | 193 |
196 /// Converts [value] into a YAML string. | 194 /// Converts [value] into a YAML string. |
197 String yaml(value) => JSON.stringify(value); | 195 String yaml(value) => json.stringify(value); |
198 | 196 |
199 /// Describes a package that passes all validation. | 197 /// Describes a package that passes all validation. |
200 Descriptor get normalPackage => dir(appPath, [ | 198 Descriptor get normalPackage => dir(appPath, [ |
201 libPubspec("test_pkg", "1.0.0"), | 199 libPubspec("test_pkg", "1.0.0"), |
202 file("LICENSE", "Eh, do what you want."), | 200 file("LICENSE", "Eh, do what you want."), |
203 dir("lib", [ | 201 dir("lib", [ |
204 file("test_pkg.dart", "int i = 1;") | 202 file("test_pkg.dart", "int i = 1;") |
205 ]) | 203 ]) |
206 ]); | 204 ]); |
207 | 205 |
208 /// Describes a file named `pubspec.yaml` with the given YAML-serialized | 206 /// Describes a file named `pubspec.yaml` with the given YAML-serialized |
209 /// [contents], which should be a serializable object. | 207 /// [contents], which should be a serializable object. |
210 /// | 208 /// |
211 /// [contents] may contain [Future]s that resolve to serializable objects, | 209 /// [contents] may contain [Future]s that resolve to serializable objects, |
212 /// which may in turn contain [Future]s recursively. | 210 /// which may in turn contain [Future]s recursively. |
213 Descriptor pubspec(Map contents) { | 211 Descriptor pubspec(Map contents) { |
214 return async(_awaitObject(contents).transform((resolvedContents) => | 212 return async(_awaitObject(contents).then((resolvedContents) => |
215 file("pubspec.yaml", yaml(resolvedContents)))); | 213 file("pubspec.yaml", yaml(resolvedContents)))); |
216 } | 214 } |
217 | 215 |
218 /// Describes a file named `pubspec.yaml` for an application package with the | 216 /// Describes a file named `pubspec.yaml` for an application package with the |
219 /// given [dependencies]. | 217 /// given [dependencies]. |
220 Descriptor appPubspec(List dependencies) { | 218 Descriptor appPubspec(List dependencies) { |
221 return pubspec({ | 219 return pubspec({ |
222 "name": "myapp", | 220 "name": "myapp", |
223 "dependencies": _dependencyListToMap(dependencies) | 221 "dependencies": _dependencyListToMap(dependencies) |
224 }); | 222 }); |
(...skipping 29 matching lines...) Expand all Loading... |
254 }; | 252 }; |
255 if (dependencies != null) { | 253 if (dependencies != null) { |
256 package["dependencies"] = _dependencyListToMap(dependencies); | 254 package["dependencies"] = _dependencyListToMap(dependencies); |
257 } | 255 } |
258 return package; | 256 return package; |
259 } | 257 } |
260 | 258 |
261 /// Describes a map representing a dependency on a package in the package | 259 /// Describes a map representing a dependency on a package in the package |
262 /// repository. | 260 /// repository. |
263 Map dependency(String name, [String versionConstraint]) { | 261 Map dependency(String name, [String versionConstraint]) { |
264 var url = port.transform((p) => "http://localhost:$p"); | 262 var url = port.then((p) => "http://localhost:$p"); |
265 var dependency = {"hosted": {"name": name, "url": url}}; | 263 var dependency = {"hosted": {"name": name, "url": url}}; |
266 if (versionConstraint != null) dependency["version"] = versionConstraint; | 264 if (versionConstraint != null) dependency["version"] = versionConstraint; |
267 return dependency; | 265 return dependency; |
268 } | 266 } |
269 | 267 |
270 /// Describes a directory for a package installed from the mock package server. | 268 /// Describes a directory for a package installed from the mock package server. |
271 /// This directory is of the form found in the global package cache. | 269 /// This directory is of the form found in the global package cache. |
272 DirectoryDescriptor packageCacheDir(String name, String version) { | 270 DirectoryDescriptor packageCacheDir(String name, String version) { |
273 return dir("$name-$version", [ | 271 return dir("$name-$version", [ |
274 libDir(name, '$name $version') | 272 libDir(name, '$name $version') |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
324 DirectoryDescriptor cacheDir(Map packages) { | 322 DirectoryDescriptor cacheDir(Map packages) { |
325 var contents = <Descriptor>[]; | 323 var contents = <Descriptor>[]; |
326 packages.forEach((name, versions) { | 324 packages.forEach((name, versions) { |
327 if (versions is! List) versions = [versions]; | 325 if (versions is! List) versions = [versions]; |
328 for (var version in versions) { | 326 for (var version in versions) { |
329 contents.add(packageCacheDir(name, version)); | 327 contents.add(packageCacheDir(name, version)); |
330 } | 328 } |
331 }); | 329 }); |
332 return dir(cachePath, [ | 330 return dir(cachePath, [ |
333 dir('hosted', [ | 331 dir('hosted', [ |
334 async(port.transform((p) => dir('localhost%58$p', contents))) | 332 async(port.then((p) => dir('localhost%58$p', contents))) |
335 ]) | 333 ]) |
336 ]); | 334 ]); |
337 } | 335 } |
338 | 336 |
339 /// Describes the file in the system cache that contains the client's OAuth2 | 337 /// Describes the file in the system cache that contains the client's OAuth2 |
340 /// credentials. The URL "/token" on [server] will be used as the token | 338 /// credentials. The URL "/token" on [server] will be used as the token |
341 /// endpoint for refreshing the access token. | 339 /// endpoint for refreshing the access token. |
342 Descriptor credentialsFile( | 340 Descriptor credentialsFile( |
343 ScheduledServer server, | 341 ScheduledServer server, |
344 String accessToken, | 342 String accessToken, |
345 {String refreshToken, | 343 {String refreshToken, |
346 Date expiration}) { | 344 Date expiration}) { |
347 return async(server.url.transform((url) { | 345 return async(server.url.then((url) { |
348 return dir(cachePath, [ | 346 return dir(cachePath, [ |
349 file('credentials.json', new oauth2.Credentials( | 347 file('credentials.json', new oauth2.Credentials( |
350 accessToken, | 348 accessToken, |
351 refreshToken, | 349 refreshToken, |
352 url.resolve('/token'), | 350 url.resolve('/token'), |
353 ['https://www.googleapis.com/auth/userinfo.email'], | 351 ['https://www.googleapis.com/auth/userinfo.email'], |
354 expiration).toJson()) | 352 expiration).toJson()) |
355 ]); | 353 ]); |
356 })); | 354 })); |
357 } | 355 } |
358 | 356 |
359 /// Describes the application directory, containing only a pubspec specifying | 357 /// Describes the application directory, containing only a pubspec specifying |
360 /// the given [dependencies]. | 358 /// the given [dependencies]. |
361 DirectoryDescriptor appDir(List dependencies) => | 359 DirectoryDescriptor appDir(List dependencies) => |
362 dir(appPath, [appPubspec(dependencies)]); | 360 dir(appPath, [appPubspec(dependencies)]); |
363 | 361 |
364 /// Converts a list of dependencies as passed to [package] into a hash as used | 362 /// Converts a list of dependencies as passed to [package] into a hash as used |
365 /// in a pubspec. | 363 /// in a pubspec. |
366 Future<Map> _dependencyListToMap(List<Map> dependencies) { | 364 Future<Map> _dependencyListToMap(List<Map> dependencies) { |
367 return _awaitObject(dependencies).transform((resolvedDependencies) { | 365 return _awaitObject(dependencies).then((resolvedDependencies) { |
368 var result = <String, Map>{}; | 366 var result = <String, Map>{}; |
369 for (var dependency in resolvedDependencies) { | 367 for (var dependency in resolvedDependencies) { |
370 var keys = dependency.keys.filter((key) => key != "version"); | 368 var keys = dependency.keys.where((key) => key != "version"); |
371 var sourceName = only(keys); | 369 var sourceName = only(keys); |
372 var source; | 370 var source; |
373 switch (sourceName) { | 371 switch (sourceName) { |
374 case "git": | 372 case "git": |
375 source = new GitSource(); | 373 source = new GitSource(); |
376 break; | 374 break; |
377 case "hosted": | 375 case "hosted": |
378 source = new HostedSource(); | 376 source = new HostedSource(); |
379 break; | 377 break; |
380 case "sdk": | 378 case "sdk": |
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
446 final _TIMEOUT = 30000; | 444 final _TIMEOUT = 30000; |
447 | 445 |
448 /// Runs all the scheduled events for a test case. This should only be called | 446 /// Runs all the scheduled events for a test case. This should only be called |
449 /// once per test case. | 447 /// once per test case. |
450 void run() { | 448 void run() { |
451 var createdSandboxDir; | 449 var createdSandboxDir; |
452 | 450 |
453 var asyncDone = expectAsync0(() {}); | 451 var asyncDone = expectAsync0(() {}); |
454 | 452 |
455 Future cleanup() { | 453 Future cleanup() { |
456 return _runScheduled(createdSandboxDir, _scheduledCleanup).chain((_) { | 454 return _runScheduled(createdSandboxDir, _scheduledCleanup).then((_) { |
457 _scheduled = null; | 455 _scheduled = null; |
458 _scheduledCleanup = null; | 456 _scheduledCleanup = null; |
459 _scheduledOnException = null; | 457 _scheduledOnException = null; |
460 if (createdSandboxDir != null) return deleteDir(createdSandboxDir); | 458 if (createdSandboxDir != null) return deleteDir(createdSandboxDir); |
461 return new Future.immediate(null); | 459 return new Future.immediate(null); |
462 }); | 460 }); |
463 } | 461 } |
464 | 462 |
465 final future = _setUpSandbox().chain((sandboxDir) { | 463 final future = _setUpSandbox().then((sandboxDir) { |
466 createdSandboxDir = sandboxDir; | 464 createdSandboxDir = sandboxDir; |
467 return _runScheduled(sandboxDir, _scheduled); | 465 return _runScheduled(sandboxDir, _scheduled); |
468 }); | 466 }); |
469 | 467 |
470 future.handleException((error) { | 468 future.catchError((error) { |
471 // If an error occurs during testing, delete the sandbox, throw the error so | 469 // If an error occurs during testing, delete the sandbox, throw the error so |
472 // that the test framework sees it, then finally call asyncDone so that the | 470 // that the test framework sees it, then finally call asyncDone so that the |
473 // test framework knows we're done doing asynchronous stuff. | 471 // test framework knows we're done doing asynchronous stuff. |
474 var subFuture = _runScheduled(createdSandboxDir, _scheduledOnException) | 472 var subFuture = _runScheduled(createdSandboxDir, _scheduledOnException) |
475 .chain((_) => cleanup()); | 473 .then((_) => cleanup()); |
476 subFuture.handleException((e) { | 474 subFuture.catchError((e) { |
477 print("Exception while cleaning up: $e"); | 475 print("Exception while cleaning up: ${e.error}"); |
478 print(subFuture.stackTrace); | 476 print(e.stackTrace); |
479 registerException(error, subFuture.stackTrace); | 477 registerException(e.error, e.stackTrace); |
480 return true; | 478 return true; |
481 }); | 479 }); |
482 subFuture.then((_) => registerException(error, future.stackTrace)); | |
483 return true; | |
484 }); | |
485 | |
486 timeout(future, _TIMEOUT, 'waiting for a test to complete') | 480 timeout(future, _TIMEOUT, 'waiting for a test to complete') |
487 .chain((_) => cleanup()) | 481 .then((_) => cleanup()) |
488 .then((_) => asyncDone()); | 482 .then((_) => asyncDone()); |
489 } | 483 } |
490 | 484 |
491 /// Get the path to the root "util/test/pub" directory containing the pub | 485 /// Get the path to the root "util/test/pub" directory containing the pub |
492 /// tests. | 486 /// tests. |
493 String get testDirectory { | 487 String get testDirectory { |
494 var dir = new Options().script; | 488 var dir = new Options().script; |
495 while (basename(dir) != 'pub') dir = dirname(dir); | 489 while (basename(dir) != 'pub') dir = dirname(dir); |
496 | 490 |
497 return getFullPath(dir); | 491 return getFullPath(dir); |
498 } | 492 } |
499 | 493 |
500 /// Schedules a call to the Pub command-line utility. Runs Pub with [args] and | 494 /// Schedules a call to the Pub command-line utility. Runs Pub with [args] and |
501 /// validates that its results match [output], [error], and [exitCode]. | 495 /// validates that its results match [output], [error], and [exitCode]. |
502 void schedulePub({List args, Pattern output, Pattern error, | 496 void schedulePub({List args, Pattern output, Pattern error, |
503 Future<Uri> tokenEndpoint, int exitCode: 0}) { | 497 Future<Uri> tokenEndpoint, int exitCode: 0}) { |
504 _schedule((sandboxDir) { | 498 _schedule((sandboxDir) { |
505 return _doPub(runProcess, sandboxDir, args, tokenEndpoint) | 499 return _doPub(runProcess, sandboxDir, args, tokenEndpoint) |
506 .transform((result) { | 500 .then((result) { |
507 var failures = []; | 501 var failures = []; |
508 | 502 |
509 _validateOutput(failures, 'stdout', output, result.stdout); | 503 _validateOutput(failures, 'stdout', output, result.stdout); |
510 _validateOutput(failures, 'stderr', error, result.stderr); | 504 _validateOutput(failures, 'stderr', error, result.stderr); |
511 | 505 |
512 if (result.exitCode != exitCode) { | 506 if (result.exitCode != exitCode) { |
513 failures.add( | 507 failures.add( |
514 'Pub returned exit code ${result.exitCode}, expected $exitCode.'); | 508 'Pub returned exit code ${result.exitCode}, expected $exitCode.'); |
515 } | 509 } |
516 | 510 |
517 if (failures.length > 0) { | 511 if (failures.length > 0) { |
518 if (error == null) { | 512 if (error == null) { |
519 // If we aren't validating the error, still show it on failure. | 513 // If we aren't validating the error, still show it on failure. |
520 failures.add('Pub stderr:'); | 514 failures.add('Pub stderr:'); |
521 failures.addAll(result.stderr.map((line) => '| $line')); | 515 failures.addAll(result.stderr.mappedBy((line) => '| $line')); |
522 } | 516 } |
523 | 517 |
524 throw new ExpectException(Strings.join(failures, '\n')); | 518 throw new ExpectException(Strings.join(failures, '\n')); |
525 } | 519 } |
526 | 520 |
527 return null; | 521 return null; |
528 }); | 522 }); |
529 }); | 523 }); |
530 } | 524 } |
531 | 525 |
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
622 } | 616 } |
623 | 617 |
624 /// Skips the current test if Git is not installed. This validates that the | 618 /// Skips the current test if Git is not installed. This validates that the |
625 /// current test is running on a buildbot in which case we expect git to be | 619 /// current test is running on a buildbot in which case we expect git to be |
626 /// installed. If we are not running on the buildbot, we will instead see if | 620 /// installed. If we are not running on the buildbot, we will instead see if |
627 /// git is installed and skip the test if not. This way, users don't need to | 621 /// git is installed and skip the test if not. This way, users don't need to |
628 /// have git installed to run the tests locally (unless they actually care | 622 /// have git installed to run the tests locally (unless they actually care |
629 /// about the pub git tests). | 623 /// about the pub git tests). |
630 void ensureGit() { | 624 void ensureGit() { |
631 _schedule((_) { | 625 _schedule((_) { |
632 return isGitInstalled.transform((installed) { | 626 return isGitInstalled.then((installed) { |
633 if (!installed && | 627 if (!installed && |
634 !Platform.environment.containsKey('BUILDBOT_BUILDERNAME')) { | 628 !Platform.environment.containsKey('BUILDBOT_BUILDERNAME')) { |
635 _abortScheduled = true; | 629 _abortScheduled = true; |
636 } | 630 } |
637 return null; | 631 return null; |
638 }); | 632 }); |
639 }); | 633 }); |
640 } | 634 } |
641 | 635 |
642 /// Use [client] as the mock HTTP client for this test. | 636 /// Use [client] as the mock HTTP client for this test. |
643 /// | 637 /// |
644 /// Note that this will only affect HTTP requests made via http.dart in the | 638 /// Note that this will only affect HTTP requests made via http.dart in the |
645 /// parent process. | 639 /// parent process. |
646 void useMockClient(MockClient client) { | 640 void useMockClient(MockClient client) { |
647 var oldInnerClient = httpClient.inner; | 641 var oldInnerClient = httpClient.inner; |
648 httpClient.inner = client; | 642 httpClient.inner = client; |
649 _scheduleCleanup((_) { | 643 _scheduleCleanup((_) { |
650 httpClient.inner = oldInnerClient; | 644 httpClient.inner = oldInnerClient; |
651 }); | 645 }); |
652 } | 646 } |
653 | 647 |
654 Future<Directory> _setUpSandbox() => createTempDir(); | 648 Future<Directory> _setUpSandbox() => createTempDir(); |
655 | 649 |
656 Future _runScheduled(Directory parentDir, List<_ScheduledEvent> scheduled) { | 650 Future _runScheduled(Directory parentDir, List<_ScheduledEvent> scheduled) { |
657 if (scheduled == null) return new Future.immediate(null); | 651 if (scheduled == null) return new Future.immediate(null); |
658 var iterator = scheduled.iterator(); | 652 var iterator = scheduled.iterator; |
659 | 653 |
660 Future runNextEvent(_) { | 654 Future runNextEvent(_) { |
661 if (_abortScheduled || !iterator.hasNext) { | 655 if (_abortScheduled || !iterator.moveNext()) { |
662 _abortScheduled = false; | 656 _abortScheduled = false; |
663 scheduled.clear(); | 657 scheduled.clear(); |
664 return new Future.immediate(null); | 658 return new Future.immediate(null); |
665 } | 659 } |
666 | 660 |
667 var future = iterator.next()(parentDir); | 661 var future = iterator.current(parentDir); |
668 if (future != null) { | 662 if (future != null) { |
669 return future.chain(runNextEvent); | 663 return future.then(runNextEvent); |
670 } else { | 664 } else { |
671 return runNextEvent(null); | 665 return runNextEvent(null); |
672 } | 666 } |
673 } | 667 } |
674 | 668 |
675 return runNextEvent(null); | 669 return runNextEvent(null); |
676 } | 670 } |
677 | 671 |
678 /// Compares the [actual] output from running pub with [expected]. For [String] | 672 /// Compares the [actual] output from running pub with [expected]. For [String] |
679 /// patterns, ignores leading and trailing whitespace differences and tries to | 673 /// patterns, ignores leading and trailing whitespace differences and tries to |
(...skipping 12 matching lines...) Expand all Loading... |
692 | 686 |
693 void _validateOutputRegex(List<String> failures, String pipe, | 687 void _validateOutputRegex(List<String> failures, String pipe, |
694 RegExp expected, List<String> actual) { | 688 RegExp expected, List<String> actual) { |
695 var actualText = Strings.join(actual, '\n'); | 689 var actualText = Strings.join(actual, '\n'); |
696 if (actualText.contains(expected)) return; | 690 if (actualText.contains(expected)) return; |
697 | 691 |
698 if (actual.length == 0) { | 692 if (actual.length == 0) { |
699 failures.add('Expected $pipe to match "${expected.pattern}" but got none.'); | 693 failures.add('Expected $pipe to match "${expected.pattern}" but got none.'); |
700 } else { | 694 } else { |
701 failures.add('Expected $pipe to match "${expected.pattern}" but got:'); | 695 failures.add('Expected $pipe to match "${expected.pattern}" but got:'); |
702 failures.addAll(actual.map((line) => '| $line')); | 696 failures.addAll(actual.mappedBy((line) => '| $line')); |
703 } | 697 } |
704 } | 698 } |
705 | 699 |
706 void _validateOutputString(List<String> failures, String pipe, | 700 void _validateOutputString(List<String> failures, String pipe, |
707 String expectedText, List<String> actual) { | 701 String expectedText, List<String> actual) { |
708 final expected = expectedText.split('\n'); | 702 final expected = expectedText.split('\n'); |
709 | 703 |
710 // Strip off the last line. This lets us have expected multiline strings | 704 // Strip off the last line. This lets us have expected multiline strings |
711 // where the closing ''' is on its own line. It also fixes '' expected output | 705 // where the closing ''' is on its own line. It also fixes '' expected output |
712 // to expect zero lines of output, not a single empty line. | 706 // to expect zero lines of output, not a single empty line. |
(...skipping 24 matching lines...) Expand all Loading... |
737 } else { | 731 } else { |
738 // Output is OK, but include it in case other lines are wrong. | 732 // Output is OK, but include it in case other lines are wrong. |
739 results.add('| ${actual[i]}'); | 733 results.add('| ${actual[i]}'); |
740 } | 734 } |
741 } | 735 } |
742 } | 736 } |
743 | 737 |
744 // If any lines mismatched, show the expected and actual. | 738 // If any lines mismatched, show the expected and actual. |
745 if (failed) { | 739 if (failed) { |
746 failures.add('Expected $pipe:'); | 740 failures.add('Expected $pipe:'); |
747 failures.addAll(expected.map((line) => '| $line')); | 741 failures.addAll(expected.mappedBy((line) => '| $line')); |
748 failures.add('Got:'); | 742 failures.add('Got:'); |
749 failures.addAll(results); | 743 failures.addAll(results); |
750 } | 744 } |
751 } | 745 } |
752 | 746 |
753 /// Base class for [FileDescriptor] and [DirectoryDescriptor] so that a | 747 /// Base class for [FileDescriptor] and [DirectoryDescriptor] so that a |
754 /// directory can contain a heterogeneous collection of files and | 748 /// directory can contain a heterogeneous collection of files and |
755 /// subdirectories. | 749 /// subdirectories. |
756 abstract class Descriptor { | 750 abstract class Descriptor { |
757 /// The name of this file or directory. This must be a [String] if the file | 751 /// The name of this file or directory. This must be a [String] if the file |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
795 throw 'Pattern $name must be a string.'; | 789 throw 'Pattern $name must be a string.'; |
796 } | 790 } |
797 | 791 |
798 /// Validates that at least one file in [dir] matching [name] is valid | 792 /// Validates that at least one file in [dir] matching [name] is valid |
799 /// according to [validate]. [validate] should complete to an exception if | 793 /// according to [validate]. [validate] should complete to an exception if |
800 /// the input path is invalid. | 794 /// the input path is invalid. |
801 Future _validateOneMatch(String dir, Future validate(String path)) { | 795 Future _validateOneMatch(String dir, Future validate(String path)) { |
802 // Special-case strings to support multi-level names like "myapp/packages". | 796 // Special-case strings to support multi-level names like "myapp/packages". |
803 if (name is String) { | 797 if (name is String) { |
804 var path = join(dir, name); | 798 var path = join(dir, name); |
805 return exists(path).chain((exists) { | 799 return exists(path).then((exists) { |
806 if (!exists) Expect.fail('File $name in $dir not found.'); | 800 if (!exists) Expect.fail('File $name in $dir not found.'); |
807 return validate(path); | 801 return validate(path); |
808 }); | 802 }); |
809 } | 803 } |
810 | 804 |
811 // TODO(nweiz): remove this when issue 4061 is fixed. | 805 // TODO(nweiz): remove this when issue 4061 is fixed. |
812 var stackTrace; | 806 var stackTrace; |
813 try { | 807 try { |
814 throw ""; | 808 throw ""; |
815 } catch (_, localStackTrace) { | 809 } catch (_, localStackTrace) { |
816 stackTrace = localStackTrace; | 810 stackTrace = localStackTrace; |
817 } | 811 } |
818 | 812 |
819 return listDir(dir).chain((files) { | 813 return listDir(dir).then((files) { |
820 var matches = files.filter((file) => endsWithPattern(file, name)); | 814 var matches = files.where((file) => endsWithPattern(file, name)).toList(); |
821 if (matches.length == 0) { | 815 if (matches.isEmpty) { |
822 Expect.fail('No files in $dir match pattern $name.'); | 816 Expect.fail('No files in $dir match pattern $name.'); |
823 } | 817 } |
824 if (matches.length == 1) return validate(matches[0]); | 818 if (matches.length == 1) return validate(matches[0]); |
825 | 819 |
826 var failures = []; | 820 var failures = []; |
827 var successes = 0; | 821 var successes = 0; |
828 var completer = new Completer(); | 822 var completer = new Completer(); |
829 checkComplete() { | 823 checkComplete() { |
830 if (failures.length + successes != matches.length) return; | 824 if (failures.length + successes != matches.length) return; |
831 if (successes > 0) { | 825 if (successes > 0) { |
832 completer.complete(null); | 826 completer.complete(null); |
833 return; | 827 return; |
834 } | 828 } |
835 | 829 |
836 var error = new StringBuffer(); | 830 var error = new StringBuffer(); |
837 error.add("No files named $name in $dir were valid:\n"); | 831 error.add("No files named $name in $dir were valid:\n"); |
838 for (var failure in failures) { | 832 for (var failure in failures) { |
839 error.add(" ").add(failure).add("\n"); | 833 error.add(" ").add(failure).add("\n"); |
840 } | 834 } |
841 completer.completeException( | 835 completer.completeError( |
842 new ExpectException(error.toString()), stackTrace); | 836 new ExpectException(error.toString()), stackTrace); |
843 } | 837 } |
844 | 838 |
845 for (var match in matches) { | 839 for (var match in matches) { |
846 var future = validate(match); | 840 var future = validate(match); |
847 | 841 |
848 future.handleException((e) { | 842 future.catchError((e) { |
849 failures.add(e); | 843 failures.add(e); |
850 checkComplete(); | 844 checkComplete(); |
851 return true; | |
852 }); | 845 }); |
853 | 846 |
854 future.then((_) { | 847 future.then((_) { |
855 successes++; | 848 successes++; |
856 checkComplete(); | 849 checkComplete(); |
857 }); | 850 }).catchError((_) {}); |
858 } | 851 } |
859 return completer.future; | 852 return completer.future; |
860 }); | 853 }); |
861 } | 854 } |
862 } | 855 } |
863 | 856 |
864 /// Describes a file. These are used both for setting up an expected directory | 857 /// Describes a file. These are used both for setting up an expected directory |
865 /// tree before running a test, and for validating that the file system matches | 858 /// tree before running a test, and for validating that the file system matches |
866 /// some expectations after running it. | 859 /// some expectations after running it. |
867 class FileDescriptor extends Descriptor { | 860 class FileDescriptor extends Descriptor { |
(...skipping 10 matching lines...) Expand all Loading... |
878 | 871 |
879 /// Deletes the file within [dir]. Returns a [Future] that is completed after | 872 /// Deletes the file within [dir]. Returns a [Future] that is completed after |
880 /// the deletion is done. | 873 /// the deletion is done. |
881 Future delete(dir) { | 874 Future delete(dir) { |
882 return deleteFile(join(dir, _stringName)); | 875 return deleteFile(join(dir, _stringName)); |
883 } | 876 } |
884 | 877 |
885 /// Validates that this file correctly matches the actual file at [path]. | 878 /// Validates that this file correctly matches the actual file at [path]. |
886 Future validate(String path) { | 879 Future validate(String path) { |
887 return _validateOneMatch(path, (file) { | 880 return _validateOneMatch(path, (file) { |
888 return readTextFile(file).transform((text) { | 881 return readTextFile(file).then((text) { |
889 if (text == contents) return null; | 882 if (text == contents) return null; |
890 | 883 |
891 Expect.fail('File $file should contain:\n\n$contents\n\n' | 884 Expect.fail('File $file should contain:\n\n$contents\n\n' |
892 'but contained:\n\n$text'); | 885 'but contained:\n\n$text'); |
893 }); | 886 }); |
894 }); | 887 }); |
895 } | 888 } |
896 | 889 |
897 /// Loads the contents of the file. | 890 /// Loads the contents of the file. |
898 InputStream load(List<String> path) { | 891 InputStream load(List<String> path) { |
(...skipping 17 matching lines...) Expand all Loading... |
916 final List<Descriptor> contents; | 909 final List<Descriptor> contents; |
917 | 910 |
918 DirectoryDescriptor(Pattern name, List<Descriptor> contents) | 911 DirectoryDescriptor(Pattern name, List<Descriptor> contents) |
919 : this.contents = contents == null ? <Descriptor>[] : contents, | 912 : this.contents = contents == null ? <Descriptor>[] : contents, |
920 super(name); | 913 super(name); |
921 | 914 |
922 /// Creates the file within [dir]. Returns a [Future] that is completed after | 915 /// Creates the file within [dir]. Returns a [Future] that is completed after |
923 /// the creation is done. | 916 /// the creation is done. |
924 Future<Directory> create(parentDir) { | 917 Future<Directory> create(parentDir) { |
925 // Create the directory. | 918 // Create the directory. |
926 return ensureDir(join(parentDir, _stringName)).chain((dir) { | 919 return ensureDir(join(parentDir, _stringName)).then((dir) { |
927 if (contents == null) return new Future<Directory>.immediate(dir); | 920 if (contents == null) return new Future<Directory>.immediate(dir); |
928 | 921 |
929 // Recursively create all of its children. | 922 // Recursively create all of its children. |
930 final childFutures = contents.map((child) => child.create(dir)); | 923 final childFutures = |
| 924 contents.mappedBy((child) => child.create(dir)).toList(); |
931 // Only complete once all of the children have been created too. | 925 // Only complete once all of the children have been created too. |
932 return Futures.wait(childFutures).transform((_) => dir); | 926 return Futures.wait(childFutures).then((_) => dir); |
933 }); | 927 }); |
934 } | 928 } |
935 | 929 |
936 /// Deletes the directory within [dir]. Returns a [Future] that is completed | 930 /// Deletes the directory within [dir]. Returns a [Future] that is completed |
937 /// after the deletion is done. | 931 /// after the deletion is done. |
938 Future delete(dir) { | 932 Future delete(dir) { |
939 return deleteDir(join(dir, _stringName)); | 933 return deleteDir(join(dir, _stringName)); |
940 } | 934 } |
941 | 935 |
942 /// Validates that the directory at [path] contains all of the expected | 936 /// Validates that the directory at [path] contains all of the expected |
943 /// contents in this descriptor. Note that this does *not* check that the | 937 /// contents in this descriptor. Note that this does *not* check that the |
944 /// directory doesn't contain other unexpected stuff, just that it *does* | 938 /// directory doesn't contain other unexpected stuff, just that it *does* |
945 /// contain the stuff we do expect. | 939 /// contain the stuff we do expect. |
946 Future validate(String path) { | 940 Future validate(String path) { |
947 return _validateOneMatch(path, (dir) { | 941 return _validateOneMatch(path, (dir) { |
948 // Validate each of the items in this directory. | 942 // Validate each of the items in this directory. |
949 final entryFutures = contents.map((entry) => entry.validate(dir)); | 943 final entryFutures = |
| 944 contents.mappedBy((entry) => entry.validate(dir)).toList(); |
950 | 945 |
951 // If they are all valid, the directory is valid. | 946 // If they are all valid, the directory is valid. |
952 return Futures.wait(entryFutures).transform((entries) => null); | 947 return Futures.wait(entryFutures).then((entries) => null); |
953 }); | 948 }); |
954 } | 949 } |
955 | 950 |
956 /// Loads [path] from within this directory. | 951 /// Loads [path] from within this directory. |
957 InputStream load(List<String> path) { | 952 InputStream load(List<String> path) { |
958 if (path.isEmpty) { | 953 if (path.isEmpty) { |
959 throw "Can't load the contents of $name: is a directory."; | 954 throw "Can't load the contents of $name: is a directory."; |
960 } | 955 } |
961 | 956 |
962 for (var descriptor in contents) { | 957 for (var descriptor in contents) { |
963 if (descriptor.name == path[0]) { | 958 if (descriptor.name == path[0]) { |
964 return descriptor.load(path.getRange(1, path.length - 1)); | 959 return descriptor.load(path.getRange(1, path.length - 1)); |
965 } | 960 } |
966 } | 961 } |
967 | 962 |
968 throw "Directory $name doesn't contain ${Strings.join(path, '/')}."; | 963 throw "Directory $name doesn't contain ${Strings.join(path, '/')}."; |
969 } | 964 } |
970 } | 965 } |
971 | 966 |
972 /// Wraps a [Future] that will complete to a [Descriptor] and makes it behave | 967 /// Wraps a [Future] that will complete to a [Descriptor] and makes it behave |
973 /// like a concrete [Descriptor]. This is necessary when the contents of the | 968 /// like a concrete [Descriptor]. This is necessary when the contents of the |
974 /// descriptor depends on information that's not available until part of the | 969 /// descriptor depends on information that's not available until part of the |
975 /// test run is completed. | 970 /// test run is completed. |
976 class FutureDescriptor extends Descriptor { | 971 class FutureDescriptor extends Descriptor { |
977 Future<Descriptor> _future; | 972 Future<Descriptor> _future; |
978 | 973 |
979 FutureDescriptor(this._future) : super('<unknown>'); | 974 FutureDescriptor(this._future) : super('<unknown>'); |
980 | 975 |
981 Future create(dir) => _future.chain((desc) => desc.create(dir)); | 976 Future create(dir) => _future.then((desc) => desc.create(dir)); |
982 | 977 |
983 Future validate(dir) => _future.chain((desc) => desc.validate(dir)); | 978 Future validate(dir) => _future.then((desc) => desc.validate(dir)); |
984 | 979 |
985 Future delete(dir) => _future.chain((desc) => desc.delete(dir)); | 980 Future delete(dir) => _future.then((desc) => desc.delete(dir)); |
986 | 981 |
987 InputStream load(List<String> path) { | 982 InputStream load(List<String> path) { |
988 var resultStream = new ListInputStream(); | 983 var resultStream = new ListInputStream(); |
989 _future.then((desc) => pipeInputToInput(desc.load(path), resultStream)); | 984 _future.then((desc) => pipeInputToInput(desc.load(path), resultStream)); |
990 return resultStream; | 985 return resultStream; |
991 } | 986 } |
992 } | 987 } |
993 | 988 |
994 /// Describes a Git repository and its contents. | 989 /// Describes a Git repository and its contents. |
995 class GitRepoDescriptor extends DirectoryDescriptor { | 990 class GitRepoDescriptor extends DirectoryDescriptor { |
(...skipping 17 matching lines...) Expand all Loading... |
1013 ]); | 1008 ]); |
1014 } | 1009 } |
1015 | 1010 |
1016 /// Schedules changes to be committed to the Git repository. | 1011 /// Schedules changes to be committed to the Git repository. |
1017 void scheduleCommit() => _schedule((dir) => this.commit(dir)); | 1012 void scheduleCommit() => _schedule((dir) => this.commit(dir)); |
1018 | 1013 |
1019 /// Return a Future that completes to the commit in the git repository | 1014 /// Return a Future that completes to the commit in the git repository |
1020 /// referred to by [ref] at the current point in the scheduled test run. | 1015 /// referred to by [ref] at the current point in the scheduled test run. |
1021 Future<String> revParse(String ref) { | 1016 Future<String> revParse(String ref) { |
1022 return _scheduleValue((parentDir) { | 1017 return _scheduleValue((parentDir) { |
1023 return super.create(parentDir).chain((rootDir) { | 1018 return super.create(parentDir).then((rootDir) { |
1024 return _runGit(['rev-parse', ref], rootDir); | 1019 return _runGit(['rev-parse', ref], rootDir); |
1025 }).transform((output) => output[0]); | 1020 }).then((output) => output[0]); |
1026 }); | 1021 }); |
1027 } | 1022 } |
1028 | 1023 |
1029 /// Schedule a Git command to run in this repository. | 1024 /// Schedule a Git command to run in this repository. |
1030 void scheduleGit(List<String> args) { | 1025 void scheduleGit(List<String> args) { |
1031 _schedule((parentDir) { | 1026 _schedule((parentDir) { |
1032 var gitDir = new Directory(join(parentDir, name)); | 1027 var gitDir = new Directory(join(parentDir, name)); |
1033 return _runGit(args, gitDir); | 1028 return _runGit(args, gitDir); |
1034 }); | 1029 }); |
1035 } | 1030 } |
1036 | 1031 |
1037 Future _runGitCommands(parentDir, List<List<String>> commands) { | 1032 Future _runGitCommands(parentDir, List<List<String>> commands) { |
1038 var workingDir; | 1033 var workingDir; |
1039 | 1034 |
1040 Future runGitStep(_) { | 1035 Future runGitStep(_) { |
1041 if (commands.isEmpty) return new Future.immediate(workingDir); | 1036 if (commands.isEmpty) return new Future.immediate(workingDir); |
1042 var command = commands.removeAt(0); | 1037 var command = commands.removeAt(0); |
1043 return _runGit(command, workingDir).chain(runGitStep); | 1038 return _runGit(command, workingDir).then(runGitStep); |
1044 } | 1039 } |
1045 | 1040 |
1046 return super.create(parentDir).chain((rootDir) { | 1041 return super.create(parentDir).then((rootDir) { |
1047 workingDir = rootDir; | 1042 workingDir = rootDir; |
1048 return runGitStep(null); | 1043 return runGitStep(null); |
1049 }); | 1044 }); |
1050 } | 1045 } |
1051 | 1046 |
1052 Future<String> _runGit(List<String> args, Directory workingDir) { | 1047 Future<String> _runGit(List<String> args, Directory workingDir) { |
1053 // Explicitly specify the committer information. Git needs this to commit | 1048 // Explicitly specify the committer information. Git needs this to commit |
1054 // and we don't want to rely on the buildbots having this already set up. | 1049 // and we don't want to rely on the buildbots having this already set up. |
1055 var environment = { | 1050 var environment = { |
1056 'GIT_AUTHOR_NAME': 'Pub Test', | 1051 'GIT_AUTHOR_NAME': 'Pub Test', |
1057 'GIT_AUTHOR_EMAIL': 'pub@dartlang.org', | 1052 'GIT_AUTHOR_EMAIL': 'pub@dartlang.org', |
1058 'GIT_COMMITTER_NAME': 'Pub Test', | 1053 'GIT_COMMITTER_NAME': 'Pub Test', |
1059 'GIT_COMMITTER_EMAIL': 'pub@dartlang.org' | 1054 'GIT_COMMITTER_EMAIL': 'pub@dartlang.org' |
1060 }; | 1055 }; |
1061 | 1056 |
1062 return runGit(args, workingDir: workingDir.path, | 1057 return runGit(args, workingDir: workingDir.path, |
1063 environment: environment).transform((result) { | 1058 environment: environment).then((result) { |
1064 if (!result.success) { | 1059 if (!result.success) { |
1065 throw "Error running: git ${Strings.join(args, ' ')}\n" | 1060 throw "Error running: git ${Strings.join(args, ' ')}\n" |
1066 "${Strings.join(result.stderr, '\n')}"; | 1061 "${Strings.join(result.stderr, '\n')}"; |
1067 } | 1062 } |
1068 | 1063 |
1069 return result.stdout; | 1064 return result.stdout; |
1070 }); | 1065 }); |
1071 } | 1066 } |
1072 } | 1067 } |
1073 | 1068 |
1074 /// Describes a gzipped tar file and its contents. | 1069 /// Describes a gzipped tar file and its contents. |
1075 class TarFileDescriptor extends Descriptor { | 1070 class TarFileDescriptor extends Descriptor { |
1076 final List<Descriptor> contents; | 1071 final List<Descriptor> contents; |
1077 | 1072 |
1078 TarFileDescriptor(Pattern name, this.contents) | 1073 TarFileDescriptor(Pattern name, this.contents) |
1079 : super(name); | 1074 : super(name); |
1080 | 1075 |
1081 /// Creates the files and directories within this tar file, then archives | 1076 /// Creates the files and directories within this tar file, then archives |
1082 /// them, compresses them, and saves the result to [parentDir]. | 1077 /// them, compresses them, and saves the result to [parentDir]. |
1083 Future<File> create(parentDir) { | 1078 Future<File> create(parentDir) { |
1084 // TODO(rnystrom): Use withTempDir(). | 1079 // TODO(rnystrom): Use withTempDir(). |
1085 var tempDir; | 1080 var tempDir; |
1086 return createTempDir().chain((_tempDir) { | 1081 return createTempDir().then((_tempDir) { |
1087 tempDir = _tempDir; | 1082 tempDir = _tempDir; |
1088 return Futures.wait(contents.map((child) => child.create(tempDir))); | 1083 return Futures.wait(contents.mappedBy((child) => child.create(tempDir))); |
1089 }).chain((createdContents) { | 1084 }).then((createdContents) { |
1090 return consumeInputStream(createTarGz(createdContents, baseDir: tempDir)); | 1085 return consumeInputStream(createTarGz(createdContents, baseDir: tempDir)); |
1091 }).chain((bytes) { | 1086 }).then((bytes) { |
1092 return new File(join(parentDir, _stringName)).writeAsBytes(bytes); | 1087 return new File(join(parentDir, _stringName)).writeAsBytes(bytes); |
1093 }).chain((file) { | 1088 }).then((file) { |
1094 return deleteDir(tempDir).transform((_) => file); | 1089 return deleteDir(tempDir).then((_) => file); |
1095 }); | 1090 }); |
1096 } | 1091 } |
1097 | 1092 |
1098 /// Validates that the `.tar.gz` file at [path] contains the expected | 1093 /// Validates that the `.tar.gz` file at [path] contains the expected |
1099 /// contents. | 1094 /// contents. |
1100 Future validate(String path) { | 1095 Future validate(String path) { |
1101 throw "TODO(nweiz): implement this"; | 1096 throw "TODO(nweiz): implement this"; |
1102 } | 1097 } |
1103 | 1098 |
1104 Future delete(dir) { | 1099 Future delete(dir) { |
1105 throw new UnsupportedError(''); | 1100 throw new UnsupportedError(''); |
1106 } | 1101 } |
1107 | 1102 |
1108 /// Loads the contents of this tar file. | 1103 /// Loads the contents of this tar file. |
1109 InputStream load(List<String> path) { | 1104 InputStream load(List<String> path) { |
1110 if (!path.isEmpty) { | 1105 if (!path.isEmpty) { |
1111 var joinedPath = Strings.join(path, '/'); | 1106 var joinedPath = Strings.join(path, '/'); |
1112 throw "Can't load $joinedPath from within $name: not a directory."; | 1107 throw "Can't load $joinedPath from within $name: not a directory."; |
1113 } | 1108 } |
1114 | 1109 |
1115 var sinkStream = new ListInputStream(); | 1110 var sinkStream = new ListInputStream(); |
1116 var tempDir; | 1111 var tempDir; |
1117 // TODO(rnystrom): Use withTempDir() here. | 1112 // TODO(rnystrom): Use withTempDir() here. |
1118 // TODO(nweiz): propagate any errors to the return value. See issue 3657. | 1113 // TODO(nweiz): propagate any errors to the return value. See issue 3657. |
1119 createTempDir().chain((_tempDir) { | 1114 createTempDir().then((_tempDir) { |
1120 tempDir = _tempDir; | 1115 tempDir = _tempDir; |
1121 return create(tempDir); | 1116 return create(tempDir); |
1122 }).then((tar) { | 1117 }).then((tar) { |
1123 var sourceStream = tar.openInputStream(); | 1118 var sourceStream = tar.openInputStream(); |
1124 pipeInputToInput(sourceStream, sinkStream).then((_) { | 1119 pipeInputToInput(sourceStream, sinkStream).then((_) { |
1125 tempDir.delete(recursive: true); | 1120 tempDir.delete(recursive: true); |
1126 }); | 1121 }); |
1127 }); | 1122 }); |
1128 return sinkStream; | 1123 return sinkStream; |
1129 } | 1124 } |
1130 } | 1125 } |
1131 | 1126 |
1132 /// A descriptor that validates that no file exists with the given name. | 1127 /// A descriptor that validates that no file exists with the given name. |
1133 class NothingDescriptor extends Descriptor { | 1128 class NothingDescriptor extends Descriptor { |
1134 NothingDescriptor(String name) : super(name); | 1129 NothingDescriptor(String name) : super(name); |
1135 | 1130 |
1136 Future create(dir) => new Future.immediate(null); | 1131 Future create(dir) => new Future.immediate(null); |
1137 Future delete(dir) => new Future.immediate(null); | 1132 Future delete(dir) => new Future.immediate(null); |
1138 | 1133 |
1139 Future validate(String dir) { | 1134 Future validate(String dir) { |
1140 return exists(join(dir, name)).transform((exists) { | 1135 return exists(join(dir, name)).then((exists) { |
1141 if (exists) Expect.fail('File $name in $dir should not exist.'); | 1136 if (exists) Expect.fail('File $name in $dir should not exist.'); |
1142 }); | 1137 }); |
1143 } | 1138 } |
1144 | 1139 |
1145 InputStream load(List<String> path) { | 1140 InputStream load(List<String> path) { |
1146 if (path.isEmpty) { | 1141 if (path.isEmpty) { |
1147 throw "Can't load the contents of $name: it doesn't exist."; | 1142 throw "Can't load the contents of $name: it doesn't exist."; |
1148 } else { | 1143 } else { |
1149 throw "Can't load ${Strings.join(path, '/')} from within $name: $name " | 1144 throw "Can't load ${Strings.join(path, '/')} from within $name: $name " |
1150 "doesn't exist."; | 1145 "doesn't exist."; |
1151 } | 1146 } |
1152 } | 1147 } |
1153 } | 1148 } |
1154 | 1149 |
1155 /// A function that creates a [Validator] subclass. | 1150 /// A function that creates a [Validator] subclass. |
1156 typedef Validator ValidatorCreator(Entrypoint entrypoint); | 1151 typedef Validator ValidatorCreator(Entrypoint entrypoint); |
1157 | 1152 |
1158 /// Schedules a single [Validator] to run on the [appPath]. Returns a scheduled | 1153 /// Schedules a single [Validator] to run on the [appPath]. Returns a scheduled |
1159 /// Future that contains the erros and warnings produced by that validator. | 1154 /// Future that contains the erros and warnings produced by that validator. |
1160 Future<Pair<List<String>, List<String>>> schedulePackageValidation( | 1155 Future<Pair<List<String>, List<String>>> schedulePackageValidation( |
1161 ValidatorCreator fn) { | 1156 ValidatorCreator fn) { |
1162 return _scheduleValue((sandboxDir) { | 1157 return _scheduleValue((sandboxDir) { |
1163 var cache = new SystemCache.withSources( | 1158 var cache = new SystemCache.withSources( |
1164 join(sandboxDir, cachePath), | 1159 join(sandboxDir, cachePath), |
1165 join(sandboxDir, sdkPath)); | 1160 join(sandboxDir, sdkPath)); |
1166 | 1161 |
1167 return Entrypoint.load(join(sandboxDir, appPath), cache) | 1162 return Entrypoint.load(join(sandboxDir, appPath), cache) |
1168 .chain((entrypoint) { | 1163 .then((entrypoint) { |
1169 var validator = fn(entrypoint); | 1164 var validator = fn(entrypoint); |
1170 return validator.validate().transform((_) { | 1165 return validator.validate().then((_) { |
1171 return new Pair(validator.errors, validator.warnings); | 1166 return new Pair(validator.errors, validator.warnings); |
1172 }); | 1167 }); |
1173 }); | 1168 }); |
1174 }); | 1169 }); |
1175 } | 1170 } |
1176 | 1171 |
1177 /// A matcher that matches a Pair. | 1172 /// A matcher that matches a Pair. |
1178 Matcher pairOf(Matcher firstMatcher, Matcher lastMatcher) => | 1173 Matcher pairOf(Matcher firstMatcher, Matcher lastMatcher) => |
1179 new _PairMatcher(firstMatcher, lastMatcher); | 1174 new _PairMatcher(firstMatcher, lastMatcher); |
1180 | 1175 |
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1233 /// Whether the user has scheduled the end of this process by calling either | 1228 /// Whether the user has scheduled the end of this process by calling either |
1234 /// [shouldExit] or [kill]. | 1229 /// [shouldExit] or [kill]. |
1235 bool _endScheduled = false; | 1230 bool _endScheduled = false; |
1236 | 1231 |
1237 /// Whether the process is expected to terminate at this point. | 1232 /// Whether the process is expected to terminate at this point. |
1238 bool _endExpected = false; | 1233 bool _endExpected = false; |
1239 | 1234 |
1240 /// Wraps a [Process] [Future] in a scheduled process. | 1235 /// Wraps a [Process] [Future] in a scheduled process. |
1241 ScheduledProcess(this.name, Future<Process> process) | 1236 ScheduledProcess(this.name, Future<Process> process) |
1242 : _process = process, | 1237 : _process = process, |
1243 _stdout = process.transform((p) => new StringInputStream(p.stdout)), | 1238 _stdout = process.then((p) => new StringInputStream(p.stdout)), |
1244 _stderr = process.transform((p) => new StringInputStream(p.stderr)) { | 1239 _stderr = process.then((p) => new StringInputStream(p.stderr)) { |
1245 | 1240 |
1246 _schedule((_) { | 1241 _schedule((_) { |
1247 if (!_endScheduled) { | 1242 if (!_endScheduled) { |
1248 throw new StateError("Scheduled process $name must have shouldExit() " | 1243 throw new StateError("Scheduled process $name must have shouldExit() " |
1249 "or kill() called before the test is run."); | 1244 "or kill() called before the test is run."); |
1250 } | 1245 } |
1251 | 1246 |
1252 return _process.transform((p) { | 1247 return _process.then((p) { |
1253 p.onExit = (c) { | 1248 p.onExit = (c) { |
1254 if (_endExpected) { | 1249 if (_endExpected) { |
1255 _exitCodeCompleter.complete(c); | 1250 _exitCodeCompleter.complete(c); |
1256 return; | 1251 return; |
1257 } | 1252 } |
1258 | 1253 |
1259 // Sleep for half a second in case _endExpected is set in the next | 1254 // Sleep for half a second in case _endExpected is set in the next |
1260 // scheduled event. | 1255 // scheduled event. |
1261 sleep(500).then((_) { | 1256 sleep(500).then((_) { |
1262 if (_endExpected) { | 1257 if (_endExpected) { |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1336 } | 1331 } |
1337 | 1332 |
1338 return _scheduleValue((_) { | 1333 return _scheduleValue((_) { |
1339 return timeout(_stderr.chain(consumeStringInputStream), _SCHEDULE_TIMEOUT, | 1334 return timeout(_stderr.then(consumeStringInputStream), _SCHEDULE_TIMEOUT, |
1340 "waiting for the last stderr line from process $name"); | 1335 "waiting for the last stderr line from process $name"); |
1341 }); | 1336 }); |
1342 } | 1337 } |
1343 | 1338 |
1344 /// Writes [line] to the process as stdin. | 1339 /// Writes [line] to the process as stdin. |
1345 void writeLine(String line) { | 1340 void writeLine(String line) { |
1346 _schedule((_) => _process.transform((p) => p.stdin.writeString('$line\n'))); | 1341 _schedule((_) => _process.then((p) => p.stdin.writeString('$line\n'))); |
1347 } | 1342 } |
1348 | 1343 |
1349 /// Kills the process, and waits until it's dead. | 1344 /// Kills the process, and waits until it's dead. |
1350 void kill() { | 1345 void kill() { |
1351 _endScheduled = true; | 1346 _endScheduled = true; |
1352 _schedule((_) { | 1347 _schedule((_) { |
1353 _endExpected = true; | 1348 _endExpected = true; |
1354 return _process.chain((p) { | 1349 return _process.then((p) { |
1355 p.kill(); | 1350 p.kill(); |
1356 return timeout(_exitCode, _SCHEDULE_TIMEOUT, | 1351 return timeout(_exitCode, _SCHEDULE_TIMEOUT, |
1357 "waiting for process $name to die"); | 1352 "waiting for process $name to die"); |
1358 }); | 1353 }); |
1359 }); | 1354 }); |
1360 } | 1355 } |
1361 | 1356 |
1362 /// Waits for the process to exit, and verifies that the exit code matches | 1357 /// Waits for the process to exit, and verifies that the exit code matches |
1363 /// [expectedExitCode] (if given). | 1358 /// [expectedExitCode] (if given). |
1364 void shouldExit([int expectedExitCode]) { | 1359 void shouldExit([int expectedExitCode]) { |
1365 _endScheduled = true; | 1360 _endScheduled = true; |
1366 _schedule((_) { | 1361 _schedule((_) { |
1367 _endExpected = true; | 1362 _endExpected = true; |
1368 return timeout(_exitCode, _SCHEDULE_TIMEOUT, | 1363 return timeout(_exitCode, _SCHEDULE_TIMEOUT, |
1369 "waiting for process $name to exit").transform((exitCode) { | 1364 "waiting for process $name to exit").then((exitCode) { |
1370 if (expectedExitCode != null) { | 1365 if (expectedExitCode != null) { |
1371 expect(exitCode, equals(expectedExitCode)); | 1366 expect(exitCode, equals(expectedExitCode)); |
1372 } | 1367 } |
1373 }); | 1368 }); |
1374 }); | 1369 }); |
1375 } | 1370 } |
1376 | 1371 |
1377 /// Prints the remaining data in the process's stdout and stderr streams. | 1372 /// Prints the remaining data in the process's stdout and stderr streams. |
1378 /// Prints nothing if the straems are empty. | 1373 /// Prints nothing if the straems are empty. |
1379 Future _printStreams() { | 1374 Future _printStreams() { |
1380 Future printStream(String streamName, StringInputStream stream) { | 1375 Future printStream(String streamName, StringInputStream stream) { |
1381 return consumeStringInputStream(stream).transform((output) { | 1376 return consumeStringInputStream(stream).then((output) { |
1382 if (output.isEmpty) return; | 1377 if (output.isEmpty) return; |
1383 | 1378 |
1384 print('\nProcess $name $streamName:'); | 1379 print('\nProcess $name $streamName:'); |
1385 for (var line in output.trim().split("\n")) { | 1380 for (var line in output.trim().split("\n")) { |
1386 print('| $line'); | 1381 print('| $line'); |
1387 } | 1382 } |
1388 return; | 1383 return; |
1389 }); | 1384 }); |
1390 } | 1385 } |
1391 | 1386 |
(...skipping 25 matching lines...) Expand all Loading... |
1417 var server = new HttpServer(); | 1412 var server = new HttpServer(); |
1418 server.defaultRequestHandler = scheduledServer._awaitHandle; | 1413 server.defaultRequestHandler = scheduledServer._awaitHandle; |
1419 server.listen("127.0.0.1", 0); | 1414 server.listen("127.0.0.1", 0); |
1420 _scheduleCleanup((_) => server.close()); | 1415 _scheduleCleanup((_) => server.close()); |
1421 return new Future.immediate(server); | 1416 return new Future.immediate(server); |
1422 })); | 1417 })); |
1423 return scheduledServer; | 1418 return scheduledServer; |
1424 } | 1419 } |
1425 | 1420 |
1426 /// The port on which the server is listening. | 1421 /// The port on which the server is listening. |
1427 Future<int> get port => _server.transform((s) => s.port); | 1422 Future<int> get port => _server.then((s) => s.port); |
1428 | 1423 |
1429 /// The base URL of the server, including its port. | 1424 /// The base URL of the server, including its port. |
1430 Future<Uri> get url => | 1425 Future<Uri> get url => |
1431 port.transform((p) => new Uri.fromString("http://localhost:$p")); | 1426 port.then((p) => new Uri.fromString("http://localhost:$p")); |
1432 | 1427 |
1433 /// Assert that the next request has the given [method] and [path], and pass | 1428 /// Assert that the next request has the given [method] and [path], and pass |
1434 /// it to [handler] to handle. If [handler] returns a [Future], wait until | 1429 /// it to [handler] to handle. If [handler] returns a [Future], wait until |
1435 /// it's completed to continue the schedule. | 1430 /// it's completed to continue the schedule. |
1436 void handle(String method, String path, | 1431 void handle(String method, String path, |
1437 Future handler(HttpRequest request, HttpResponse response)) { | 1432 Future handler(HttpRequest request, HttpResponse response)) { |
1438 var handlerCompleter = new Completer<Function>(); | 1433 var handlerCompleter = new Completer<Function>(); |
1439 _scheduleValue((_) { | 1434 _scheduleValue((_) { |
1440 var requestCompleteCompleter = new Completer(); | 1435 var requestCompleteCompleter = new Completer(); |
1441 handlerCompleter.complete((request, response) { | 1436 handlerCompleter.complete((request, response) { |
(...skipping 17 matching lines...) Expand all Loading... |
1459 _ignored.add(new Pair(method, path)); | 1454 _ignored.add(new Pair(method, path)); |
1460 | 1455 |
1461 /// Raises an error complaining of an unexpected request. | 1456 /// Raises an error complaining of an unexpected request. |
1462 void _awaitHandle(HttpRequest request, HttpResponse response) { | 1457 void _awaitHandle(HttpRequest request, HttpResponse response) { |
1463 if (_ignored.contains(new Pair(request.method, request.path))) return; | 1458 if (_ignored.contains(new Pair(request.method, request.path))) return; |
1464 var future = timeout(new Future.immediate(null).chain((_) { | 1459 var future = timeout(new Future.immediate(null).then((_) { |
1465 if (_handlers.isEmpty) { | 1460 if (_handlers.isEmpty) { |
1466 fail('Unexpected ${request.method} request to ${request.path}.'); | 1461 fail('Unexpected ${request.method} request to ${request.path}.'); |
1467 } | 1462 } |
1468 return _handlers.removeFirst(); | 1463 return _handlers.removeFirst(); |
1469 }).transform((handler) { | 1464 }).then((handler) { |
1470 handler(request, response); | 1465 handler(request, response); |
1471 }), _SCHEDULE_TIMEOUT, "waiting for a handler for ${request.method} " | 1466 }), _SCHEDULE_TIMEOUT, "waiting for a handler for ${request.method} " |
1472 "${request.path}"); | 1467 "${request.path}"); |
1473 expect(future, completes); | 1468 expect(future, completes); |
1474 } | 1469 } |
1475 } | 1470 } |
1476 | 1471 |
/// Recursively resolves every [Future] nested inside [object], which may be
/// any combination of [Map]s, [List]s, [Future]s, and scalar values.
///
/// Completes with a structure of the same shape containing no futures.
Future _awaitObject(object) {
  // A future may itself complete with another nested structure, so keep
  // recursing on the completed value.
  if (object is Future) return object.then(_awaitObject);
  if (object is Collection) {
    return Futures.wait(object.mappedBy(_awaitObject).toList());
  }
  if (object is! Map) return new Future.immediate(object);

  // For maps, resolve each value independently, then reassemble the
  // key/value pairs once everything has completed.
  var entryFutures = <Future<Pair>>[];
  object.forEach((key, value) {
    entryFutures.add(
        _awaitObject(value).then((resolved) => new Pair(key, resolved)));
  });
  return Futures.wait(entryFutures).then((entries) {
    var result = {};
    for (var entry in entries) {
      result[entry.first] = entry.last;
    }
    return result;
  });
}
1499 | 1496 |
1500 /// Schedules a callback to be called as part of the test case. | 1497 /// Schedules a callback to be called as part of the test case. |
1501 void _schedule(_ScheduledEvent event) { | 1498 void _schedule(_ScheduledEvent event) { |
(...skipping 28 matching lines...) Expand all Loading... |
1530 | 1527 |
/// Like [expect], but for [Future]s that complete as part of the scheduled
/// test. This is necessary to ensure that the exception thrown by the
/// expectation failing is handled by the scheduler.
///
/// Note that [matcher] matches against the completed value of [actual], so
/// calling [completion] is unnecessary.
void expectLater(Future actual, matcher, {String reason,
    FailureHandler failureHandler, bool verbose: false}) {
  _schedule((_) {
    return actual.then((value) {
      // Forward [verbose] to [expect]. It was previously hard-coded to
      // false, silently ignoring the caller's argument.
      expect(value, matcher, reason: reason, failureHandler: failureHandler,
          verbose: verbose);
    });
  });
}
OLD | NEW |