| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 import 'dart:async'; | 5 import 'dart:async'; |
| 6 import 'dart:io'; | 6 import 'dart:io'; |
| 7 | 7 |
| 8 import 'package:barback/barback.dart'; | 8 import 'package:barback/barback.dart'; |
| 9 import 'package:package_config/packages_file.dart' as packages_file; | 9 import 'package:package_config/packages_file.dart' as packages_file; |
| 10 import 'package:path/path.dart' as p; | 10 import 'package:path/path.dart' as p; |
| (...skipping 91 matching lines...) |
| 102 | 102 |
| 103 /// The path to the entrypoint's ".packages" file. | 103 /// The path to the entrypoint's ".packages" file. |
| 104 String get packagesFile => root.path('.packages'); | 104 String get packagesFile => root.path('.packages'); |
| 105 | 105 |
| 106 /// The path to the entrypoint package's pubspec. | 106 /// The path to the entrypoint package's pubspec. |
| 107 String get pubspecPath => root.path('pubspec.yaml'); | 107 String get pubspecPath => root.path('pubspec.yaml'); |
| 108 | 108 |
| 109 /// The path to the entrypoint package's lockfile. | 109 /// The path to the entrypoint package's lockfile. |
| 110 String get lockFilePath => root.path('pubspec.lock'); | 110 String get lockFilePath => root.path('pubspec.lock'); |
| 111 | 111 |
| 112 /// The path to the directory containing precompiled dependencies. |
| 113 /// |
| 114 /// We just precompile the debug version of a package. We're mostly interested |
| 115 /// in improving speed for development iteration loops, which usually use |
| 116 /// debug mode. |
| 117 String get _precompiledDepsPath => root.path('.pub', 'deps', 'debug'); |
| 118 |
| 119 /// The path to the directory containing dependency executable snapshots. |
| 120 String get _snapshotPath => root.path('.pub', 'bin'); |
| 121 |
| 112 /// Loads the entrypoint from a package at [rootDir]. | 122 /// Loads the entrypoint from a package at [rootDir]. |
| 113 /// | 123 /// |
| 114 /// If [packageSymlinks] is `true`, this will create a "packages" directory | 124 /// If [packageSymlinks] is `true`, this will create a "packages" directory |
| 115 /// with symlinks to the installed packages. This directory will be symlinked | 125 /// with symlinks to the installed packages. This directory will be symlinked |
| 116 /// into any directory that might contain an entrypoint. | 126 /// into any directory that might contain an entrypoint. |
| 117 Entrypoint(String rootDir, SystemCache cache, {bool packageSymlinks: true, | 127 Entrypoint(String rootDir, SystemCache cache, {bool packageSymlinks: true, |
| 118 this.isGlobal: false}) | 128 this.isGlobal: false}) |
| 119 : root = new Package.load(null, rootDir, cache.sources), | 129 : root = new Package.load(null, rootDir, cache.sources), |
| 120 cache = cache, | 130 cache = cache, |
| 121 _packageSymlinks = packageSymlinks, | 131 _packageSymlinks = packageSymlinks, |
| (...skipping 23 matching lines...) |
| 145 /// unlocked and forced to their latest versions. If [upgradeAll] is | 155 /// unlocked and forced to their latest versions. If [upgradeAll] is |
| 146 /// true, the previous lockfile is ignored and all packages are re-resolved | 156 /// true, the previous lockfile is ignored and all packages are re-resolved |
| 147 /// from scratch. Otherwise, it will attempt to preserve the versions of all | 157 /// from scratch. Otherwise, it will attempt to preserve the versions of all |
| 148 /// previously locked packages. | 158 /// previously locked packages. |
| 149 /// | 159 /// |
| 150 /// Shows a report of the changes made relative to the previous lockfile. If | 160 /// Shows a report of the changes made relative to the previous lockfile. If |
| 151 /// this is an upgrade or downgrade, all transitive dependencies are shown in | 161 /// this is an upgrade or downgrade, all transitive dependencies are shown in |
| 152 /// the report. Otherwise, only dependencies that were changed are shown. If | 162 /// the report. Otherwise, only dependencies that were changed are shown. If |
| 153 /// [dryRun] is `true`, no physical changes are made. | 163 /// [dryRun] is `true`, no physical changes are made. |
| 154 /// | 164 /// |
| 165 /// If [precompile] is `true` (the default), this snapshots dependencies' |
| 166 /// executables and runs transformers on transformed dependencies. |
| 167 /// |
| 155 /// Updates [lockFile] and [packageRoot] accordingly. | 168 /// Updates [lockFile] and [packageRoot] accordingly. |
| 156 Future acquireDependencies(SolveType type, {List<String> useLatest, | 169 Future acquireDependencies(SolveType type, {List<String> useLatest, |
| 157 bool dryRun: false}) async { | 170 bool dryRun: false, bool precompile: true}) async { |
| 158 var result = await resolveVersions(type, cache.sources, root, | 171 var result = await resolveVersions(type, cache.sources, root, |
| 159 lockFile: lockFile, useLatest: useLatest); | 172 lockFile: lockFile, useLatest: useLatest); |
| 160 if (!result.succeeded) throw result.error; | 173 if (!result.succeeded) throw result.error; |
| 161 | 174 |
| 162 result.showReport(type); | 175 result.showReport(type); |
| 163 | 176 |
| 164 if (dryRun) { | 177 if (dryRun) { |
| 165 result.summarizeChanges(type, dryRun: dryRun); | 178 result.summarizeChanges(type, dryRun: dryRun); |
| 166 return; | 179 return; |
| 167 } | 180 } |
| (...skipping 12 matching lines...) |
| 180 _linkOrDeleteSecondaryPackageDirs(); | 193 _linkOrDeleteSecondaryPackageDirs(); |
| 181 | 194 |
| 182 result.summarizeChanges(type, dryRun: dryRun); | 195 result.summarizeChanges(type, dryRun: dryRun); |
| 183 | 196 |
| 184 /// Build a package graph from the version solver results so we don't | 197 /// Build a package graph from the version solver results so we don't |
| 185 /// have to reload and reparse all the pubspecs. | 198 /// have to reload and reparse all the pubspecs. |
| 186 _packageGraph = new PackageGraph.fromSolveResult(this, result); | 199 _packageGraph = new PackageGraph.fromSolveResult(this, result); |
| 187 packageGraph.loadTransformerCache().clearIfOutdated(result.changedPackages); | 200 packageGraph.loadTransformerCache().clearIfOutdated(result.changedPackages); |
| 188 | 201 |
| 189 try { | 202 try { |
| 190 await precompileDependencies(changed: result.changedPackages); | 203 if (precompile) { |
| 191 await precompileExecutables(changed: result.changedPackages); | 204 await _precompileDependencies(changed: result.changedPackages); |
| 205 await precompileExecutables(changed: result.changedPackages); |
| 206 } else { |
| 207 // If precompilation is disabled, delete any stale cached dependencies |
| 208 // or snapshots. |
| 209 _deletePrecompiledDependencies( |
| 210 _dependenciesToPrecompile(changed: result.changedPackages)); |
| 211 _deleteExecutableSnapshots(changed: result.changedPackages); |
| 212 } |
| 192 } catch (error, stackTrace) { | 213 } catch (error, stackTrace) { |
| 193 // Just log exceptions here. Since the method is just about acquiring | 214 // Just log exceptions here. Since the method is just about acquiring |
| 194 // dependencies, it shouldn't fail unless that fails. | 215 // dependencies, it shouldn't fail unless that fails. |
| 195 log.exception(error, stackTrace); | 216 log.exception(error, stackTrace); |
| 196 } | 217 } |
| 197 | 218 |
| 198 writeTextFile(packagesFile, lockFile.packagesFile(root.name)); | 219 writeTextFile(packagesFile, lockFile.packagesFile(root.name)); |
| 199 } | 220 } |
| 200 | 221 |
| 201 /// Precompiles any transformed dependencies of the entrypoint. | 222 /// Precompiles any transformed dependencies of the entrypoint. |
| 202 /// | 223 /// |
| 203 /// If [changed] is passed, only dependencies whose contents might be changed | 224 /// If [changed] is passed, only dependencies whose contents might be changed |
| 204 /// if one of the given packages changes will be recompiled. | 225 /// if one of the given packages changes will be recompiled. |
| 205 Future precompileDependencies({Iterable<String> changed}) async { | 226 Future _precompileDependencies({Iterable<String> changed}) async { |
| 206 if (changed != null) changed = changed.toSet(); | 227 if (changed != null) changed = changed.toSet(); |
| 207 | 228 |
| 208 // Just precompile the debug version of a package. We're mostly interested | 229 var dependenciesToPrecompile = _dependenciesToPrecompile(changed: changed); |
| 209 // in improving speed for development iteration loops, which usually use | 230 _deletePrecompiledDependencies(dependenciesToPrecompile); |
| 210 // debug mode. | |
| 211 var depsDir = p.join('.pub', 'deps', 'debug'); | |
| 212 | |
| 213 var dependenciesToPrecompile = packageGraph.packages.values | |
| 214 .where((package) { | |
| 215 if (package.pubspec.transformers.isEmpty) return false; | |
| 216 if (packageGraph.isPackageMutable(package.name)) return false; | |
| 217 if (!dirExists(p.join(depsDir, package.name))) return true; | |
| 218 if (changed == null) return true; | |
| 219 | |
| 220 /// Only recompile [package] if any of its transitive dependencies have | |
| 221 /// changed. We check all transitive dependencies because it's possible | |
| 222 /// that a transformer makes decisions based on their contents. | |
| 223 return overlaps( | |
| 224 packageGraph.transitiveDependencies(package.name) | |
| 225 .map((package) => package.name).toSet(), | |
| 226 changed); | |
| 227 }).map((package) => package.name).toSet(); | |
| 228 | |
| 229 if (dirExists(depsDir)) { | |
| 230 // Delete any cached dependencies that are going to be recached. | |
| 231 for (var package in dependenciesToPrecompile) { | |
| 232 deleteEntry(p.join(depsDir, package)); | |
| 233 } | |
| 234 | |
| 235 // Also delete any cached dependencies that should no longer be cached. | |
| 236 for (var subdir in listDir(depsDir)) { | |
| 237 var package = packageGraph.packages[p.basename(subdir)]; | |
| 238 if (package == null || package.pubspec.transformers.isEmpty || | |
| 239 packageGraph.isPackageMutable(package.name)) { | |
| 240 deleteEntry(subdir); | |
| 241 } | |
| 242 } | |
| 243 } | |
| 244 | |
| 245 if (dependenciesToPrecompile.isEmpty) return; | 231 if (dependenciesToPrecompile.isEmpty) return; |
| 246 | 232 |
| 247 try { | 233 try { |
| 248 await log.progress("Precompiling dependencies", () async { | 234 await log.progress("Precompiling dependencies", () async { |
| 249 var packagesToLoad = | 235 var packagesToLoad = |
| 250 unionAll(dependenciesToPrecompile.map( | 236 unionAll(dependenciesToPrecompile.map( |
| 251 packageGraph.transitiveDependencies)) | 237 packageGraph.transitiveDependencies)) |
| 252 .map((package) => package.name).toSet(); | 238 .map((package) => package.name).toSet(); |
| 253 | 239 |
| 254 var environment = await AssetEnvironment.create(this, BarbackMode.DEBUG, | 240 var environment = await AssetEnvironment.create(this, BarbackMode.DEBUG, |
| 255 packages: packagesToLoad, useDart2JS: false); | 241 packages: packagesToLoad, useDart2JS: false); |
| 256 | 242 |
| 257 /// Ignore barback errors since they'll be emitted via [getAllAssets] | 243 /// Ignore barback errors since they'll be emitted via [getAllAssets] |
| 258 /// below. | 244 /// below. |
| 259 environment.barback.errors.listen((_) {}); | 245 environment.barback.errors.listen((_) {}); |
| 260 | 246 |
| 261 // TODO(nweiz): only get assets from [dependenciesToPrecompile] so as | 247 // TODO(nweiz): only get assets from [dependenciesToPrecompile] so as |
| 262 // not to trigger unnecessary lazy transformers. | 248 // not to trigger unnecessary lazy transformers. |
| 263 var assets = await environment.barback.getAllAssets(); | 249 var assets = await environment.barback.getAllAssets(); |
| 264 await waitAndPrintErrors(assets.map((asset) async { | 250 await waitAndPrintErrors(assets.map((asset) async { |
| 265 if (!dependenciesToPrecompile.contains(asset.id.package)) return; | 251 if (!dependenciesToPrecompile.contains(asset.id.package)) return; |
| 266 | 252 |
| 267 var destPath = p.join( | 253 var destPath = p.join( |
| 268 depsDir, asset.id.package, p.fromUri(asset.id.path)); | 254 _precompiledDepsPath, asset.id.package, p.fromUri(asset.id.path)); |
| 269 ensureDir(p.dirname(destPath)); | 255 ensureDir(p.dirname(destPath)); |
| 270 await createFileFromStream(asset.read(), destPath); | 256 await createFileFromStream(asset.read(), destPath); |
| 271 })); | 257 })); |
| 272 | 258 |
| 273 log.message("Precompiled " + | 259 log.message("Precompiled " + |
| 274 toSentence(ordered(dependenciesToPrecompile).map(log.bold)) + "."); | 260 toSentence(ordered(dependenciesToPrecompile).map(log.bold)) + "."); |
| 275 }); | 261 }); |
| 276 } catch (_) { | 262 } catch (_) { |
| 277 // TODO(nweiz): When barback does a better job of associating errors with | 263 // TODO(nweiz): When barback does a better job of associating errors with |
| 278 // assets (issue 19491), catch and handle compilation errors on a | 264 // assets (issue 19491), catch and handle compilation errors on a |
| 279 // per-package basis. | 265 // per-package basis. |
| 280 for (var package in dependenciesToPrecompile) { | 266 for (var package in dependenciesToPrecompile) { |
| 281 deleteEntry(p.join(depsDir, package)); | 267 deleteEntry(p.join(_precompiledDepsPath, package)); |
| 282 } | 268 } |
| 283 rethrow; | 269 rethrow; |
| 284 } | 270 } |
| 285 } | 271 } |
| 286 | 272 |
| 273 /// Returns the set of dependencies that need to be precompiled. |
| 274 /// |
| 275 /// If [changed] is passed, only dependencies whose contents might be changed |
| 276 /// if one of the given packages changes will be returned. |
| 277 Set<String> _dependenciesToPrecompile({Iterable<String> changed}) { |
| 278 return packageGraph.packages.values.where((package) { |
| 279 if (package.pubspec.transformers.isEmpty) return false; |
| 280 if (packageGraph.isPackageMutable(package.name)) return false; |
| 281 if (!dirExists(p.join(_precompiledDepsPath, package.name))) return true; |
| 282 if (changed == null) return true; |
| 283 |
| 284 /// Only recompile [package] if any of its transitive dependencies have |
| 285 /// changed. We check all transitive dependencies because it's possible |
| 286 /// that a transformer makes decisions based on their contents. |
| 287 return overlaps( |
| 288 packageGraph.transitiveDependencies(package.name) |
| 289 .map((package) => package.name).toSet(), |
| 290 changed); |
| 291 }).map((package) => package.name).toSet(); |
| 292 } |
| 293 |
| 294 /// Deletes outdated precompiled dependencies. |
| 295 /// |
| 296 /// This deletes the precompilations of all packages in [packages], as well as |
| 297 /// those of any packages that are now untransformed or mutable. |
| 298 void _deletePrecompiledDependencies([Iterable<String> packages]) { |
| 299 if (!dirExists(_precompiledDepsPath)) return; |
| 300 |
| 301 // Delete any cached dependencies that are going to be recached. |
| 302 packages ??= []; |
| 303 for (var package in packages) { |
| 304 var path = p.join(_precompiledDepsPath, package); |
| 305 if (dirExists(path)) deleteEntry(path); |
| 306 } |
| 307 |
| 308 // Also delete any cached dependencies that should no longer be cached. |
| 309 for (var subdir in listDir(_precompiledDepsPath)) { |
| 310 var package = packageGraph.packages[p.basename(subdir)]; |
| 311 if (package == null || package.pubspec.transformers.isEmpty || |
| 312 packageGraph.isPackageMutable(package.name)) { |
| 313 deleteEntry(subdir); |
| 314 } |
| 315 } |
| 316 } |
| 317 |
| 287 /// Precompiles all executables from dependencies that don't transitively | 318 /// Precompiles all executables from dependencies that don't transitively |
| 288 /// depend on [this] or on a path dependency. | 319 /// depend on [this] or on a path dependency. |
| 289 Future precompileExecutables({Iterable<String> changed}) async { | 320 Future precompileExecutables({Iterable<String> changed}) async { |
| 290 if (changed != null) changed = changed.toSet(); | 321 _deleteExecutableSnapshots(changed: changed); |
| 291 | |
| 292 var binDir = p.join('.pub', 'bin'); | |
| 293 var sdkVersionPath = p.join(binDir, 'sdk-version'); | |
| 294 | |
| 295 // If the existing executable was compiled with a different SDK, we need to | |
| 296 // recompile regardless of what changed. | |
| 297 // TODO(nweiz): Use the VM to check this when issue 20802 is fixed. | |
| 298 var sdkMatches = fileExists(sdkVersionPath) && | |
| 299 readTextFile(sdkVersionPath) == "${sdk.version}\n"; | |
| 300 if (!sdkMatches) changed = null; | |
| 301 | |
| 302 // Clean out any outdated snapshots. | |
| 303 if (dirExists(binDir)) { | |
| 304 for (var entry in listDir(binDir)) { | |
| 305 if (!dirExists(entry)) continue; | |
| 306 | |
| 307 var package = p.basename(entry); | |
| 308 if (!packageGraph.packages.containsKey(package) || | |
| 309 packageGraph.isPackageMutable(package)) { | |
| 310 deleteEntry(entry); | |
| 311 } | |
| 312 } | |
| 313 } | |
| 314 | 322 |
| 315 var executables = new Map.fromIterable(root.immediateDependencies, | 323 var executables = new Map.fromIterable(root.immediateDependencies, |
| 316 key: (dep) => dep.name, | 324 key: (dep) => dep.name, |
| 317 value: (dep) => _executablesForPackage(dep.name, changed)); | 325 value: (dep) => _executablesForPackage(dep.name)); |
| 318 | 326 |
| 319 for (var package in executables.keys.toList()) { | 327 for (var package in executables.keys.toList()) { |
| 320 if (executables[package].isEmpty) executables.remove(package); | 328 if (executables[package].isEmpty) executables.remove(package); |
| 321 } | 329 } |
| 322 | 330 |
| 323 if (!sdkMatches) deleteEntry(binDir); | |
| 324 if (executables.isEmpty) return; | 331 if (executables.isEmpty) return; |
| 325 | 332 |
| 326 await log.progress("Precompiling executables", () async { | 333 await log.progress("Precompiling executables", () async { |
| 327 ensureDir(binDir); | 334 ensureDir(_snapshotPath); |
| 328 | 335 |
| 329 // Make sure there's a trailing newline so our version file matches the | 336 // Make sure there's a trailing newline so our version file matches the |
| 330 // SDK's. | 337 // SDK's. |
| 331 writeTextFile(sdkVersionPath, "${sdk.version}\n"); | 338 writeTextFile(p.join(_snapshotPath, 'sdk-version'), "${sdk.version}\n"); |
| 332 | 339 |
| 333 var packagesToLoad = | 340 var packagesToLoad = |
| 334 unionAll(executables.keys.map(packageGraph.transitiveDependencies)) | 341 unionAll(executables.keys.map(packageGraph.transitiveDependencies)) |
| 335 .map((package) => package.name).toSet(); | 342 .map((package) => package.name).toSet(); |
| 336 var executableIds = unionAll( | 343 var executableIds = unionAll( |
| 337 executables.values.map((ids) => ids.toSet())); | 344 executables.values.map((ids) => ids.toSet())); |
| 338 var environment = await AssetEnvironment.create(this, BarbackMode.RELEASE, | 345 var environment = await AssetEnvironment.create(this, BarbackMode.RELEASE, |
| 339 packages: packagesToLoad, | 346 packages: packagesToLoad, |
| 340 entrypoints: executableIds, | 347 entrypoints: executableIds, |
| 341 useDart2JS: false); | 348 useDart2JS: false); |
| 342 environment.barback.errors.listen((error) { | 349 environment.barback.errors.listen((error) { |
| 343 log.error(log.red("Build error:\n$error")); | 350 log.error(log.red("Build error:\n$error")); |
| 344 }); | 351 }); |
| 345 | 352 |
| 346 await waitAndPrintErrors(executables.keys.map((package) async { | 353 await waitAndPrintErrors(executables.keys.map((package) async { |
| 347 var dir = p.join(binDir, package); | 354 var dir = p.join(_snapshotPath, package); |
| 348 cleanDir(dir); | 355 cleanDir(dir); |
| 349 await environment.precompileExecutables(package, dir, | 356 await environment.precompileExecutables(package, dir, |
| 350 executableIds: executables[package]); | 357 executableIds: executables[package]); |
| 351 })); | 358 })); |
| 352 }); | 359 }); |
| 353 } | 360 } |
| 354 | 361 |
| 362 /// Deletes outdated cached executable snapshots. |
| 363 /// |
| 364 /// If [changed] is passed, only dependencies whose contents might be changed |
| 365 /// if one of the given packages changes will have their executables deleted. |
| 366 void _deleteExecutableSnapshots({Iterable<String> changed}) { |
| 367 if (!dirExists(_snapshotPath)) return; |
| 368 |
| 369 // If we don't know what changed, we can't safely re-use any snapshots. |
| 370 if (changed == null) { |
| 371 deleteEntry(_snapshotPath); |
| 372 return; |
| 373 } |
| 374 changed = changed.toSet(); |
| 375 |
| 376 // If the existing executable was compiled with a different SDK, we need to |
| 377 // recompile regardless of what changed. |
| 378 // TODO(nweiz): Use the VM to check this when issue 20802 is fixed. |
| 379 var sdkVersionPath = p.join(_snapshotPath, 'sdk-version'); |
| 380 if (!fileExists(sdkVersionPath) || |
| 381 readTextFile(sdkVersionPath) != "${sdk.version}\n") { |
| 382 deleteEntry(_snapshotPath); |
| 383 return; |
| 384 } |
| 385 |
| 386 // Clean out any outdated snapshots. |
| 387 for (var entry in listDir(_snapshotPath)) { |
| 388 if (!dirExists(entry)) continue; |
| 389 |
| 390 var package = p.basename(entry); |
| 391 if (!packageGraph.packages.containsKey(package) || |
| 392 packageGraph.isPackageMutable(package) || |
| 393 packageGraph.transitiveDependencies(package) |
| 394 .any((dep) => changed.contains(dep.name))) { |
| 395 deleteEntry(entry); |
| 396 } |
| 397 } |
| 398 } |
| 399 |
| 355 /// Returns the list of all executable assets for [packageName] that should be | 400 /// Returns the list of all executable assets for [packageName] that should be |
| 356 /// precompiled. | 401 /// precompiled. |
| 357 /// | 402 List<AssetId> _executablesForPackage(String packageName) { |
| 358 /// If [changed] isn't `null`, executables for [packageName] will only be | |
| 359 /// compiled if they might depend on a package in [changed]. | |
| 360 List<AssetId> _executablesForPackage(String packageName, | |
| 361 Set<String> changed) { | |
| 362 var package = packageGraph.packages[packageName]; | 403 var package = packageGraph.packages[packageName]; |
| 363 var binDir = package.path('bin'); | 404 var binDir = package.path('bin'); |
| 364 if (!dirExists(binDir)) return []; | 405 if (!dirExists(binDir)) return []; |
| 365 if (packageGraph.isPackageMutable(packageName)) return []; | 406 if (packageGraph.isPackageMutable(packageName)) return []; |
| 366 | 407 |
| 367 var executables = package.executableIds; | 408 var executables = package.executableIds; |
| 368 | 409 |
| 369 // If we don't know which packages were changed, always precompile the | 410 // If any executables don't exist, recompile all executables. |
| 370 // executables. | 411 // |
| 371 if (changed == null) return executables; | 412 // Normally, [_deleteExecutableSnapshots] will ensure that all the outdated |
| 372 | 413 // executable directories will be deleted, so checking for any non-existent |
| 373 // If any of the package's dependencies changed, recompile the executables. | 414 // executable will save us a few IO operations over checking each one. If |
| 374 if (packageGraph.transitiveDependencies(packageName) | 415 // some executables do exist and some do not, the directory is corrupted and |
| 375 .any((package) => changed.contains(package.name))) { | 416 // it's good to start from scratch anyway. |
| 376 return executables; | |
| 377 } | |
| 378 | |
| 379 // If any executables don't exist, precompile them regardless of what | |
| 380 // changed. Since we delete the bin directory before recompiling, we need to | |
| 381 // recompile all executables. | |
| 382 var executablesExist = executables.every((executable) => | 417 var executablesExist = executables.every((executable) => |
| 383 fileExists(p.join('.pub', 'bin', packageName, | 418 fileExists(p.join(_snapshotPath, packageName, |
| 384 "${p.url.basename(executable.path)}.snapshot"))); | 419 "${p.url.basename(executable.path)}.snapshot"))); |
| 385 if (!executablesExist) return executables; | 420 if (!executablesExist) return executables; |
| 386 | 421 |
| 387 // Otherwise, we don't need to recompile. | 422 // Otherwise, we don't need to recompile. |
| 388 return []; | 423 return []; |
| 389 } | 424 } |
| 390 | 425 |
| 391 /// Makes sure the package at [id] is locally available. | 426 /// Makes sure the package at [id] is locally available. |
| 392 /// | 427 /// |
| 393 /// This automatically downloads the package to the system-wide cache as well | 428 /// This automatically downloads the package to the system-wide cache as well |
| (...skipping 235 matching lines...) |
| 629 /// If [packageSymlinks] is true, creates a symlink to the "packages" | 664 /// If [packageSymlinks] is true, creates a symlink to the "packages" |
| 630 /// directory in [dir]. | 665 /// directory in [dir]. |
| 631 /// | 666 /// |
| 632 /// Otherwise, deletes a "packages" directory in [dir] if one exists. | 667 /// Otherwise, deletes a "packages" directory in [dir] if one exists. |
| 633 void _linkOrDeleteSecondaryPackageDir(String dir) { | 668 void _linkOrDeleteSecondaryPackageDir(String dir) { |
| 634 var symlink = p.join(dir, 'packages'); | 669 var symlink = p.join(dir, 'packages'); |
| 635 if (entryExists(symlink)) deleteEntry(symlink); | 670 if (entryExists(symlink)) deleteEntry(symlink); |
| 636 if (_packageSymlinks) createSymlink(packagesDir, symlink, relative: true); | 671 if (_packageSymlinks) createSymlink(packagesDir, symlink, relative: true); |
| 637 } | 672 } |
| 638 } | 673 } |
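
A minimal usage sketch of the updated API (not part of the CL itself): the Entrypoint constructor, acquireDependencies with its new precompile flag, and precompileExecutables are taken from the diff above, while the import paths and the SystemCache setup are assumptions made purely for illustration.

    import 'dart:async';

    // Assumed import paths; pub's actual package layout may differ.
    import 'package:pub/src/entrypoint.dart';
    import 'package:pub/src/solver/version_solver.dart';
    import 'package:pub/src/system_cache.dart';

    Future main() async {
      // Assumed cache setup; the real constructor arguments aren't shown here.
      var cache = new SystemCache.withSources();
      var entrypoint = new Entrypoint('.', cache);

      // Resolve and fetch dependencies without precompiling anything. With
      // precompile: false, stale entries under .pub/deps/debug and .pub/bin
      // are deleted rather than rebuilt.
      await entrypoint.acquireDependencies(SolveType.GET, precompile: false);

      // Executable snapshots can still be produced later, on demand.
      await entrypoint.precompileExecutables();
    }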
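
For reference, the set helpers that the new _dependenciesToPrecompile and precompileExecutables code leans on (overlaps, unionAll) are defined in pub's utils library rather than in this file; a rough sketch of their assumed semantics, not copied from this CL:

    /// Whether [set1] and [set2] share at least one element.
    bool overlaps(Set set1, Set set2) => set1.any(set2.contains);

    /// The union of every set in [sets].
    Set unionAll(Iterable<Set> sets) =>
        sets.fold(new Set(), (union, set) => union.union(set));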