Chromium Code Reviews

Side by Side Diff: pkg/analyzer/lib/src/context/context.dart

Issue 1133673007: Remove the 'cache' prefix by hiding the conflicting entities from engine.dart (Closed)
Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 5 years, 7 months ago
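For context: context.dart used to import cache.dart under the prefix 'cache' because engine.dart also exposes declarations named AnalysisCache, CachePartition, SdkCachePartition, and UniversalCachePartition. The patch hides those names from the engine.dart import instead, so everything declared in cache.dart can be referenced without a prefix. A minimal sketch of the two import styles, using the same library paths as the patch (the comments are illustrative and not part of the CL):

    // Old style: prefix the cache library and qualify every use,
    // e.g. cache.CacheEntry, cache.AnalysisCache.
    //   import 'package:analyzer/src/context/cache.dart' as cache;
    //   import 'package:analyzer/src/generated/engine.dart' hide WorkManager;

    // New style: hide the clashing names from engine.dart so that the
    // declarations from cache.dart resolve without a prefix.
    import 'package:analyzer/src/context/cache.dart';
    import 'package:analyzer/src/generated/engine.dart'
        hide
            AnalysisCache,
            CachePartition,
            SdkCachePartition,
            UniversalCachePartition,
            WorkManager;

With the prefix gone, the rest of the diff is largely mechanical: every cache.CacheEntry, cache.AnalysisCache, cache.CachePartition, cache.SdkCachePartition, cache.UniversalCachePartition, and cache.TargetedResult reference in context.dart becomes a plain type name.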
1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 library analyzer.src.context.context; 5 library analyzer.src.context.context;
6 6
7 import 'dart:async'; 7 import 'dart:async';
8 import 'dart:collection'; 8 import 'dart:collection';
9 9
10 import 'package:analyzer/src/cancelable_future.dart'; 10 import 'package:analyzer/src/cancelable_future.dart';
11 import 'package:analyzer/src/context/cache.dart' as cache; 11 import 'package:analyzer/src/context/cache.dart';
12 import 'package:analyzer/src/generated/ast.dart'; 12 import 'package:analyzer/src/generated/ast.dart';
13 import 'package:analyzer/src/generated/constant.dart'; 13 import 'package:analyzer/src/generated/constant.dart';
14 import 'package:analyzer/src/generated/element.dart'; 14 import 'package:analyzer/src/generated/element.dart';
15 import 'package:analyzer/src/generated/engine.dart' hide WorkManager; 15 import 'package:analyzer/src/generated/engine.dart'
16 hide
17 AnalysisCache,
18 CachePartition,
19 SdkCachePartition,
20 UniversalCachePartition,
21 WorkManager;
16 import 'package:analyzer/src/generated/error.dart'; 22 import 'package:analyzer/src/generated/error.dart';
17 import 'package:analyzer/src/generated/html.dart' as ht; 23 import 'package:analyzer/src/generated/html.dart' as ht;
18 import 'package:analyzer/src/generated/java_core.dart'; 24 import 'package:analyzer/src/generated/java_core.dart';
19 import 'package:analyzer/src/generated/java_engine.dart'; 25 import 'package:analyzer/src/generated/java_engine.dart';
20 import 'package:analyzer/src/generated/resolver.dart'; 26 import 'package:analyzer/src/generated/resolver.dart';
21 import 'package:analyzer/src/generated/scanner.dart'; 27 import 'package:analyzer/src/generated/scanner.dart';
22 import 'package:analyzer/src/generated/sdk.dart' show DartSdk; 28 import 'package:analyzer/src/generated/sdk.dart' show DartSdk;
23 import 'package:analyzer/src/generated/source.dart'; 29 import 'package:analyzer/src/generated/source.dart';
24 import 'package:analyzer/src/generated/utilities_collection.dart'; 30 import 'package:analyzer/src/generated/utilities_collection.dart';
25 import 'package:analyzer/src/task/dart.dart'; 31 import 'package:analyzer/src/task/dart.dart';
(...skipping 10 matching lines...)
36 * the computation can't be performed yet because more analysis is needed, 42 * the computation can't be performed yet because more analysis is needed,
37 * `null` should be returned. 43 * `null` should be returned.
38 * 44 *
39 * The function may also throw an exception, in which case the corresponding 45 * The function may also throw an exception, in which case the corresponding
40 * future will be completed with failure. 46 * future will be completed with failure.
41 * 47 *
42 * Because this function is called while the state of analysis is being updated, 48 * Because this function is called while the state of analysis is being updated,
43 * it should be free of side effects so that it doesn't cause reentrant changes 49 * it should be free of side effects so that it doesn't cause reentrant changes
44 * to the analysis state. 50 * to the analysis state.
45 */ 51 */
46 typedef T PendingFutureComputer<T>(cache.CacheEntry entry); 52 typedef T PendingFutureComputer<T>(CacheEntry entry);
47 53
48 /** 54 /**
49 * An [AnalysisContext] in which analysis can be performed. 55 * An [AnalysisContext] in which analysis can be performed.
50 */ 56 */
51 class AnalysisContextImpl implements InternalAnalysisContext { 57 class AnalysisContextImpl implements InternalAnalysisContext {
52 /** 58 /**
53 * A client-provided name used to identify this context, or `null` if the 59 * A client-provided name used to identify this context, or `null` if the
54 * client has not provided a name. 60 * client has not provided a name.
55 */ 61 */
56 String name; 62 String name;
(...skipping 21 matching lines...)
78 84
79 /** 85 /**
80 * The set of declared variables used when computing constant values. 86 * The set of declared variables used when computing constant values.
81 */ 87 */
82 DeclaredVariables _declaredVariables = new DeclaredVariables(); 88 DeclaredVariables _declaredVariables = new DeclaredVariables();
83 89
84 /** 90 /**
85 * The partition that contains analysis results that are not shared with other 91 * The partition that contains analysis results that are not shared with other
86 * contexts. 92 * contexts.
87 */ 93 */
88 cache.CachePartition _privatePartition; 94 CachePartition _privatePartition;
89 95
90 /** 96 /**
91 * The cache in which information about the results associated with targets 97 * The cache in which information about the results associated with targets
92 * are stored. 98 * are stored.
93 */ 99 */
94 cache.AnalysisCache _cache; 100 AnalysisCache _cache;
95 101
96 /** 102 /**
97 * The task manager used to manage the tasks used to analyze code. 103 * The task manager used to manage the tasks used to analyze code.
98 */ 104 */
99 TaskManager _taskManager; 105 TaskManager _taskManager;
100 106
101 /** 107 /**
102 * The [DartWorkManager] instance that performs Dart specific scheduling. 108 * The [DartWorkManager] instance that performs Dart specific scheduling.
103 */ 109 */
104 DartWorkManager dartWorkManager; 110 DartWorkManager dartWorkManager;
(...skipping 75 matching lines...)
180 186
181 /** 187 /**
182 * A factory to override how [LibraryResolver] is created. 188 * A factory to override how [LibraryResolver] is created.
183 */ 189 */
184 LibraryResolverFactory libraryResolverFactory; 190 LibraryResolverFactory libraryResolverFactory;
185 191
186 /** 192 /**
187 * Initialize a newly created analysis context. 193 * Initialize a newly created analysis context.
188 */ 194 */
189 AnalysisContextImpl() { 195 AnalysisContextImpl() {
190 _privatePartition = new cache.UniversalCachePartition(this); 196 _privatePartition = new UniversalCachePartition(this);
191 _cache = createCacheFromSourceFactory(null); 197 _cache = createCacheFromSourceFactory(null);
192 _taskManager = AnalysisEngine.instance.taskManager; 198 _taskManager = AnalysisEngine.instance.taskManager;
193 // TODO(scheglov) Get WorkManager(Factory)(s) from plugins. 199 // TODO(scheglov) Get WorkManager(Factory)(s) from plugins.
194 dartWorkManager = new DartWorkManager(this); 200 dartWorkManager = new DartWorkManager(this);
195 driver = 201 driver =
196 new AnalysisDriver(_taskManager, <WorkManager>[dartWorkManager], this); 202 new AnalysisDriver(_taskManager, <WorkManager>[dartWorkManager], this);
197 _onSourcesChangedController = 203 _onSourcesChangedController =
198 new StreamController<SourcesChangedEvent>.broadcast(); 204 new StreamController<SourcesChangedEvent>.broadcast();
199 } 205 }
200 206
201 @override 207 @override
202 cache.AnalysisCache get analysisCache => _cache; 208 AnalysisCache get analysisCache => _cache;
203 209
204 @override 210 @override
205 AnalysisOptions get analysisOptions => _options; 211 AnalysisOptions get analysisOptions => _options;
206 212
207 @override 213 @override
208 void set analysisOptions(AnalysisOptions options) { 214 void set analysisOptions(AnalysisOptions options) {
209 bool needsRecompute = this._options.analyzeFunctionBodiesPredicate != 215 bool needsRecompute = this._options.analyzeFunctionBodiesPredicate !=
210 options.analyzeFunctionBodiesPredicate || 216 options.analyzeFunctionBodiesPredicate ||
211 this._options.generateImplicitErrors != 217 this._options.generateImplicitErrors !=
212 options.generateImplicitErrors || 218 options.generateImplicitErrors ||
(...skipping 47 matching lines...)
260 set contentCache(ContentCache value) { 266 set contentCache(ContentCache value) {
261 _contentCache = value; 267 _contentCache = value;
262 } 268 }
263 269
264 @override 270 @override
265 DeclaredVariables get declaredVariables => _declaredVariables; 271 DeclaredVariables get declaredVariables => _declaredVariables;
266 272
267 @override 273 @override
268 List<AnalysisTarget> get explicitTargets { 274 List<AnalysisTarget> get explicitTargets {
269 List<AnalysisTarget> targets = <AnalysisTarget>[]; 275 List<AnalysisTarget> targets = <AnalysisTarget>[];
270 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 276 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
271 while (iterator.moveNext()) { 277 while (iterator.moveNext()) {
272 if (iterator.value.explicitlyAdded) { 278 if (iterator.value.explicitlyAdded) {
273 targets.add(iterator.key); 279 targets.add(iterator.key);
274 } 280 }
275 } 281 }
276 return targets; 282 return targets;
277 } 283 }
278 284
279 @override 285 @override
280 List<Source> get htmlSources => _getSources(SourceKind.HTML); 286 List<Source> get htmlSources => _getSources(SourceKind.HTML);
281 287
282 @override 288 @override
283 bool get isDisposed => _disposed; 289 bool get isDisposed => _disposed;
284 290
285 @override 291 @override
286 List<Source> get launchableClientLibrarySources { 292 List<Source> get launchableClientLibrarySources {
287 List<Source> sources = new List<Source>(); 293 List<Source> sources = new List<Source>();
288 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 294 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
289 while (iterator.moveNext()) { 295 while (iterator.moveNext()) {
290 AnalysisTarget target = iterator.key; 296 AnalysisTarget target = iterator.key;
291 cache.CacheEntry entry = iterator.value; 297 CacheEntry entry = iterator.value;
292 if (target is Source && 298 if (target is Source &&
293 entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY && 299 entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY &&
294 !target.isInSystemLibrary && 300 !target.isInSystemLibrary &&
295 isClientLibrary(target)) { 301 isClientLibrary(target)) {
296 sources.add(target); 302 sources.add(target);
297 } 303 }
298 } 304 }
299 return sources; 305 return sources;
300 } 306 }
301 307
302 @override 308 @override
303 List<Source> get launchableServerLibrarySources { 309 List<Source> get launchableServerLibrarySources {
304 List<Source> sources = new List<Source>(); 310 List<Source> sources = new List<Source>();
305 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 311 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
306 while (iterator.moveNext()) { 312 while (iterator.moveNext()) {
307 AnalysisTarget target = iterator.key; 313 AnalysisTarget target = iterator.key;
308 cache.CacheEntry entry = iterator.value; 314 CacheEntry entry = iterator.value;
309 if (target is Source && 315 if (target is Source &&
310 entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY && 316 entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY &&
311 !target.isInSystemLibrary && 317 !target.isInSystemLibrary &&
312 isServerLibrary(target)) { 318 isServerLibrary(target)) {
313 sources.add(target); 319 sources.add(target);
314 } 320 }
315 } 321 }
316 return sources; 322 return sources;
317 } 323 }
318 324
(...skipping 32 matching lines...)
351 } 357 }
352 factory.context = this; 358 factory.context = this;
353 _sourceFactory = factory; 359 _sourceFactory = factory;
354 _cache = createCacheFromSourceFactory(factory); 360 _cache = createCacheFromSourceFactory(factory);
355 _invalidateAllLocalResolutionInformation(true); 361 _invalidateAllLocalResolutionInformation(true);
356 } 362 }
357 363
358 @override 364 @override
359 List<Source> get sources { 365 List<Source> get sources {
360 List<Source> sources = new List<Source>(); 366 List<Source> sources = new List<Source>();
361 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 367 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
362 while (iterator.moveNext()) { 368 while (iterator.moveNext()) {
363 AnalysisTarget target = iterator.key; 369 AnalysisTarget target = iterator.key;
364 if (target is Source) { 370 if (target is Source) {
365 sources.add(target); 371 sources.add(target);
366 } 372 }
367 } 373 }
368 return sources; 374 return sources;
369 } 375 }
370 376
371 /** 377 /**
372 * Return a list of the sources that would be processed by 378 * Return a list of the sources that would be processed by
373 * [performAnalysisTask]. This method duplicates, and must therefore be kept 379 * [performAnalysisTask]. This method duplicates, and must therefore be kept
374 * in sync with, [getNextAnalysisTask]. This method is intended to be used for 380 * in sync with, [getNextAnalysisTask]. This method is intended to be used for
375 * testing purposes only. 381 * testing purposes only.
376 */ 382 */
377 List<Source> get sourcesNeedingProcessing { 383 List<Source> get sourcesNeedingProcessing {
378 HashSet<Source> sources = new HashSet<Source>(); 384 HashSet<Source> sources = new HashSet<Source>();
379 bool hintsEnabled = _options.hint; 385 bool hintsEnabled = _options.hint;
380 bool lintsEnabled = _options.lint; 386 bool lintsEnabled = _options.lint;
381 387
382 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = 388 MapIterator<AnalysisTarget, CacheEntry> iterator =
383 _privatePartition.iterator(); 389 _privatePartition.iterator();
384 while (iterator.moveNext()) { 390 while (iterator.moveNext()) {
385 AnalysisTarget target = iterator.key; 391 AnalysisTarget target = iterator.key;
386 if (target is Source) { 392 if (target is Source) {
387 _getSourcesNeedingProcessing( 393 _getSourcesNeedingProcessing(
388 target, iterator.value, false, hintsEnabled, lintsEnabled, sources); 394 target, iterator.value, false, hintsEnabled, lintsEnabled, sources);
389 } 395 }
390 } 396 }
391 return new List<Source>.from(sources); 397 return new List<Source>.from(sources);
392 } 398 }
(...skipping 196 matching lines...)
589 } 595 }
590 596
591 @override 597 @override
592 CancelableFuture<CompilationUnit> computeResolvedCompilationUnitAsync( 598 CancelableFuture<CompilationUnit> computeResolvedCompilationUnitAsync(
593 Source unitSource, Source librarySource) { 599 Source unitSource, Source librarySource) {
594 if (!AnalysisEngine.isDartFileName(unitSource.shortName) || 600 if (!AnalysisEngine.isDartFileName(unitSource.shortName) ||
595 !AnalysisEngine.isDartFileName(librarySource.shortName)) { 601 !AnalysisEngine.isDartFileName(librarySource.shortName)) {
596 return new CancelableFuture.error(new AnalysisNotScheduledError()); 602 return new CancelableFuture.error(new AnalysisNotScheduledError());
597 } 603 }
598 return new _AnalysisFutureHelper<CompilationUnit>(this).computeAsync( 604 return new _AnalysisFutureHelper<CompilationUnit>(this).computeAsync(
599 new LibrarySpecificUnit(librarySource, unitSource), 605 new LibrarySpecificUnit(librarySource, unitSource), (CacheEntry entry) {
600 (cache.CacheEntry entry) {
601 CacheState state = entry.getState(RESOLVED_UNIT); 606 CacheState state = entry.getState(RESOLVED_UNIT);
602 if (state == CacheState.ERROR) { 607 if (state == CacheState.ERROR) {
603 throw entry.exception; 608 throw entry.exception;
604 } else if (state == CacheState.INVALID) { 609 } else if (state == CacheState.INVALID) {
605 return null; 610 return null;
606 } 611 }
607 return entry.getValue(RESOLVED_UNIT); 612 return entry.getValue(RESOLVED_UNIT);
608 }); 613 });
609 } 614 }
610 615
611 /** 616 /**
612 * Create an analysis cache based on the given source [factory]. 617 * Create an analysis cache based on the given source [factory].
613 */ 618 */
614 cache.AnalysisCache createCacheFromSourceFactory(SourceFactory factory) { 619 AnalysisCache createCacheFromSourceFactory(SourceFactory factory) {
615 if (factory == null) { 620 if (factory == null) {
616 return new cache.AnalysisCache(<cache.CachePartition>[_privatePartition]); 621 return new AnalysisCache(<CachePartition>[_privatePartition]);
617 } 622 }
618 DartSdk sdk = factory.dartSdk; 623 DartSdk sdk = factory.dartSdk;
619 if (sdk == null) { 624 if (sdk == null) {
620 return new cache.AnalysisCache(<cache.CachePartition>[_privatePartition]); 625 return new AnalysisCache(<CachePartition>[_privatePartition]);
621 } 626 }
622 return new cache.AnalysisCache(<cache.CachePartition>[ 627 return new AnalysisCache(<CachePartition>[
623 AnalysisEngine.instance.partitionManager_new.forSdk(sdk), 628 AnalysisEngine.instance.partitionManager_new.forSdk(sdk),
624 _privatePartition 629 _privatePartition
625 ]); 630 ]);
626 } 631 }
627 632
628 @override 633 @override
629 void dispose() { 634 void dispose() {
630 _disposed = true; 635 _disposed = true;
631 for (List<PendingFuture> pendingFutures in _pendingFutureTargets.values) { 636 for (List<PendingFuture> pendingFutures in _pendingFutureTargets.values) {
632 for (PendingFuture pendingFuture in pendingFutures) { 637 for (PendingFuture pendingFuture in pendingFutures) {
(...skipping 38 matching lines...)
671 if (source == null) { 676 if (source == null) {
672 return false; 677 return false;
673 } 678 }
674 if (_contentCache.getContents(source) != null) { 679 if (_contentCache.getContents(source) != null) {
675 return true; 680 return true;
676 } 681 }
677 return source.exists(); 682 return source.exists();
678 } 683 }
679 684
680 @override 685 @override
681 cache.CacheEntry getCacheEntry(AnalysisTarget target) { 686 CacheEntry getCacheEntry(AnalysisTarget target) {
682 cache.CacheEntry entry = _cache.get(target); 687 CacheEntry entry = _cache.get(target);
683 if (entry == null) { 688 if (entry == null) {
684 entry = new cache.CacheEntry(target); 689 entry = new CacheEntry(target);
685 _cache.put(entry); 690 _cache.put(entry);
686 } 691 }
687 return entry; 692 return entry;
688 } 693 }
689 694
690 @override 695 @override
691 CompilationUnitElement getCompilationUnitElement( 696 CompilationUnitElement getCompilationUnitElement(
692 Source unitSource, Source librarySource) { 697 Source unitSource, Source librarySource) {
693 AnalysisTarget target = new LibrarySpecificUnit(librarySource, unitSource); 698 AnalysisTarget target = new LibrarySpecificUnit(librarySource, unitSource);
694 return _cache.getValue(target, COMPILATION_UNIT_ELEMENT); 699 return _cache.getValue(target, COMPILATION_UNIT_ELEMENT);
(...skipping 81 matching lines...)
776 @override 781 @override
777 List<Source> getHtmlFilesReferencing(Source source) { 782 List<Source> getHtmlFilesReferencing(Source source) {
778 SourceKind sourceKind = getKindOf(source); 783 SourceKind sourceKind = getKindOf(source);
779 if (sourceKind == null) { 784 if (sourceKind == null) {
780 return Source.EMPTY_LIST; 785 return Source.EMPTY_LIST;
781 } 786 }
782 List<Source> htmlSources = new List<Source>(); 787 List<Source> htmlSources = new List<Source>();
783 while (true) { 788 while (true) {
784 if (sourceKind == SourceKind.PART) { 789 if (sourceKind == SourceKind.PART) {
785 List<Source> librarySources = getLibrariesContaining(source); 790 List<Source> librarySources = getLibrariesContaining(source);
786 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = 791 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
787 _cache.iterator();
788 while (iterator.moveNext()) { 792 while (iterator.moveNext()) {
789 cache.CacheEntry entry = iterator.value; 793 CacheEntry entry = iterator.value;
790 if (entry.getValue(SOURCE_KIND) == SourceKind.HTML) { 794 if (entry.getValue(SOURCE_KIND) == SourceKind.HTML) {
791 List<Source> referencedLibraries = 795 List<Source> referencedLibraries =
792 (entry as HtmlEntry).getValue(HtmlEntry.REFERENCED_LIBRARIES); 796 (entry as HtmlEntry).getValue(HtmlEntry.REFERENCED_LIBRARIES);
793 if (_containsAny(referencedLibraries, librarySources)) { 797 if (_containsAny(referencedLibraries, librarySources)) {
794 htmlSources.add(iterator.key); 798 htmlSources.add(iterator.key);
795 } 799 }
796 } 800 }
797 } 801 }
798 } else { 802 } else {
799 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = 803 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
800 _cache.iterator();
801 while (iterator.moveNext()) { 804 while (iterator.moveNext()) {
802 cache.CacheEntry entry = iterator.value; 805 CacheEntry entry = iterator.value;
803 if (entry.getValue(SOURCE_KIND) == SourceKind.HTML) { 806 if (entry.getValue(SOURCE_KIND) == SourceKind.HTML) {
804 List<Source> referencedLibraries = 807 List<Source> referencedLibraries =
805 (entry as HtmlEntry).getValue(HtmlEntry.REFERENCED_LIBRARIES); 808 (entry as HtmlEntry).getValue(HtmlEntry.REFERENCED_LIBRARIES);
806 if (_contains(referencedLibraries, source)) { 809 if (_contains(referencedLibraries, source)) {
807 htmlSources.add(iterator.key); 810 htmlSources.add(iterator.key);
808 } 811 }
809 } 812 }
810 } 813 }
811 } 814 }
812 break; 815 break;
(...skipping 15 matching lines...)
828 return SourceKind.UNKNOWN; 831 return SourceKind.UNKNOWN;
829 } 832 }
830 833
831 @override 834 @override
832 List<Source> getLibrariesContaining(Source source) { 835 List<Source> getLibrariesContaining(Source source) {
833 SourceKind kind = getKindOf(source); 836 SourceKind kind = getKindOf(source);
834 if (kind == SourceKind.LIBRARY) { 837 if (kind == SourceKind.LIBRARY) {
835 return <Source>[source]; 838 return <Source>[source];
836 } else if (kind == SourceKind.PART) { 839 } else if (kind == SourceKind.PART) {
837 List<Source> libraries = <Source>[]; 840 List<Source> libraries = <Source>[];
838 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = 841 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
839 _cache.iterator();
840 while (iterator.moveNext()) { 842 while (iterator.moveNext()) {
841 AnalysisTarget target = iterator.key; 843 AnalysisTarget target = iterator.key;
842 if (target is Source && getKindOf(target) == SourceKind.LIBRARY) { 844 if (target is Source && getKindOf(target) == SourceKind.LIBRARY) {
843 List<Source> parts = _cache.getValue(target, INCLUDED_PARTS); 845 List<Source> parts = _cache.getValue(target, INCLUDED_PARTS);
844 if (parts.contains(source)) { 846 if (parts.contains(source)) {
845 libraries.add(target); 847 libraries.add(target);
846 } 848 }
847 } 849 }
848 } 850 }
849 if (libraries.isNotEmpty) { 851 if (libraries.isNotEmpty) {
850 return libraries; 852 return libraries;
851 } 853 }
852 } 854 }
853 return Source.EMPTY_ARRAY; 855 return Source.EMPTY_ARRAY;
854 } 856 }
855 857
856 @override 858 @override
857 List<Source> getLibrariesDependingOn(Source librarySource) { 859 List<Source> getLibrariesDependingOn(Source librarySource) {
858 List<Source> dependentLibraries = new List<Source>(); 860 List<Source> dependentLibraries = new List<Source>();
859 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 861 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
860 while (iterator.moveNext()) { 862 while (iterator.moveNext()) {
861 cache.CacheEntry entry = iterator.value; 863 CacheEntry entry = iterator.value;
862 if (entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY) { 864 if (entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY) {
863 if (_contains(entry.getValue(EXPORTED_LIBRARIES), librarySource)) { 865 if (_contains(entry.getValue(EXPORTED_LIBRARIES), librarySource)) {
864 dependentLibraries.add(iterator.key); 866 dependentLibraries.add(iterator.key);
865 } 867 }
866 if (_contains(entry.getValue(IMPORTED_LIBRARIES), librarySource)) { 868 if (_contains(entry.getValue(IMPORTED_LIBRARIES), librarySource)) {
867 dependentLibraries.add(iterator.key); 869 dependentLibraries.add(iterator.key);
868 } 870 }
869 } 871 }
870 } 872 }
871 if (dependentLibraries.isEmpty) { 873 if (dependentLibraries.isEmpty) {
(...skipping 39 matching lines...)
911 // and it is not used with tasks - instead we compute export namespace once 913 // and it is not used with tasks - instead we compute export namespace once
912 // using BuildExportNamespaceTask and reuse in scopes. 914 // using BuildExportNamespaceTask and reuse in scopes.
913 NamespaceBuilder builder = new NamespaceBuilder(); 915 NamespaceBuilder builder = new NamespaceBuilder();
914 return builder.createPublicNamespaceForLibrary(library); 916 return builder.createPublicNamespaceForLibrary(library);
915 } 917 }
916 918
917 /** 919 /**
918 * Return the cache entry associated with the given [target], or `null` if 920 * Return the cache entry associated with the given [target], or `null` if
919 * there is no entry associated with the target. 921 * there is no entry associated with the target.
920 */ 922 */
921 cache.CacheEntry getReadableSourceEntryOrNull(AnalysisTarget target) => 923 CacheEntry getReadableSourceEntryOrNull(AnalysisTarget target) =>
922 _cache.get(target); 924 _cache.get(target);
923 925
924 @override 926 @override
925 CompilationUnit getResolvedCompilationUnit( 927 CompilationUnit getResolvedCompilationUnit(
926 Source unitSource, LibraryElement library) { 928 Source unitSource, LibraryElement library) {
927 if (library == null || 929 if (library == null ||
928 !AnalysisEngine.isDartFileName(unitSource.shortName)) { 930 !AnalysisEngine.isDartFileName(unitSource.shortName)) {
929 return null; 931 return null;
930 } 932 }
931 return getResolvedCompilationUnit2(unitSource, library.source); 933 return getResolvedCompilationUnit2(unitSource, library.source);
(...skipping 17 matching lines...)
949 // if (sourceEntry is HtmlEntry) { 951 // if (sourceEntry is HtmlEntry) {
950 // HtmlEntry htmlEntry = sourceEntry; 952 // HtmlEntry htmlEntry = sourceEntry;
951 // return htmlEntry.getValue(HtmlEntry.RESOLVED_UNIT); 953 // return htmlEntry.getValue(HtmlEntry.RESOLVED_UNIT);
952 // } 954 // }
953 return null; 955 return null;
954 } 956 }
955 957
956 @override 958 @override
957 List<Source> getSourcesWithFullName(String path) { 959 List<Source> getSourcesWithFullName(String path) {
958 List<Source> sources = <Source>[]; 960 List<Source> sources = <Source>[];
959 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 961 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
960 while (iterator.moveNext()) { 962 while (iterator.moveNext()) {
961 AnalysisTarget target = iterator.key; 963 AnalysisTarget target = iterator.key;
962 if (target is Source && target.fullName == path) { 964 if (target is Source && target.fullName == path) {
963 sources.add(target); 965 sources.add(target);
964 } 966 }
965 } 967 }
966 return sources; 968 return sources;
967 } 969 }
968 970
969 @override 971 @override
970 bool handleContentsChanged( 972 bool handleContentsChanged(
971 Source source, String originalContents, String newContents, bool notify) { 973 Source source, String originalContents, String newContents, bool notify) {
972 cache.CacheEntry entry = _cache.get(source); 974 CacheEntry entry = _cache.get(source);
973 if (entry == null) { 975 if (entry == null) {
974 return false; 976 return false;
975 } 977 }
976 bool changed = newContents != originalContents; 978 bool changed = newContents != originalContents;
977 if (newContents != null) { 979 if (newContents != null) {
978 if (newContents != originalContents) { 980 if (newContents != originalContents) {
979 _incrementalAnalysisCache = 981 _incrementalAnalysisCache =
980 IncrementalAnalysisCache.clear(_incrementalAnalysisCache, source); 982 IncrementalAnalysisCache.clear(_incrementalAnalysisCache, source);
981 if (!analysisOptions.incremental || 983 if (!analysisOptions.incremental ||
982 !_tryPoorMansIncrementalResolution(source, newContents)) { 984 !_tryPoorMansIncrementalResolution(source, newContents)) {
983 _sourceChanged(source); 985 _sourceChanged(source);
984 } 986 }
985 entry.modificationTime = _contentCache.getModificationStamp(source); 987 entry.modificationTime = _contentCache.getModificationStamp(source);
986 entry.setValue(CONTENT, newContents, cache.TargetedResult.EMPTY_LIST); 988 entry.setValue(CONTENT, newContents, TargetedResult.EMPTY_LIST);
987 } else { 989 } else {
988 entry.modificationTime = _contentCache.getModificationStamp(source); 990 entry.modificationTime = _contentCache.getModificationStamp(source);
989 } 991 }
990 } else if (originalContents != null) { 992 } else if (originalContents != null) {
991 _incrementalAnalysisCache = 993 _incrementalAnalysisCache =
992 IncrementalAnalysisCache.clear(_incrementalAnalysisCache, source); 994 IncrementalAnalysisCache.clear(_incrementalAnalysisCache, source);
993 changed = newContents != originalContents; 995 changed = newContents != originalContents;
994 // We are removing the overlay for the file, check if the file's 996 // We are removing the overlay for the file, check if the file's
995 // contents is the same as it was in the overlay. 997 // contents is the same as it was in the overlay.
996 try { 998 try {
997 TimestampedData<String> fileContents = getContents(source); 999 TimestampedData<String> fileContents = getContents(source);
998 String fileContentsData = fileContents.data; 1000 String fileContentsData = fileContents.data;
999 if (fileContentsData == originalContents) { 1001 if (fileContentsData == originalContents) {
1000 entry.setValue( 1002 entry.setValue(CONTENT, fileContentsData, TargetedResult.EMPTY_LIST);
1001 CONTENT, fileContentsData, cache.TargetedResult.EMPTY_LIST);
1002 entry.modificationTime = fileContents.modificationTime; 1003 entry.modificationTime = fileContents.modificationTime;
1003 changed = false; 1004 changed = false;
1004 } 1005 }
1005 } catch (e) {} 1006 } catch (e) {}
1006 // If not the same content (e.g. the file is being closed without save), 1007 // If not the same content (e.g. the file is being closed without save),
1007 // then force analysis. 1008 // then force analysis.
1008 if (changed) { 1009 if (changed) {
1009 _sourceChanged(source); 1010 _sourceChanged(source);
1010 } 1011 }
1011 } 1012 }
1012 if (notify && changed) { 1013 if (notify && changed) {
1013 _onSourcesChangedController 1014 _onSourcesChangedController
1014 .add(new SourcesChangedEvent.changedContent(source, newContents)); 1015 .add(new SourcesChangedEvent.changedContent(source, newContents));
1015 } 1016 }
1016 return changed; 1017 return changed;
1017 } 1018 }
1018 1019
1019 @override 1020 @override
1020 bool isClientLibrary(Source librarySource) { 1021 bool isClientLibrary(Source librarySource) {
1021 cache.CacheEntry entry = _cache.get(librarySource); 1022 CacheEntry entry = _cache.get(librarySource);
1022 return entry.getValue(IS_CLIENT) && entry.getValue(IS_LAUNCHABLE); 1023 return entry.getValue(IS_CLIENT) && entry.getValue(IS_LAUNCHABLE);
1023 } 1024 }
1024 1025
1025 @override 1026 @override
1026 bool isServerLibrary(Source librarySource) { 1027 bool isServerLibrary(Source librarySource) {
1027 cache.CacheEntry entry = _cache.get(librarySource); 1028 CacheEntry entry = _cache.get(librarySource);
1028 return !entry.getValue(IS_CLIENT) && entry.getValue(IS_LAUNCHABLE); 1029 return !entry.getValue(IS_CLIENT) && entry.getValue(IS_LAUNCHABLE);
1029 } 1030 }
1030 1031
1031 @override 1032 @override
1032 CompilationUnit parseCompilationUnit(Source source) { 1033 CompilationUnit parseCompilationUnit(Source source) {
1033 if (!AnalysisEngine.isDartFileName(source.shortName)) { 1034 if (!AnalysisEngine.isDartFileName(source.shortName)) {
1034 return null; 1035 return null;
1035 } 1036 }
1036 return _computeResult(source, PARSED_UNIT); 1037 return _computeResult(source, PARSED_UNIT);
1037 } 1038 }
(...skipping 25 matching lines...)
1063 return new AnalysisResult(notices, -1, '', -1); 1064 return new AnalysisResult(notices, -1, '', -1);
1064 }); 1065 });
1065 } 1066 }
1066 1067
1067 @override 1068 @override
1068 void recordLibraryElements(Map<Source, LibraryElement> elementMap) { 1069 void recordLibraryElements(Map<Source, LibraryElement> elementMap) {
1069 elementMap.forEach((Source librarySource, LibraryElement library) { 1070 elementMap.forEach((Source librarySource, LibraryElement library) {
1070 // 1071 //
1071 // Cache the element in the library's info. 1072 // Cache the element in the library's info.
1072 // 1073 //
1073 cache.CacheEntry entry = getCacheEntry(librarySource); 1074 CacheEntry entry = getCacheEntry(librarySource);
1074 setValue(ResultDescriptor result, value) { 1075 setValue(ResultDescriptor result, value) {
1075 entry.setValue(result, value, cache.TargetedResult.EMPTY_LIST); 1076 entry.setValue(result, value, TargetedResult.EMPTY_LIST);
1076 } 1077 }
1077 setValue(BUILD_DIRECTIVES_ERRORS, AnalysisError.NO_ERRORS); 1078 setValue(BUILD_DIRECTIVES_ERRORS, AnalysisError.NO_ERRORS);
1078 setValue(BUILD_FUNCTION_TYPE_ALIASES_ERRORS, AnalysisError.NO_ERRORS); 1079 setValue(BUILD_FUNCTION_TYPE_ALIASES_ERRORS, AnalysisError.NO_ERRORS);
1079 setValue(BUILD_LIBRARY_ERRORS, AnalysisError.NO_ERRORS); 1080 setValue(BUILD_LIBRARY_ERRORS, AnalysisError.NO_ERRORS);
1080 // CLASS_ELEMENTS 1081 // CLASS_ELEMENTS
1081 setValue(COMPILATION_UNIT_ELEMENT, library.definingCompilationUnit); 1082 setValue(COMPILATION_UNIT_ELEMENT, library.definingCompilationUnit);
1082 // CONSTRUCTORS 1083 // CONSTRUCTORS
1083 // CONSTRUCTORS_ERRORS 1084 // CONSTRUCTORS_ERRORS
1084 entry.setState(CONTENT, CacheState.FLUSHED); 1085 entry.setState(CONTENT, CacheState.FLUSHED);
1085 setValue(EXPORTED_LIBRARIES, Source.EMPTY_LIST); 1086 setValue(EXPORTED_LIBRARIES, Source.EMPTY_LIST);
(...skipping 28 matching lines...)
1114 entry.setState(RESOLVED_UNIT1, CacheState.FLUSHED); 1115 entry.setState(RESOLVED_UNIT1, CacheState.FLUSHED);
1115 entry.setState(RESOLVED_UNIT2, CacheState.FLUSHED); 1116 entry.setState(RESOLVED_UNIT2, CacheState.FLUSHED);
1116 entry.setState(RESOLVED_UNIT3, CacheState.FLUSHED); 1117 entry.setState(RESOLVED_UNIT3, CacheState.FLUSHED);
1117 entry.setState(RESOLVED_UNIT4, CacheState.FLUSHED); 1118 entry.setState(RESOLVED_UNIT4, CacheState.FLUSHED);
1118 entry.setState(RESOLVED_UNIT5, CacheState.FLUSHED); 1119 entry.setState(RESOLVED_UNIT5, CacheState.FLUSHED);
1119 // USED_IMPORTED_ELEMENTS 1120 // USED_IMPORTED_ELEMENTS
1120 // USED_LOCAL_ELEMENTS 1121 // USED_LOCAL_ELEMENTS
1121 setValue(VERIFY_ERRORS, AnalysisError.NO_ERRORS); 1122 setValue(VERIFY_ERRORS, AnalysisError.NO_ERRORS);
1122 }); 1123 });
1123 1124
1124 cache.CacheEntry entry = getCacheEntry(AnalysisContextTarget.request); 1125 CacheEntry entry = getCacheEntry(AnalysisContextTarget.request);
1125 entry.setValue( 1126 entry.setValue(TYPE_PROVIDER, typeProvider, TargetedResult.EMPTY_LIST);
1126 TYPE_PROVIDER, typeProvider, cache.TargetedResult.EMPTY_LIST);
1127 } 1127 }
1128 1128
1129 @override 1129 @override
1130 void removeListener(AnalysisListener listener) { 1130 void removeListener(AnalysisListener listener) {
1131 _listeners.remove(listener); 1131 _listeners.remove(listener);
1132 } 1132 }
1133 1133
1134 @override 1134 @override
1135 CompilationUnit resolveCompilationUnit( 1135 CompilationUnit resolveCompilationUnit(
1136 Source unitSource, LibraryElement library) { 1136 Source unitSource, LibraryElement library) {
(...skipping 106 matching lines...)
1243 */ 1243 */
1244 void visitContentCache(ContentCacheVisitor visitor) { 1244 void visitContentCache(ContentCacheVisitor visitor) {
1245 _contentCache.accept(visitor); 1245 _contentCache.accept(visitor);
1246 } 1246 }
1247 1247
1248 /** 1248 /**
1249 * Add all of the sources contained in the given source [container] to the 1249 * Add all of the sources contained in the given source [container] to the
1250 * given list of [sources]. 1250 * given list of [sources].
1251 */ 1251 */
1252 void _addSourcesInContainer(List<Source> sources, SourceContainer container) { 1252 void _addSourcesInContainer(List<Source> sources, SourceContainer container) {
1253 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 1253 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
1254 while (iterator.moveNext()) { 1254 while (iterator.moveNext()) {
1255 Source source = iterator.key; 1255 Source source = iterator.key;
1256 if (container.contains(source)) { 1256 if (container.contains(source)) {
1257 sources.add(source); 1257 sources.add(source);
1258 } 1258 }
1259 } 1259 }
1260 } 1260 }
1261 1261
1262 /** 1262 /**
1263 * Remove the given [pendingFuture] from [_pendingFutureTargets], since the 1263 * Remove the given [pendingFuture] from [_pendingFutureTargets], since the
1264 * client has indicated its computation is not needed anymore. 1264 * client has indicated its computation is not needed anymore.
1265 */ 1265 */
1266 void _cancelFuture(PendingFuture pendingFuture) { 1266 void _cancelFuture(PendingFuture pendingFuture) {
1267 List<PendingFuture> pendingFutures = 1267 List<PendingFuture> pendingFutures =
1268 _pendingFutureTargets[pendingFuture.target]; 1268 _pendingFutureTargets[pendingFuture.target];
1269 if (pendingFutures != null) { 1269 if (pendingFutures != null) {
1270 pendingFutures.remove(pendingFuture); 1270 pendingFutures.remove(pendingFuture);
1271 if (pendingFutures.isEmpty) { 1271 if (pendingFutures.isEmpty) {
1272 _pendingFutureTargets.remove(pendingFuture.target); 1272 _pendingFutureTargets.remove(pendingFuture.target);
1273 } 1273 }
1274 } 1274 }
1275 } 1275 }
1276 1276
1277 Object /*V*/ _computeResult( 1277 Object /*V*/ _computeResult(
1278 AnalysisTarget target, ResultDescriptor /*<V>*/ descriptor) { 1278 AnalysisTarget target, ResultDescriptor /*<V>*/ descriptor) {
1279 cache.CacheEntry entry = getCacheEntry(target); 1279 CacheEntry entry = getCacheEntry(target);
1280 CacheState state = entry.getState(descriptor); 1280 CacheState state = entry.getState(descriptor);
1281 if (state == CacheState.FLUSHED || state == CacheState.INVALID) { 1281 if (state == CacheState.FLUSHED || state == CacheState.INVALID) {
1282 driver.computeResult(target, descriptor); 1282 driver.computeResult(target, descriptor);
1283 } 1283 }
1284 state = entry.getState(descriptor); 1284 state = entry.getState(descriptor);
1285 if (state == CacheState.ERROR) { 1285 if (state == CacheState.ERROR) {
1286 throw new AnalysisException( 1286 throw new AnalysisException(
1287 'Cannot compute $descriptor for $target', entry.exception); 1287 'Cannot compute $descriptor for $target', entry.exception);
1288 } 1288 }
1289 return entry.getValue(descriptor); 1289 return entry.getValue(descriptor);
(...skipping 46 matching lines...)
1336 if (contents != null) { 1336 if (contents != null) {
1337 if (contents != originalContents) { 1337 if (contents != originalContents) {
1338 // TODO(brianwilkerson) Find a better way to do incremental analysis. 1338 // TODO(brianwilkerson) Find a better way to do incremental analysis.
1339 // if (_options.incremental) { 1339 // if (_options.incremental) {
1340 // _incrementalAnalysisCache = IncrementalAnalysisCache.update( 1340 // _incrementalAnalysisCache = IncrementalAnalysisCache.update(
1341 // _incrementalAnalysisCache, source, originalContents, contents, 1341 // _incrementalAnalysisCache, source, originalContents, contents,
1342 // offset, oldLength, newLength, _cache.get(source)); 1342 // offset, oldLength, newLength, _cache.get(source));
1343 // } 1343 // }
1344 _sourceChanged(source); 1344 _sourceChanged(source);
1345 changed = true; 1345 changed = true;
1346 cache.CacheEntry entry = _cache.get(source); 1346 CacheEntry entry = _cache.get(source);
1347 if (entry != null) { 1347 if (entry != null) {
1348 entry.modificationTime = _contentCache.getModificationStamp(source); 1348 entry.modificationTime = _contentCache.getModificationStamp(source);
1349 entry.setValue(CONTENT, contents, cache.TargetedResult.EMPTY_LIST); 1349 entry.setValue(CONTENT, contents, TargetedResult.EMPTY_LIST);
1350 } 1350 }
1351 } 1351 }
1352 } else if (originalContents != null) { 1352 } else if (originalContents != null) {
1353 _incrementalAnalysisCache = 1353 _incrementalAnalysisCache =
1354 IncrementalAnalysisCache.clear(_incrementalAnalysisCache, source); 1354 IncrementalAnalysisCache.clear(_incrementalAnalysisCache, source);
1355 _sourceChanged(source); 1355 _sourceChanged(source);
1356 changed = true; 1356 changed = true;
1357 } 1357 }
1358 return changed; 1358 return changed;
1359 } 1359 }
1360 1360
1361 /** 1361 /**
1362 * Set the contents of the given [source] to the given [contents] and mark the 1362 * Set the contents of the given [source] to the given [contents] and mark the
1363 * source as having changed. This has the effect of overriding the default 1363 * source as having changed. This has the effect of overriding the default
1364 * contents of the source. If the contents are `null` the override is removed 1364 * contents of the source. If the contents are `null` the override is removed
1365 * so that the default contents will be returned. If [notify] is true, a 1365 * so that the default contents will be returned. If [notify] is true, a
1366 * source changed event is triggered. 1366 * source changed event is triggered.
1367 */ 1367 */
1368 void _contentsChanged(Source source, String contents, bool notify) { 1368 void _contentsChanged(Source source, String contents, bool notify) {
1369 String originalContents = _contentCache.setContents(source, contents); 1369 String originalContents = _contentCache.setContents(source, contents);
1370 handleContentsChanged(source, originalContents, contents, notify); 1370 handleContentsChanged(source, originalContents, contents, notify);
1371 } 1371 }
1372 1372
1373 /** 1373 /**
1374 * Create a cache entry for the given [source]. The source was explicitly 1374 * Create a cache entry for the given [source]. The source was explicitly
1375 * added to this context if [explicitlyAdded] is `true`. Return the cache 1375 * added to this context if [explicitlyAdded] is `true`. Return the cache
1376 * entry that was created. 1376 * entry that was created.
1377 */ 1377 */
1378 cache.CacheEntry _createCacheEntry(Source source, bool explicitlyAdded) { 1378 CacheEntry _createCacheEntry(Source source, bool explicitlyAdded) {
1379 cache.CacheEntry entry = new cache.CacheEntry(source); 1379 CacheEntry entry = new CacheEntry(source);
1380 entry.modificationTime = getModificationStamp(source); 1380 entry.modificationTime = getModificationStamp(source);
1381 entry.explicitlyAdded = explicitlyAdded; 1381 entry.explicitlyAdded = explicitlyAdded;
1382 _cache.put(entry); 1382 _cache.put(entry);
1383 return entry; 1383 return entry;
1384 } 1384 }
1385 1385
1386 /** 1386 /**
1387 * Return a list containing all of the cache entries for targets associated 1387 * Return a list containing all of the cache entries for targets associated
1388 * with the given [source]. 1388 * with the given [source].
1389 */ 1389 */
1390 List<cache.CacheEntry> _entriesFor(Source source) { 1390 List<CacheEntry> _entriesFor(Source source) {
1391 List<cache.CacheEntry> entries = <cache.CacheEntry>[]; 1391 List<CacheEntry> entries = <CacheEntry>[];
1392 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 1392 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
1393 while (iterator.moveNext()) { 1393 while (iterator.moveNext()) {
1394 if (iterator.key.source == source) { 1394 if (iterator.key.source == source) {
1395 entries.add(iterator.value); 1395 entries.add(iterator.value);
1396 } 1396 }
1397 } 1397 }
1398 return entries; 1398 return entries;
1399 } 1399 }
1400 1400
1401 /** 1401 /**
1402 * Return a list containing all of the change notices that are waiting to be 1402 * Return a list containing all of the change notices that are waiting to be
(...skipping 25 matching lines...)
1428 } 1428 }
1429 return notice; 1429 return notice;
1430 } 1430 }
1431 1431
1432 /** 1432 /**
1433 * Return a list containing all of the sources known to this context that have 1433 * Return a list containing all of the sources known to this context that have
1434 * the given [kind]. 1434 * the given [kind].
1435 */ 1435 */
1436 List<Source> _getSources(SourceKind kind) { 1436 List<Source> _getSources(SourceKind kind) {
1437 List<Source> sources = new List<Source>(); 1437 List<Source> sources = new List<Source>();
1438 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 1438 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
1439 while (iterator.moveNext()) { 1439 while (iterator.moveNext()) {
1440 if (iterator.value.getValue(SOURCE_KIND) == kind && 1440 if (iterator.value.getValue(SOURCE_KIND) == kind &&
1441 iterator.key is Source) { 1441 iterator.key is Source) {
1442 sources.add(iterator.key); 1442 sources.add(iterator.key);
1443 } 1443 }
1444 } 1444 }
1445 return sources; 1445 return sources;
1446 } 1446 }
1447 1447
1448 /** 1448 /**
1449 * Look at the given [source] to see whether a task needs to be performed 1449 * Look at the given [source] to see whether a task needs to be performed
1450 * related to it. If so, add the source to the set of sources that need to be 1450 * related to it. If so, add the source to the set of sources that need to be
1451 * processed. This method is intended to be used for testing purposes only. 1451 * processed. This method is intended to be used for testing purposes only.
1452 */ 1452 */
1453 void _getSourcesNeedingProcessing(Source source, cache.CacheEntry entry, 1453 void _getSourcesNeedingProcessing(Source source, CacheEntry entry,
1454 bool isPriority, bool hintsEnabled, bool lintsEnabled, 1454 bool isPriority, bool hintsEnabled, bool lintsEnabled,
1455 HashSet<Source> sources) { 1455 HashSet<Source> sources) {
1456 CacheState state = entry.getState(CONTENT); 1456 CacheState state = entry.getState(CONTENT);
1457 if (state == CacheState.INVALID || 1457 if (state == CacheState.INVALID ||
1458 (isPriority && state == CacheState.FLUSHED)) { 1458 (isPriority && state == CacheState.FLUSHED)) {
1459 sources.add(source); 1459 sources.add(source);
1460 return; 1460 return;
1461 } else if (state == CacheState.ERROR) { 1461 } else if (state == CacheState.ERROR) {
1462 return; 1462 return;
1463 } 1463 }
(...skipping 23 matching lines...) Expand all
1487 } else if (state == CacheState.ERROR) { 1487 } else if (state == CacheState.ERROR) {
1488 return; 1488 return;
1489 } 1489 }
1490 // if (isPriority) { 1490 // if (isPriority) {
1491 // if (!entry.hasResolvableCompilationUnit) { 1491 // if (!entry.hasResolvableCompilationUnit) {
1492 // sources.add(source); 1492 // sources.add(source);
1493 // return; 1493 // return;
1494 // } 1494 // }
1495 // } 1495 // }
1496 for (Source librarySource in getLibrariesContaining(source)) { 1496 for (Source librarySource in getLibrariesContaining(source)) {
1497 cache.CacheEntry libraryEntry = _cache.get(librarySource); 1497 CacheEntry libraryEntry = _cache.get(librarySource);
1498 state = libraryEntry.getState(LIBRARY_ELEMENT); 1498 state = libraryEntry.getState(LIBRARY_ELEMENT);
1499 if (state == CacheState.INVALID || 1499 if (state == CacheState.INVALID ||
1500 (isPriority && state == CacheState.FLUSHED)) { 1500 (isPriority && state == CacheState.FLUSHED)) {
1501 sources.add(source); 1501 sources.add(source);
1502 return; 1502 return;
1503 } else if (state == CacheState.ERROR) { 1503 } else if (state == CacheState.ERROR) {
1504 return; 1504 return;
1505 } 1505 }
1506 cache.CacheEntry unitEntry = 1506 CacheEntry unitEntry =
1507 _cache.get(new LibrarySpecificUnit(librarySource, source)); 1507 _cache.get(new LibrarySpecificUnit(librarySource, source));
1508 state = unitEntry.getState(RESOLVED_UNIT); 1508 state = unitEntry.getState(RESOLVED_UNIT);
1509 if (state == CacheState.INVALID || 1509 if (state == CacheState.INVALID ||
1510 (isPriority && state == CacheState.FLUSHED)) { 1510 (isPriority && state == CacheState.FLUSHED)) {
1511 sources.add(source); 1511 sources.add(source);
1512 return; 1512 return;
1513 } else if (state == CacheState.ERROR) { 1513 } else if (state == CacheState.ERROR) {
1514 return; 1514 return;
1515 } 1515 }
1516 if (_shouldErrorsBeAnalyzed(source, unitEntry)) { 1516 if (_shouldErrorsBeAnalyzed(source, unitEntry)) {
(...skipping 129 matching lines...)
1646 } 1646 }
1647 if (newOrder.length < count) { 1647 if (newOrder.length < count) {
1648 analysisPriorityOrder = newOrder; 1648 analysisPriorityOrder = newOrder;
1649 } 1649 }
1650 } 1650 }
1651 1651
1652 /** 1652 /**
1653 * Return `true` if errors should be produced for the given [source]. The 1653 * Return `true` if errors should be produced for the given [source]. The
1654 * [entry] associated with the source is passed in for efficiency. 1654 * [entry] associated with the source is passed in for efficiency.
1655 */ 1655 */
1656 bool _shouldErrorsBeAnalyzed(Source source, cache.CacheEntry entry) { 1656 bool _shouldErrorsBeAnalyzed(Source source, CacheEntry entry) {
1657 if (source.isInSystemLibrary) { 1657 if (source.isInSystemLibrary) {
1658 return _options.generateSdkErrors; 1658 return _options.generateSdkErrors;
1659 } else if (!entry.explicitlyAdded) { 1659 } else if (!entry.explicitlyAdded) {
1660 return _options.generateImplicitErrors; 1660 return _options.generateImplicitErrors;
1661 } else { 1661 } else {
1662 return true; 1662 return true;
1663 } 1663 }
1664 } 1664 }
1665 1665
1666 /** 1666 /**
1667 * Create an entry for the newly added [source] and invalidate any sources 1667 * Create an entry for the newly added [source] and invalidate any sources
1668 * that referenced the source before it existed. 1668 * that referenced the source before it existed.
1669 */ 1669 */
1670 void _sourceAvailable(Source source) { 1670 void _sourceAvailable(Source source) {
1671 cache.CacheEntry entry = _cache.get(source); 1671 CacheEntry entry = _cache.get(source);
1672 if (entry == null) { 1672 if (entry == null) {
1673 _createCacheEntry(source, true); 1673 _createCacheEntry(source, true);
1674 } else { 1674 } else {
1675 // TODO(brianwilkerson) Implement this. 1675 // TODO(brianwilkerson) Implement this.
1676 // _propagateInvalidation(source, entry); 1676 // _propagateInvalidation(source, entry);
1677 } 1677 }
1678 } 1678 }
1679 1679
1680 /** 1680 /**
1681 * Invalidate the [source] that was changed and any sources that referenced 1681 * Invalidate the [source] that was changed and any sources that referenced
1682 * the source before it existed. 1682 * the source before it existed.
1683 */ 1683 */
1684 void _sourceChanged(Source source) { 1684 void _sourceChanged(Source source) {
1685 cache.CacheEntry entry = _cache.get(source); 1685 CacheEntry entry = _cache.get(source);
1686 // If the source is removed, we don't care about it. 1686 // If the source is removed, we don't care about it.
1687 if (entry == null) { 1687 if (entry == null) {
1688 return; 1688 return;
1689 } 1689 }
1690 // Check whether the content of the source is the same as it was the last 1690 // Check whether the content of the source is the same as it was the last
1691 // time. 1691 // time.
1692 String sourceContent = entry.getValue(CONTENT); 1692 String sourceContent = entry.getValue(CONTENT);
1693 if (sourceContent != null) { 1693 if (sourceContent != null) {
1694 entry.setState(CONTENT, CacheState.FLUSHED); 1694 entry.setState(CONTENT, CacheState.FLUSHED);
1695 try { 1695 try {
1696 TimestampedData<String> fileContents = getContents(source); 1696 TimestampedData<String> fileContents = getContents(source);
1697 if (fileContents.data == sourceContent) { 1697 if (fileContents.data == sourceContent) {
1698 int time = fileContents.modificationTime; 1698 int time = fileContents.modificationTime;
1699 for (cache.CacheEntry entry in _entriesFor(source)) { 1699 for (CacheEntry entry in _entriesFor(source)) {
1700 entry.modificationTime = time; 1700 entry.modificationTime = time;
1701 } 1701 }
1702 return; 1702 return;
1703 } 1703 }
1704 } catch (e) {} 1704 } catch (e) {}
1705 } 1705 }
1706 // We need to invalidate the cache. 1706 // We need to invalidate the cache.
1707 // TODO(brianwilkerson) Implement this. 1707 // TODO(brianwilkerson) Implement this.
1708 // _propagateInvalidation(source, entry); 1708 // _propagateInvalidation(source, entry);
1709 } 1709 }
(...skipping 104 matching lines...)
1814 /** 1814 /**
1815 * Check the cache for any invalid entries (entries whose modification time 1815 * Check the cache for any invalid entries (entries whose modification time
1816 * does not match the modification time of the source associated with the 1816 * does not match the modification time of the source associated with the
1817 * entry). Invalid entries will be marked as invalid so that the source will 1817 * entry). Invalid entries will be marked as invalid so that the source will
1818 * be re-analyzed. Return `true` if at least one entry was invalid. 1818 * be re-analyzed. Return `true` if at least one entry was invalid.
1819 */ 1819 */
1820 bool _validateCacheConsistency() { 1820 bool _validateCacheConsistency() {
1821 int consistencyCheckStart = JavaSystem.nanoTime(); 1821 int consistencyCheckStart = JavaSystem.nanoTime();
1822 HashSet<Source> changedSources = new HashSet<Source>(); 1822 HashSet<Source> changedSources = new HashSet<Source>();
1823 HashSet<Source> missingSources = new HashSet<Source>(); 1823 HashSet<Source> missingSources = new HashSet<Source>();
1824 MapIterator<AnalysisTarget, cache.CacheEntry> iterator = _cache.iterator(); 1824 MapIterator<AnalysisTarget, CacheEntry> iterator = _cache.iterator();
1825 while (iterator.moveNext()) { 1825 while (iterator.moveNext()) {
1826 Source source = iterator.key.source; 1826 Source source = iterator.key.source;
1827 if (source != null) { 1827 if (source != null) {
1828 cache.CacheEntry entry = iterator.value; 1828 CacheEntry entry = iterator.value;
1829 int sourceTime = getModificationStamp(source); 1829 int sourceTime = getModificationStamp(source);
1830 if (sourceTime != entry.modificationTime) { 1830 if (sourceTime != entry.modificationTime) {
1831 changedSources.add(source); 1831 changedSources.add(source);
1832 } 1832 }
1833 if (entry.exception != null) { 1833 if (entry.exception != null) {
1834 if (!exists(source)) { 1834 if (!exists(source)) {
1835 missingSources.add(source); 1835 missingSources.add(source);
1836 } 1836 }
1837 } 1837 }
1838 } 1838 }
(...skipping 34 matching lines...)
1873 } 1873 }
1874 1874
1875 /** 1875 /**
1876 * An object that manages the partitions that can be shared between analysis 1876 * An object that manages the partitions that can be shared between analysis
1877 * contexts. 1877 * contexts.
1878 */ 1878 */
1879 class PartitionManager { 1879 class PartitionManager {
1880 /** 1880 /**
1881 * A table mapping SDK's to the partitions used for those SDK's. 1881 * A table mapping SDK's to the partitions used for those SDK's.
1882 */ 1882 */
1883 HashMap<DartSdk, cache.SdkCachePartition> _sdkPartitions = 1883 HashMap<DartSdk, SdkCachePartition> _sdkPartitions =
1884 new HashMap<DartSdk, cache.SdkCachePartition>(); 1884 new HashMap<DartSdk, SdkCachePartition>();
1885 1885
1886 /** 1886 /**
1887 * Clear any cached data being maintained by this manager. 1887 * Clear any cached data being maintained by this manager.
1888 */ 1888 */
1889 void clearCache() { 1889 void clearCache() {
1890 _sdkPartitions.clear(); 1890 _sdkPartitions.clear();
1891 } 1891 }
1892 1892
1893 /** 1893 /**
1894 * Return the partition being used for the given [sdk], creating the partition 1894 * Return the partition being used for the given [sdk], creating the partition
1895 * if necessary. 1895 * if necessary.
1896 */ 1896 */
1897 cache.SdkCachePartition forSdk(DartSdk sdk) { 1897 SdkCachePartition forSdk(DartSdk sdk) {
1898 // Call sdk.context now, because when it creates a new 1898 // Call sdk.context now, because when it creates a new
1899 // InternalAnalysisContext instance, it calls forSdk() again, so creates an 1899 // InternalAnalysisContext instance, it calls forSdk() again, so creates an
1900 // SdkCachePartition instance. 1900 // SdkCachePartition instance.
1901 // So, if we initialize context after "partition == null", we end up 1901 // So, if we initialize context after "partition == null", we end up
1902 // with two SdkCachePartition instances. 1902 // with two SdkCachePartition instances.
1903 InternalAnalysisContext sdkContext = sdk.context; 1903 InternalAnalysisContext sdkContext = sdk.context;
1904 // Check cache for an existing partition. 1904 // Check cache for an existing partition.
1905 cache.SdkCachePartition partition = _sdkPartitions[sdk]; 1905 SdkCachePartition partition = _sdkPartitions[sdk];
1906 if (partition == null) { 1906 if (partition == null) {
1907 partition = new cache.SdkCachePartition(sdkContext); 1907 partition = new SdkCachePartition(sdkContext);
1908 _sdkPartitions[sdk] = partition; 1908 _sdkPartitions[sdk] = partition;
1909 } 1909 }
1910 return partition; 1910 return partition;
1911 } 1911 }
1912 } 1912 }
1913 1913
1914 /** 1914 /**
1915 * Representation of a pending computation which is based on the results of 1915 * Representation of a pending computation which is based on the results of
1916 * analysis that may or may not have been completed. 1916 * analysis that may or may not have been completed.
1917 */ 1917 */
(...skipping 33 matching lines...)
1951 * the pending future if it's appropriate to do so. If the pending future is 1951 * the pending future if it's appropriate to do so. If the pending future is
1952 * completed by this call, true is returned; otherwise false is returned. 1952 * completed by this call, true is returned; otherwise false is returned.
1953 * 1953 *
1954 * Once this function has returned true, it should not be called again. 1954 * Once this function has returned true, it should not be called again.
1955 * 1955 *
1956 * Other than completing the future, this method is free of side effects. 1956 * Other than completing the future, this method is free of side effects.
1957 * Note that any code the client has attached to the future will be executed 1957 * Note that any code the client has attached to the future will be executed
1958 * in a microtask, so there is no danger of side effects occurring due to 1958 * in a microtask, so there is no danger of side effects occurring due to
1959 * client callbacks. 1959 * client callbacks.
1960 */ 1960 */
1961 bool evaluate(cache.CacheEntry entry) { 1961 bool evaluate(CacheEntry entry) {
1962 assert(!_completer.isCompleted); 1962 assert(!_completer.isCompleted);
1963 try { 1963 try {
1964 T result = _computeValue(entry); 1964 T result = _computeValue(entry);
1965 if (result == null) { 1965 if (result == null) {
1966 return false; 1966 return false;
1967 } else { 1967 } else {
1968 _completer.complete(result); 1968 _completer.complete(result);
1969 return true; 1969 return true;
1970 } 1970 }
1971 } catch (exception, stackTrace) { 1971 } catch (exception, stackTrace) {
(...skipping 39 matching lines...)
2011 * 2011 *
2012 * If the [computeValue] still returns `null` after there is no further 2012 * If the [computeValue] still returns `null` after there is no further
2013 * analysis to be done for [target], then the future will be completed with 2013 * analysis to be done for [target], then the future will be completed with
2014 * the error AnalysisNotScheduledError. 2014 * the error AnalysisNotScheduledError.
2015 * 2015 *
2016 * Since [computeValue] will be called while the state of analysis is being 2016 * Since [computeValue] will be called while the state of analysis is being
2017 * updated, it should be free of side effects so that it doesn't cause 2017 * updated, it should be free of side effects so that it doesn't cause
2018 * reentrant changes to the analysis state. 2018 * reentrant changes to the analysis state.
2019 */ 2019 */
2020 CancelableFuture<T> computeAsync( 2020 CancelableFuture<T> computeAsync(
2021 AnalysisTarget target, T computeValue(cache.CacheEntry entry)) { 2021 AnalysisTarget target, T computeValue(CacheEntry entry)) {
2022 if (_context.isDisposed) { 2022 if (_context.isDisposed) {
2023 // No further analysis is expected, so return a future that completes 2023 // No further analysis is expected, so return a future that completes
2024 // immediately with AnalysisNotScheduledError. 2024 // immediately with AnalysisNotScheduledError.
2025 return new CancelableFuture.error(new AnalysisNotScheduledError()); 2025 return new CancelableFuture.error(new AnalysisNotScheduledError());
2026 } 2026 }
2027 cache.CacheEntry entry = _context.getReadableSourceEntryOrNull(target); 2027 CacheEntry entry = _context.getReadableSourceEntryOrNull(target);
2028 if (entry == null) { 2028 if (entry == null) {
2029 return new CancelableFuture.error(new AnalysisNotScheduledError()); 2029 return new CancelableFuture.error(new AnalysisNotScheduledError());
2030 } 2030 }
2031 PendingFuture pendingFuture = 2031 PendingFuture pendingFuture =
2032 new PendingFuture<T>(_context, target, computeValue); 2032 new PendingFuture<T>(_context, target, computeValue);
2033 if (!pendingFuture.evaluate(entry)) { 2033 if (!pendingFuture.evaluate(entry)) {
2034 _context._pendingFutureTargets 2034 _context._pendingFutureTargets
2035 .putIfAbsent(target, () => <PendingFuture>[]) 2035 .putIfAbsent(target, () => <PendingFuture>[])
2036 .add(pendingFuture); 2036 .add(pendingFuture);
2037 } 2037 }
2038 return pendingFuture.future; 2038 return pendingFuture.future;
2039 } 2039 }
2040 } 2040 }
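A note on the pending-future pattern that appears throughout this file: a computation function (the PendingFutureComputer typedef above) is handed a cache entry and either returns a value when analysis is complete, returns null while more analysis is still needed, or throws to complete the future with an error. The sketch below models just that contract in isolation; FakeEntry, PendingComputer, and FakePendingFuture are illustrative stand-ins rather than analyzer API, and it is written in current Dart with null safety rather than the 2015-era Dart of this patch.

    import 'dart:async';

    /// Illustrative stand-in for a cache entry: value stays null until the
    /// analysis result has been computed.
    class FakeEntry {
      Object? value;
    }

    /// Same shape as the PendingFutureComputer typedef in the diff: return
    /// null while more analysis is needed, throw to signal failure.
    typedef PendingComputer<T> = T? Function(FakeEntry entry);

    class FakePendingFuture<T> {
      final Completer<T> _completer = Completer<T>();
      final PendingComputer<T> _compute;

      FakePendingFuture(this._compute);

      Future<T> get future => _completer.future;

      /// Mirrors PendingFuture.evaluate: complete the future once the
      /// computer yields a result, turn a thrown exception into an error,
      /// and report false while the result is still pending.
      bool evaluate(FakeEntry entry) {
        try {
          final T? result = _compute(entry);
          if (result == null) {
            return false; // more analysis needed; caller keeps it queued
          }
          _completer.complete(result);
          return true;
        } catch (exception, stackTrace) {
          _completer.completeError(exception, stackTrace);
          return true;
        }
      }
    }

A caller holds onto the future, re-runs evaluate whenever the entry may have changed, and drops the pending object once evaluate returns true, which mirrors how AnalysisContextImpl manages the entries in _pendingFutureTargets.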