// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

library analyzer.src.task.dart_work_manager;

import 'dart:collection';

import 'package:analyzer/src/context/cache.dart';
import 'package:analyzer/src/generated/engine.dart'
    show
        AnalysisEngine,
        AnalysisErrorInfo,
        AnalysisErrorInfoImpl,
        AnalysisOptions,
        CacheState,
        InternalAnalysisContext;
import 'package:analyzer/src/generated/error.dart';
import 'package:analyzer/src/generated/source.dart';
import 'package:analyzer/src/generated/utilities_collection.dart';
import 'package:analyzer/src/task/dart.dart';
import 'package:analyzer/src/task/driver.dart';
import 'package:analyzer/task/dart.dart';
import 'package:analyzer/task/general.dart';
import 'package:analyzer/task/model.dart';

/**
 * The manager for Dart specific analysis: it maintains the queues of Dart
 * sources to analyze and decides which analysis result should be computed
 * next.
 */
class DartWorkManager implements WorkManager {
  /**
   * The descriptors of the error results that are reported for raw Dart
   * [Source]s.
   */
  static final List<ResultDescriptor> _SOURCE_ERRORS = <ResultDescriptor>[
    BUILD_DIRECTIVES_ERRORS,
    BUILD_LIBRARY_ERRORS,
    PARSE_ERRORS,
    SCAN_ERRORS
  ];

  /**
   * The descriptors of the error results that are reported for raw Dart
   * [LibrarySpecificUnit]s.
   */
  static final List<ResultDescriptor> _UNIT_ERRORS = <ResultDescriptor>[
    HINTS,
    RESOLVE_REFERENCES_ERRORS,
    RESOLVE_TYPE_NAMES_ERRORS,
    VARIABLE_REFERENCE_ERRORS,
    VERIFY_ERRORS
  ];

  final InternalAnalysisContext context;

  /**
   * The [TargetedResult]s that should be computed with priority.
   */
  final LinkedHashSet<TargetedResult> priorityResultQueue =
      new LinkedHashSet<TargetedResult>();

  /**
   * The sources whose kind we don't know yet.
   */
  final LinkedHashSet<Source> unknownSourceQueue = new LinkedHashSet<Source>();

  /**
   * The queue of library sources to process.
   */
  final LinkedHashSet<Source> librarySourceQueue = new LinkedHashSet<Source>();

  /**
   * A table mapping library sources to the part sources they include.
   */
  final HashMap<Source, List<Source>> libraryPartsMap =
      new HashMap<Source, List<Source>>();

  /**
   * A table mapping part sources to the library sources that include them.
   */
  final HashMap<Source, List<Source>> partLibrariesMap =
      new HashMap<Source, List<Source>>();

  /**
   * Initialize a newly created manager.
   */
  DartWorkManager(this.context) {
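    // Whenever the computed errors of a library are invalidated, put the
    // library back into the queue so that they will be recomputed.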
    analysisCache.onResultInvalidated.listen((InvalidatedResult event) {
      if (event.descriptor == LIBRARY_ERRORS_READY) {
        CacheEntry entry = event.entry;
        if (entry.getValue(SOURCE_KIND) == SourceKind.LIBRARY) {
          librarySourceQueue.add(entry.target);
        }
      }
    });
  }

  /**
   * Returns the correctly typed result of `context.analysisCache`.
   */
  AnalysisCache get analysisCache => context.analysisCache;

  /**
   * The partition that contains analysis results that are not shared with
   * other contexts.
   */
  CachePartition get privateAnalysisCachePartition =>
      context.privateAnalysisCachePartition;

  /**
   * Specifies that the client wants the given [result] of the given [target]
   * to be computed with priority.
   */
  void addPriorityResult(AnalysisTarget target, ResultDescriptor result) {
    priorityResultQueue.add(new TargetedResult(target, result));
  }

  /**
   * Notifies the manager about changes in the explicit source list.
   */
  void applyChange(List<Source> addedSources, List<Source> changedSources,
      List<Source> removedSources) {
    addedSources = addedSources.where(_isDartSource).toList();
    changedSources = changedSources.where(_isDartSource).toList();
    removedSources = removedSources.where(_isDartSource).toList();
    // unknown queue
    unknownSourceQueue.addAll(addedSources);
    unknownSourceQueue.addAll(changedSources);
    unknownSourceQueue.removeAll(removedSources);
    // library queue
    librarySourceQueue.removeAll(changedSources);
    librarySourceQueue.removeAll(removedSources);
    // parts in libraries
    for (Source changedSource in changedSources) {
      _onLibrarySourceChangedOrRemoved(changedSource);
    }
    for (Source removedSource in removedSources) {
      partLibrariesMap.remove(removedSource);
      _onLibrarySourceChangedOrRemoved(removedSource);
    }
  }

  @override
  void applyPriorityTargets(List<AnalysisTarget> targets) {
    // Unschedule the old targets.
    List<TargetedResult> resultsToUnschedule = <TargetedResult>[];
    for (TargetedResult result in priorityResultQueue) {
      if (result.result == LIBRARY_ERRORS_READY) {
        resultsToUnschedule.add(result);
      }
    }
    priorityResultQueue.removeAll(resultsToUnschedule);
    // Schedule new targets.
    for (AnalysisTarget target in targets) {
      if (_isDartSource(target)) {
        SourceKind sourceKind = analysisCache.getValue(target, SOURCE_KIND);
        if (sourceKind == SourceKind.UNKNOWN) {
          addPriorityResult(target, SOURCE_KIND);
        } else if (sourceKind == SourceKind.LIBRARY) {
          _schedulePriorityLibrarySourceAnalysis(target);
        } else if (sourceKind == SourceKind.PART) {
          List<Source> libraries = context.getLibrariesContaining(target);
          for (Source library in libraries) {
            addPriorityResult(library, LIBRARY_ERRORS_READY);
          }
        }
      }
    }
  }

  /**
   * Return an [AnalysisErrorInfo] containing the list of all of the errors
   * and the line info associated with the given [source]. The list of errors
   * will be empty if the source is not known to the context or if there are
   * no errors in the source. The errors contained in the list can be
   * incomplete.
   */
  AnalysisErrorInfo getErrors(Source source) {
    if (analysisCache.getState(source, DART_ERRORS) == CacheState.VALID) {
      List<AnalysisError> errors = analysisCache.getValue(source, DART_ERRORS);
      LineInfo lineInfo = analysisCache.getValue(source, LINE_INFO);
      return new AnalysisErrorInfoImpl(errors, lineInfo);
    }
    List<AnalysisError> errors = <AnalysisError>[];
    for (ResultDescriptor descriptor in _SOURCE_ERRORS) {
      errors.addAll(analysisCache.getValue(source, descriptor));
    }
    for (Source library in context.getLibrariesContaining(source)) {
      LibrarySpecificUnit unit = new LibrarySpecificUnit(library, source);
      for (ResultDescriptor descriptor in _UNIT_ERRORS) {
        errors.addAll(analysisCache.getValue(unit, descriptor));
      }
    }
    LineInfo lineInfo = analysisCache.getValue(source, LINE_INFO);
    return new AnalysisErrorInfoImpl(errors, lineInfo);
  }

  /**
   * Returns the libraries that include the given [part].
   * The list may be empty, but is not `null`.
   */
  List<Source> getLibrariesContainingPart(Source part) {
    List<Source> libraries = partLibrariesMap[part];
    return libraries != null ? libraries : Source.EMPTY_LIST;
  }

  @override
  TargetedResult getNextResult() {
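    // Work is selected in three tiers: explicitly requested priority results
    // first, then queued library sources, then sources whose kind is not
    // known yet.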
    // Try to find a priority result to compute.
    while (priorityResultQueue.isNotEmpty) {
      TargetedResult result = priorityResultQueue.first;
      if (!_needsComputing(result.target, result.result)) {
        priorityResultQueue.remove(result);
        continue;
      }
      return result;
    }
    // Try to find a new library to analyze.
    while (librarySourceQueue.isNotEmpty) {
      Source librarySource = librarySourceQueue.first;
      // Maybe done with this library.
      if (!_needsComputing(librarySource, LIBRARY_ERRORS_READY)) {
        librarySourceQueue.remove(librarySource);
        continue;
      }
      // Analyze this library.
      return new TargetedResult(librarySource, LIBRARY_ERRORS_READY);
    }
    // No libraries in the queue; check whether there are sources to organize.
    while (unknownSourceQueue.isNotEmpty) {
      Source source = unknownSourceQueue.first;
      // Maybe done with this source.
      if (!_needsComputing(source, SOURCE_KIND)) {
        unknownSourceQueue.remove(source);
        continue;
      }
      // Compute the kind of this source.
      return new TargetedResult(source, SOURCE_KIND);
    }
    // TODO(scheglov) Report errors for parts that remained in the queue after
    // all libraries had been processed.
    // No results to compute.
    return null;
  }

  @override
  WorkOrderPriority getNextResultPriority() {
    if (priorityResultQueue.isNotEmpty) {
      return WorkOrderPriority.PRIORITY;
    }
    if (unknownSourceQueue.isNotEmpty || librarySourceQueue.isNotEmpty) {
      return WorkOrderPriority.NORMAL;
    }
    return WorkOrderPriority.NONE;
  }

  /**
   * Notifies the manager about analysis options changes.
   */
  void onAnalysisOptionsChanged() {
    _invalidateAllLocalResolutionInformation(false);
  }

  /**
   * Notifies the manager about [SourceFactory] changes.
   */
  void onSourceFactoryChanged() {
    _invalidateAllLocalResolutionInformation(true);
  }

  @override
  void resultsComputed(
      AnalysisTarget target, Map<ResultDescriptor, dynamic> outputs) {
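    // The [outputs] are the results that were just computed for [target]; use
    // them to update the queues, the part/library maps, and the change notices.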
    // Organize sources.
    if (_isDartSource(target)) {
      Source source = target;
      SourceKind kind = outputs[SOURCE_KIND];
      if (kind != null) {
        unknownSourceQueue.remove(source);
        if (kind == SourceKind.LIBRARY) {
          if (context.prioritySources.contains(source)) {
            _schedulePriorityLibrarySourceAnalysis(source);
          } else {
            bool needErrors = _shouldErrorsBeComputed(source);
            if (needErrors) {
              librarySourceQueue.add(target);
            }
          }
        }
      }
    }
    // Update parts in libraries.
    if (_isDartSource(target)) {
      Source library = target;
      List<Source> includedParts = outputs[INCLUDED_PARTS];
      if (includedParts != null) {
        libraryPartsMap[library] = includedParts;
        for (Source part in includedParts) {
          List<Source> libraries =
              partLibrariesMap.putIfAbsent(part, () => <Source>[]);
          if (!libraries.contains(library)) {
            libraries.add(library);
            _invalidateContainingLibraries(part);
          }
        }
      }
    }
    // Update notice.
    if (_isDartSource(target)) {
      bool shouldSetErrors = false;
      outputs.forEach((ResultDescriptor descriptor, value) {
        if (descriptor == PARSED_UNIT && value != null) {
          context.getNotice(target).parsedDartUnit = value;
          shouldSetErrors = true;
        }
        if (descriptor == DART_ERRORS) {
          shouldSetErrors = true;
        }
      });
      if (shouldSetErrors) {
        AnalysisErrorInfo info = getErrors(target);
        context.getNotice(target).setErrors(info.errors, info.lineInfo);
      }
    }
    if (target is LibrarySpecificUnit) {
      Source source = target.source;
      bool shouldSetErrors = false;
      outputs.forEach((ResultDescriptor descriptor, value) {
        if (descriptor == RESOLVED_UNIT && value != null) {
          context.getNotice(source).resolvedDartUnit = value;
          shouldSetErrors = true;
        }
      });
      if (shouldSetErrors) {
        AnalysisErrorInfo info = getErrors(source);
        context.getNotice(source).setErrors(info.errors, info.lineInfo);
      }
    }
  }

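  /**
   * Notifies the manager that the unit with the given [unitSource] in the
   * library with the given [librarySource] was incrementally resolved, so
   * the library should be scheduled for analysis again.
   */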
  void unitIncrementallyResolved(Source librarySource, Source unitSource) {
    librarySourceQueue.add(librarySource);
  }

  /**
   * Invalidate all of the resolution results computed by this context. The
   * flag [invalidateUris] should be `true` if the cached results of
   * converting URIs to source files should also be invalidated.
   */
  void _invalidateAllLocalResolutionInformation(bool invalidateUris) {
    CachePartition partition = privateAnalysisCachePartition;
    // Prepare targets and values to invalidate.
    List<Source> dartSources = <Source>[];
    List<LibrarySpecificUnit> unitTargets = <LibrarySpecificUnit>[];
    MapIterator<AnalysisTarget, CacheEntry> iterator = partition.iterator();
    while (iterator.moveNext()) {
      AnalysisTarget target = iterator.key;
      // Optionally gather Dart sources to invalidate URI resolution.
      if (invalidateUris && _isDartSource(target)) {
        dartSources.add(target);
      }
      // LibrarySpecificUnit(s) are the roots of Dart resolution.
      // When one is invalidated, the invalidation is propagated to all of its
      // resolution results.
      if (target is LibrarySpecificUnit) {
        unitTargets.add(target);
        Source library = target.library;
        if (context.exists(library)) {
          librarySourceQueue.add(library);
        }
      }
    }
    // Invalidate targets and values.
    unitTargets.forEach(partition.remove);
    for (Source dartSource in dartSources) {
      CacheEntry entry = partition.get(dartSource);
      if (entry != null) {
        // TODO(scheglov) we invalidate too much.
        // Would be nice to invalidate just URI resolution.
        entry.setState(PARSED_UNIT, CacheState.INVALID);
        entry.setState(IMPORTED_LIBRARIES, CacheState.INVALID);
        entry.setState(EXPLICITLY_IMPORTED_LIBRARIES, CacheState.INVALID);
        entry.setState(EXPORTED_LIBRARIES, CacheState.INVALID);
        entry.setState(INCLUDED_PARTS, CacheState.INVALID);
      }
    }
  }

  /**
   * Invalidate [CONTAINING_LIBRARIES] for the given [source].
   * [CONTAINING_LIBRARIES] does not have dependencies, so we manage it here.
   * The [source] may be a part, or a library whose content was updated so
   * that it will now be a part.
   */
  void _invalidateContainingLibraries(Source source) {
    CacheEntry entry = analysisCache.get(source);
    if (entry != null) {
      entry.setState(CONTAINING_LIBRARIES, CacheState.INVALID);
    }
  }

  /**
   * Returns `true` if the given [result] of the given [target] needs
   * computing, i.e. it is neither in the valid state nor in the error state.
   */
  bool _needsComputing(AnalysisTarget target, ResultDescriptor result) {
    CacheState state = analysisCache.getState(target, result);
    return state != CacheState.VALID && state != CacheState.ERROR;
  }

  /**
   * The given [library] source was changed or removed.
   * Update [libraryPartsMap] and [partLibrariesMap].
   */
  void _onLibrarySourceChangedOrRemoved(Source library) {
    List<Source> parts = libraryPartsMap.remove(library);
    if (parts != null) {
      for (Source part in parts) {
        List<Source> libraries = partLibrariesMap[part];
        if (libraries != null) {
          libraries.remove(library);
          _invalidateContainingLibraries(part);
        }
      }
    }
    _invalidateContainingLibraries(library);
  }

  /**
   * Schedule computing [RESOLVED_UNIT] for the given [librarySource].
   * If errors should be computed, schedule [LIBRARY_ERRORS_READY] instead;
   * computing it produces [RESOLVED_UNIT] in the process as well.
   */
  void _schedulePriorityLibrarySourceAnalysis(Source librarySource) {
    bool needErrors = _shouldErrorsBeComputed(librarySource);
    if (needErrors) {
      addPriorityResult(librarySource, LIBRARY_ERRORS_READY);
    } else {
      var target = new LibrarySpecificUnit(librarySource, librarySource);
      addPriorityResult(target, RESOLVED_UNIT);
    }
  }

  bool _shouldErrorsBeComputed(Source source) =>
      context.shouldErrorsBeAnalyzed(source, null);

  static bool _isDartSource(AnalysisTarget target) {
    return target is Source && AnalysisEngine.isDartFileName(target.fullName);
  }
}
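
// ---------------------------------------------------------------------------
// Illustrative sketch only (not part of the original file): one way a
// scheduler could drive a [DartWorkManager]. The function name, `context`,
// and `changedSource` below are hypothetical placeholders; in the analyzer
// the work manager is driven by the analysis infrastructure, which also
// creates and executes the tasks that produce the requested results.
// ---------------------------------------------------------------------------
void exampleDriveDartWorkManager(
    InternalAnalysisContext context, Source changedSource) {
  DartWorkManager manager = new DartWorkManager(context);
  // Report that one explicitly analyzed Dart source has changed.
  manager.applyChange(
      Source.EMPTY_LIST, <Source>[changedSource], Source.EMPTY_LIST);
  // Ask whether there is work to do and, if so, which result comes next.
  if (manager.getNextResultPriority() != WorkOrderPriority.NONE) {
    TargetedResult next = manager.getNextResult();
    // A real driver would now run the task that produces `next.result` for
    // `next.target` (when `next` is not null) and report the outputs back
    // through `manager.resultsComputed(...)` so the queues stay consistent.
    print('Next result to compute: $next');
  }
}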