OLD | NEW |
---|---|
1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 import 'dart:async'; | 5 import 'dart:async'; |
6 import 'dart:collection'; | |
7 import 'dart:convert'; | |
6 | 8 |
7 import 'package:analyzer/dart/ast/ast.dart'; | 9 import 'package:analyzer/dart/ast/ast.dart'; |
10 import 'package:analyzer/dart/ast/token.dart'; | |
8 import 'package:analyzer/error/error.dart'; | 11 import 'package:analyzer/error/error.dart'; |
12 import 'package:analyzer/error/listener.dart'; | |
13 import 'package:analyzer/file_system/file_system.dart'; | |
14 import 'package:analyzer/src/context/context.dart'; | |
15 import 'package:analyzer/src/context/source.dart'; | |
9 import 'package:analyzer/src/dart/analysis/byte_store.dart'; | 16 import 'package:analyzer/src/dart/analysis/byte_store.dart'; |
17 import 'package:analyzer/src/dart/error/todo_codes.dart'; | |
18 import 'package:analyzer/src/dart/scanner/reader.dart'; | |
19 import 'package:analyzer/src/dart/scanner/scanner.dart'; | |
20 import 'package:analyzer/src/generated/engine.dart' | |
21 show AnalysisContext, AnalysisEngine, AnalysisOptions, ChangeSet; | |
22 import 'package:analyzer/src/generated/parser.dart'; | |
10 import 'package:analyzer/src/generated/source.dart'; | 23 import 'package:analyzer/src/generated/source.dart'; |
24 import 'package:analyzer/src/generated/utilities_dart.dart'; | |
25 import 'package:analyzer/src/summary/api_signature.dart'; | |
26 import 'package:analyzer/src/summary/flat_buffers.dart' as fb; | |
27 import 'package:analyzer/src/summary/format.dart'; | |
28 import 'package:analyzer/src/summary/idl.dart'; | |
29 import 'package:analyzer/src/summary/link.dart'; | |
30 import 'package:analyzer/src/summary/package_bundle_reader.dart'; | |
31 import 'package:analyzer/src/summary/summarize_ast.dart'; | |
32 import 'package:analyzer/src/summary/summarize_elements.dart'; | |
33 import 'package:analyzer/src/util/fast_uri.dart'; | |
34 import 'package:convert/convert.dart'; | |
35 import 'package:crypto/crypto.dart'; | |
11 | 36 |
12 /** | 37 /** |
13 * This class computes [AnalysisResult]s for Dart files. | 38 * This class computes [AnalysisResult]s for Dart files. |
14 * | 39 * |
15 * Let the set of "explicitly analyzed files" denote the set of paths that have | 40 * Let the set of "explicitly analyzed files" denote the set of paths that have |
16 * been passed to [addFile] but not subsequently passed to [removeFile]. Let | 41 * been passed to [addFile] but not subsequently passed to [removeFile]. Let |
17 * the "current analysis results" denote the map from the set of explicitly | 42 * the "current analysis results" denote the map from the set of explicitly |
18 * analyzed files to the most recent [AnalysisResult] delivered to [results] | 43 * analyzed files to the most recent [AnalysisResult] delivered to [results] |
19 * for each file. Let the "current file state" represent a map from file path | 44 * for each file. Let the "current file state" represent a map from file path |
20 * to the file contents most recently read from that file, or fetched from the | 45 * to the file contents most recently read from that file, or fetched from the |
(...skipping 19 matching lines...) | |
40 * "idle" after a finite amount of processing. | 65 * "idle" after a finite amount of processing. |
41 * | 66 * |
42 * As a result of these guarantees, a client may ensure that the analysis | 67 * As a result of these guarantees, a client may ensure that the analysis |
43 * results are "eventually consistent" with the file system by simply calling | 68 * results are "eventually consistent" with the file system by simply calling |
44 * [changeFile] any time the contents of a file on the file system have changed. | 69 * [changeFile] any time the contents of a file on the file system have changed. |
45 * | 70 * |
46 * | 71 * |
47 * TODO(scheglov) Clean up the list of implicitly analyzed files. | 72 * TODO(scheglov) Clean up the list of implicitly analyzed files. |
48 */ | 73 */ |
49 class AnalysisDriver { | 74 class AnalysisDriver { |
75 final PerformanceLog _logger; | |
76 | |
77 /** | |
78 * The resource provider for working with files. | |
79 */ | |
80 final ResourceProvider _resourceProvider; | |
81 | |
50 /** | 82 /** |
51 * The byte storage to get and put serialized data. | 83 * The byte storage to get and put serialized data. |
52 * | 84 * |
53 * It can be shared with other [AnalysisDriver]s. | 85 * It can be shared with other [AnalysisDriver]s. |
54 */ | 86 */ |
55 final ByteStore _byteStore; | 87 final ByteStore _byteStore; |
56 | 88 |
57 /** | 89 /** |
90 * This [ContentCache] is consulted for a file content before reading | |
91 * the content from the file. | |
92 */ | |
93 final ContentCache _contentCache; | |
94 | |
95 /** | |
58 * The [SourceFactory] is used to resolve URIs to paths and restore URIs | 96 * The [SourceFactory] is used to resolve URIs to paths and restore URIs |
59 * from file paths. | 97 * from file paths. |
60 */ | 98 */ |
61 final SourceFactory _sourceFactory; | 99 final SourceFactory _sourceFactory; |
62 | 100 |
63 /** | 101 /** |
64 * This [ContentCache] is consulted for a file content before reading | 102 * The analysis options to analyze with. |
65 * the content from the file. | |
66 */ | 103 */ |
67 final ContentCache _contentCache; | 104 final AnalysisOptions _analysisOptions; |
68 | 105 |
69 AnalysisDriver(this._byteStore, this._sourceFactory, this._contentCache); | 106 /** |
107 * The combined unlinked and linked package for the SDK, extracted from | |
108 * the given [_sourceFactory]. | |
109 */ | |
110 PackageBundle _sdkBundle; | |
111 | |
112 /** | |
113 * The mapping from the files for which analysis was requested using | |
114 * [getResult] to the [Completer]s to report the result. | |
115 */ | |
116 final _requestedFiles = <String, Completer<AnalysisResult>>{}; | |
117 | |
118 /** | |
119 * The set of explicitly analyzed files. | |
120 */ | |
121 final _explicitFiles = new LinkedHashSet<String>(); | |
122 | |
123 /** | |
124 * The set of files that are currently scheduled for analysis. | |
125 */ | |
126 final _filesToAnalyze = new LinkedHashSet<String>(); | |
127 | |
128 /** | |
129 * The current file state. | |
130 * | |
131 * It maps file paths to MD5 hash of the file content. | |
132 */ | |
133 final _fileContentHashMap = <String, String>{}; | |
134 | |
135 /** | |
136 * TODO(scheglov) document and improve | |
137 */ | |
138 final _hasWorkStreamController = new StreamController<String>(); | |
139 | |
140 AnalysisDriver(this._logger, this._resourceProvider, this._byteStore, | |
141 this._contentCache, this._sourceFactory, this._analysisOptions) { | |
142 _sdkBundle = _sourceFactory.dartSdk.getLinkedBundle(); | |
143 } | |
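
A construction sketch for the driver as wired in this CL. `MemoryByteStore` (from the imported byte_store.dart) and `PhysicalResourceProvider.INSTANCE` are plausible concrete choices rather than anything this CL prescribes; the source factory and options are assumed to be configured elsewhere, and the factory must include a DartSdk because the constructor reads its linked bundle.

    import 'dart:io' show stdout;
    import 'package:analyzer/file_system/physical_file_system.dart';

    AnalysisDriver createDriver(
        SourceFactory sourceFactory, AnalysisOptions options) {
      return new AnalysisDriver(
          new PerformanceLog(stdout), // stdout implements StringSink
          PhysicalResourceProvider.INSTANCE,
          new MemoryByteStore(), // assumed in-memory ByteStore implementation
          new ContentCache(),
          sourceFactory, // must resolve 'dart:' URIs via a DartSdk
          options);
    }
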
70 | 144 |
71 /** | 145 /** |
72 * Set the list of files that the driver should try to analyze sooner. | 146 * Set the list of files that the driver should try to analyze sooner. |
73 * | 147 * |
74 * Every path in the list must be absolute and normalized. | 148 * Every path in the list must be absolute and normalized. |
75 * | 149 * |
76 * The driver will produce the results through the [results] stream. The | 150 * The driver will produce the results through the [results] stream. The |
77 * exact order in which results are produced is not defined, either | 151 * exact order in which results are produced is not defined, either |
78 * between priority files or between priority and non-priority files. | 152 * between priority files or between priority and non-priority files. |
79 */ | 153 */ |
(...skipping 14 matching lines...) | |
94 * Invocation of [addFile] or [changeFile] might result in producing more | 168 * Invocation of [addFile] or [changeFile] might result in producing more |
95 * analysis results that reflect the new current file state. | 169 * analysis results that reflect the new current file state. |
96 * | 170 * |
97 * More than one result might be produced for the same file, even if the | 171 * More than one result might be produced for the same file, even if the |
98 * client does not change the state of the files. | 172 * client does not change the state of the files. |
99 * | 173 * |
100 * Results might be produced even for files that have never been added | 174 * Results might be produced even for files that have never been added |
101 * using [addFile], for example when [getResult] was called for a file. | 175 * using [addFile], for example when [getResult] was called for a file. |
102 */ | 176 */ |
103 Stream<AnalysisResult> get results async* { | 177 Stream<AnalysisResult> get results async* { |
104 // TODO(scheglov) implement | 178 try { |
179 while (true) { | |
180 // TODO(scheglov) implement state transitioning | |
181 await for (String why in _hasWorkStreamController.stream) { | |
182 // Analyze the first file in the general queue. | |
183 if (_filesToAnalyze.isNotEmpty) { | |
184 _logger.runTimed('Analyzed ${_filesToAnalyze.length} files', () { | |
185 while (_filesToAnalyze.isNotEmpty) { | |
186 String path = _filesToAnalyze.first; | |
187 _filesToAnalyze.remove(path); | |
188 _File file = _fileForPath(path); | |
189 _computeAndPrintErrors(file); | |
190 // TODO(scheglov) yield the result | |
191 } | |
192 }); | |
193 } | |
194 } | |
195 // TODO(scheglov) implement | |
196 } | |
197 } finally { | |
198 print('The stream was cancelled.'); | |
199 } | |
105 } | 200 } |
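
A minimal client-side sketch of the "eventually consistent" contract from the class documentation: add the files, subscribe to [results], and call [changeFile] whenever the file system changes. The `watchFileSystem` parameter is hypothetical, standing in for whatever change-notification mechanism the client already has.

    void analyzeProject(AnalysisDriver driver, List<String> absolutePaths,
        void watchFileSystem(void onChange(String path))) {
      // The order of results is undefined; keep only the latest per path.
      final latest = <String, AnalysisResult>{};
      driver.results.listen((AnalysisResult result) {
        latest[result.path] = result;
      });
      // These become the "explicitly analyzed files".
      absolutePaths.forEach(driver.addFile);
      // Notifying the driver on every change is enough for the delivered
      // results to become consistent with the file system again.
      watchFileSystem((String changedPath) {
        driver.changeFile(changedPath);
      });
    }
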
106 | 201 |
107 /** | 202 /** |
108 * Add the file with the given [path] to the set of files to analyze. | 203 * Add the file with the given [path] to the set of files to analyze. |
109 * | 204 * |
110 * The [path] must be absolute and normalized. | 205 * The [path] must be absolute and normalized. |
111 * | 206 * |
112 * The results of analysis are eventually produced by the [results] stream. | 207 * The results of analysis are eventually produced by the [results] stream. |
113 */ | 208 */ |
114 void addFile(String path) { | 209 void addFile(String path) { |
115 // TODO(scheglov) implement | 210 _explicitFiles.add(path); |
211 _filesToAnalyze.add(path); | |
212 _hasWorkStreamController.add('do it!'); | |
116 } | 213 } |
117 | 214 |
118 /** | 215 /** |
119 * The file with the given [path] might have changed - updated, added or | 216 * The file with the given [path] might have changed - updated, added or |
120 * removed. Or not, we don't know. Or it might have, but then changed back. | 217 * removed. Or not, we don't know. Or it might have, but then changed back. |
121 * | 218 * |
122 * The [path] must be absolute and normalized. | 219 * The [path] must be absolute and normalized. |
123 * | 220 * |
124 * The [path] can be any file - explicitly or implicitly analyzed, or neither. | 221 * The [path] can be any file - explicitly or implicitly analyzed, or neither. |
125 * | 222 * |
126 * Causes the analysis state to transition to "analyzing" (if it is not in | 223 * Causes the analysis state to transition to "analyzing" (if it is not in |
127 * that state already). Schedules the file contents for [path] to be read | 224 * that state already). Schedules the file contents for [path] to be read |
128 * into the current file state prior to the next time the analysis state | 225 * into the current file state prior to the next time the analysis state |
129 * transitions to "idle". | 226 * transitions to "idle". |
130 * | 227 * |
131 * Invocation of this method will not prevent a [Future] returned from | 228 * Invocation of this method will not prevent a [Future] returned from |
132 * [getResult] from completing with a result, but the result is not | 229 * [getResult] from completing with a result, but the result is not |
133 * guaranteed to be consistent with the new current file state after this | 230 * guaranteed to be consistent with the new current file state after this |
134 * [changeFile] invocation. | 231 * [changeFile] invocation. |
135 */ | 232 */ |
136 void changeFile(String path) { | 233 void changeFile(String path) { |
137 // TODO(scheglov) implement | 234 // TODO(scheglov) Don't clear, schedule API signature validation. |
235 _fileContentHashMap.clear(); | |
236 _filesToAnalyze.add(path); | |
237 _filesToAnalyze.addAll(_explicitFiles); | |
238 // TODO(scheglov) name?! | |
239 _hasWorkStreamController.add('do it!'); | |
138 } | 240 } |
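
A sketch of the narrower invalidation the TODO above asks for: instead of clearing the whole current file state, drop only the changed file's hash and let it be recomputed on the next read. The larger plan the TODO hints at (API signature validation, so that unaffected files are not re-analyzed at all) is not attempted here.

    void changeFile(String path) {
      // Only this file's content may be stale; other cached hashes stay valid.
      _fileContentHashMap.remove(path);
      // Conservatively re-analyze everything until signature validation can
      // prune files whose API did not actually change.
      _filesToAnalyze.add(path);
      _filesToAnalyze.addAll(_explicitFiles);
      _hasWorkStreamController.add('changed: $path');
    }
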
139 | 241 |
140 /** | 242 /** |
141 * Return the [Future] that completes with a [AnalysisResult] for the file | 243 * Return the [Future] that completes with a [AnalysisResult] for the file |
142 * with the given [path]. | 244 * with the given [path]. |
143 * | 245 * |
144 * The [path] must be absolute and normalized. | 246 * The [path] must be absolute and normalized. |
145 * | 247 * |
146 * The [path] can be any file - explicitly or implicitly analyzed, or neither. | 248 * The [path] can be any file - explicitly or implicitly analyzed, or neither. |
147 * | 249 * |
148 * Causes the analysis state to transition to "analyzing" (if it is not in | 250 * Causes the analysis state to transition to "analyzing" (if it is not in |
149 * that state already), the driver will read the file and produce the analysis | 251 * that state already), the driver will read the file and produce the analysis |
150 * result for it, which is consistent with the current file state (including | 252 * result for it, which is consistent with the current file state (including |
151 * the new state of the file), prior to the next time the analysis state | 253 * the new state of the file), prior to the next time the analysis state |
152 * transitions to "idle". | 254 * transitions to "idle". |
153 */ | 255 */ |
154 Future<AnalysisResult> getResult(String path) { | 256 Future<AnalysisResult> getResult(String path) { |
155 // TODO(scheglov) implement | 257 var completer = new Completer<AnalysisResult>(); |
156 throw new UnimplementedError(); | 258 _requestedFiles[path] = completer; |
Paul Berry
2016/10/24 11:46:21
There's a bug here. If the client calls getResult
scheglov
2016/10/24 17:23:48
Fixed.
We need a list of completers here.
Every of
| |
259 return completer.future; | |
Paul Berry
2016/10/24 11:46:21
I'm surprised not to see a call to `_hasWorkStream
scheglov
2016/10/24 17:23:48
Done.
Yes, we need to add a value to the "has wor
| |
157 } | 260 } |
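
A sketch of the fix the review comments above converge on: keep a list of completers per path, so that concurrent [getResult] calls for the same file all complete, and nudge the "has work" stream so the analysis loop wakes up. This assumes `_requestedFiles` changes type to `Map<String, List<Completer<AnalysisResult>>>`.

    Future<AnalysisResult> getResult(String path) {
      var completer = new Completer<AnalysisResult>();
      // Several requests for the same path may arrive before it is analyzed;
      // each caller gets its own completer.
      _requestedFiles
          .putIfAbsent(path, () => <Completer<AnalysisResult>>[])
          .add(completer);
      // Wake up the results loop.
      _hasWorkStreamController.add('request: $path');
      return completer.future;
    }
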
158 | 261 |
159 /** | 262 /** |
160 * Remove the file with the given [path] from the list of files to analyze. | 263 * Remove the file with the given [path] from the list of files to analyze. |
161 * | 264 * |
162 * The [path] must be absolute and normalized. | 265 * The [path] must be absolute and normalized. |
163 * | 266 * |
164 * The results of analysis of the file might still be produced by the | 267 * The results of analysis of the file might still be produced by the |
165 * [results] stream. The driver will try to stop producing these results, | 268 * [results] stream. The driver will try to stop producing these results, |
166 * but does not guarantee this. | 269 * but does not guarantee this. |
167 */ | 270 */ |
168 void removeFile(String path) { | 271 void removeFile(String path) { |
169 // TODO(scheglov) implement | 272 _explicitFiles.remove(path); |
273 _filesToAnalyze.remove(path); | |
274 } | |
275 | |
276 /** | |
277 * TODO(scheglov) replace with actual [AnalysisResult] computing. | |
278 */ | |
279 List<String> _computeAndPrintErrors(_File file) { | |
280 List<String> errorStrings = _logger.run('Compute errors $file', () { | |
281 LibraryContext libraryContext = _createLibraryContext(file); | |
282 | |
283 String errorsKey; | |
284 { | |
285 ApiSignature signature = new ApiSignature(); | |
286 signature.addString(libraryContext.node.linkedHash); | |
287 signature.addString(file.contentHash); | |
288 errorsKey = '${signature.toHex()}.errors'; | |
289 } | |
290 | |
291 { | |
292 List<int> bytes = _byteStore.get(errorsKey); | |
293 if (bytes != null) { | |
294 fb.BufferContext bp = new fb.BufferContext.fromBytes(bytes); | |
295 int table = bp.derefObject(0); | |
296 return const fb.ListReader<String>(const fb.StringReader()) | |
297 .vTableGet(bp, table, 0); | |
298 } | |
299 } | |
300 | |
301 AnalysisContext analysisContext = _createAnalysisContext(libraryContext); | |
302 analysisContext.resolveCompilationUnit2( | |
303 libraryContext.file.source, libraryContext.file.source); | |
304 try { | |
305 List<AnalysisError> errors; | |
306 try { | |
307 errors = _logger.runTimed('Computed errors', () { | |
308 return analysisContext.computeErrors(file.source); | |
309 }); | |
310 } catch (e, st) { | |
311 // TODO(scheglov) why does it fail? | |
312 // Caused by Bad state: Unmatched TypeParameterElementImpl T | |
Paul Berry
2016/10/24 11:46:21
This exception is usually caused by a bug in type
scheglov
2016/10/24 17:23:47
I will try to come up with a repro today.
scheglov
2016/10/24 17:29:31
Hm...
Cannot reproduce now.
I will remove try/catc
| |
313 errors = []; | |
314 } | |
315 List<String> errorStrings = errors | |
316 .where((error) => error.errorCode is! TodoCode) | |
317 .map((error) => error.toString()) | |
318 .toList(); | |
319 { | |
320 fb.Builder fbBuilder = new fb.Builder(); | |
321 var exportedOffset = fbBuilder.writeList(errorStrings | |
322 .map((errorStr) => fbBuilder.writeString(errorStr)) | |
323 .toList()); | |
324 fbBuilder.startTable(); | |
325 fbBuilder.addOffset(0, exportedOffset); | |
326 var offset = fbBuilder.endTable(); | |
327 List<int> bytes = fbBuilder.finish(offset, 'CErr'); | |
328 _byteStore.put(errorsKey, bytes); | |
329 } | |
330 | |
331 return errorStrings; | |
332 } finally { | |
333 analysisContext.dispose(); | |
334 } | |
335 }); | |
336 | |
337 if (errorStrings.isNotEmpty) { | |
338 errorStrings.forEach((errorString) => print('\t$errorString')); | |
339 } else { | |
340 print('\tNO ERRORS'); | |
341 } | |
342 return errorStrings; | |
343 } | |
344 | |
345 AnalysisContext _createAnalysisContext(LibraryContext libraryContext) { | |
346 AnalysisContextImpl analysisContext = | |
347 AnalysisEngine.instance.createAnalysisContext(); | |
348 | |
349 analysisContext.sourceFactory = | |
350 new SourceFactory((_sourceFactory as SourceFactoryImpl).resolvers); | |
351 analysisContext.resultProvider = | |
352 new InputPackagesResultProvider(analysisContext, libraryContext.store); | |
353 analysisContext | |
354 .applyChanges(new ChangeSet()..addedSource(libraryContext.file.source)); | |
355 return analysisContext; | |
356 } | |
357 | |
358 /** | |
359 * Return the content in which the library represented by the given | |
Paul Berry
2016/10/24 11:46:21
s/content/context/
scheglov
2016/10/24 17:23:48
Done.
| |
360 * [libraryFile] should be analyzed. | |
361 * | |
362 * TODO(scheglov) We often don't need [SummaryDataStore], only linked hash. | |
363 */ | |
364 LibraryContext _createLibraryContext(_File libraryFile) { | |
365 Map<String, _LibraryNode> nodes = <String, _LibraryNode>{}; | |
366 | |
367 return _logger.run('Create library context', () { | |
368 SummaryDataStore store = new SummaryDataStore(const <String>[]); | |
369 store.addBundle(null, _sdkBundle); | |
370 | |
371 void createLibraryNodes(_File libraryFile) { | |
372 Uri libraryUri = libraryFile.uri; | |
373 if (libraryUri.scheme == 'dart') { | |
374 return; | |
Paul Berry
2016/10/24 11:46:21
Why? Is this because we always get "dart:" stuff
scheglov
2016/10/24 17:23:48
Done.
| |
375 } | |
376 String uriStr = libraryUri.toString(); | |
377 if (!nodes.containsKey(uriStr)) { | |
378 _LibraryNode node = new _LibraryNode(this, nodes, libraryUri); | |
379 nodes[uriStr] = node; | |
380 ReferencedUris referenced = _getReferencedUris(libraryFile); | |
381 | |
382 // Append unlinked bundles. | |
383 for (String uri in referenced.parted) { | |
384 _File file = libraryFile.resolveUri(uri); | |
385 PackageBundle unlinked = _getUnlinked(file); | |
386 node.unlinkedBundles.add(unlinked); | |
387 store.addBundle(null, unlinked); | |
388 } | |
389 | |
390 // Create nodes for referenced libraries. | |
391 for (String uri in referenced.imported) { | |
392 _File file = libraryFile.resolveUri(uri); | |
393 createLibraryNodes(file); | |
394 } | |
395 for (String uri in referenced.exported) { | |
396 _File file = libraryFile.resolveUri(uri); | |
397 createLibraryNodes(file); | |
398 } | |
399 } | |
400 } | |
401 | |
402 _logger.runTimed2(() { | |
403 createLibraryNodes(libraryFile); | |
404 }, () => 'Computed ${nodes.length} nodes'); | |
405 _LibraryNode libraryNode = nodes[libraryFile.uri.toString()]; | |
Paul Berry
2016/10/24 11:46:21
Nit: how about if we change createLibraryNodes() s
scheglov
2016/10/24 17:23:48
Done.
| |
406 | |
407 Set<String> libraryUrisToLink = new Set<String>(); | |
408 int numberOfNodesWithLinked = 0; | |
409 _logger.runTimed2(() { | |
410 for (_LibraryNode node in nodes.values) { | |
411 String key = '${node.linkedHash}.linked'; | |
412 List<int> bytes = _byteStore.get(key); | |
413 if (bytes != null) { | |
414 PackageBundle linked = new PackageBundle.fromBuffer(bytes); | |
415 node.linked = linked; | |
416 store.addBundle(null, linked); | |
417 numberOfNodesWithLinked++; | |
418 } else { | |
419 libraryUrisToLink.add(node.uri.toString()); | |
420 } | |
421 } | |
422 }, () => 'Loaded $numberOfNodesWithLinked linked bundles'); | |
423 | |
424 Map<String, LinkedLibraryBuilder> linkedLibraries = {}; | |
425 _logger.runTimed2(() { | |
426 linkedLibraries = link(libraryUrisToLink, (String uri) { | |
427 LinkedLibrary linkedLibrary = store.linkedMap[uri]; | |
428 if (linkedLibrary == null) { | |
429 throw new StateError('No linked library for: $uri'); | |
430 } | |
431 return linkedLibrary; | |
432 }, (String uri) { | |
433 UnlinkedUnit unlinkedUnit = store.unlinkedMap[uri]; | |
434 if (unlinkedUnit == null) { | |
435 throw new StateError('No unlinked unit for: $uri'); | |
436 } | |
437 return unlinkedUnit; | |
438 }, (_) => null, _analysisOptions.strongMode); | |
439 }, () => 'Linked ${linkedLibraries.length} bundles'); | |
440 | |
441 linkedLibraries.forEach((uri, linkedBuilder) { | |
442 _LibraryNode node = nodes[uri]; | |
443 String key = '${node.linkedHash}.linked'; | |
444 List<int> bytes; | |
445 { | |
446 PackageBundleAssembler assembler = new PackageBundleAssembler(); | |
447 assembler.addLinkedLibrary(uri, linkedBuilder); | |
448 bytes = assembler.assemble().toBuffer(); | |
449 } | |
450 PackageBundle linked = new PackageBundle.fromBuffer(bytes); | |
451 node.linked = linked; | |
452 store.addBundle(null, linked); | |
453 _byteStore.put(key, bytes); | |
454 }); | |
455 | |
456 return new LibraryContext(libraryFile, libraryNode, store); | |
457 }); | |
458 } | |
459 | |
460 /** | |
461 * Return the [_File] for the given [path] in [_sourceFactory]. | |
462 */ | |
463 _File _fileForPath(String path) { | |
464 Source fileSource = _resourceProvider.getFile(path).createSource(); | |
465 Uri uri = _sourceFactory.restoreUri(fileSource); | |
466 Source source = _resourceProvider.getFile(path).createSource(uri); | |
467 return new _File(this, source); | |
468 } | |
469 | |
470 /** | |
471 * TODO(scheglov) It would be nice to get URIs of "parts" from unlinked. | |
472 */ | |
473 ReferencedUris _getReferencedUris(_File file) { | |
474 // Try to get from the store. | |
475 { | |
476 String key = '${file.contentHash}.uris'; | |
477 List<int> bytes = _byteStore.get(key); | |
478 if (bytes != null) { | |
479 fb.BufferContext bp = new fb.BufferContext.fromBytes(bytes); | |
480 int table = bp.derefObject(0); | |
481 const fb.ListReader<String> stringListReader = | |
482 const fb.ListReader<String>(const fb.StringReader()); | |
483 bool isLibrary = const fb.BoolReader().vTableGet(bp, table, 0); | |
484 List<String> imported = stringListReader.vTableGet(bp, table, 1); | |
485 List<String> exported = stringListReader.vTableGet(bp, table, 2); | |
486 List<String> parted = stringListReader.vTableGet(bp, table, 3); | |
487 ReferencedUris referencedUris = new ReferencedUris(); | |
488 referencedUris.isLibrary = isLibrary; | |
489 referencedUris.imported.addAll(imported); | |
490 referencedUris.exported.addAll(exported); | |
491 referencedUris.parted.addAll(parted); | |
492 return referencedUris; | |
493 } | |
494 } | |
495 | |
496 // Compute URIs. | |
497 ReferencedUris referencedUris = new ReferencedUris(); | |
498 referencedUris.parted.add(file.uri.toString()); | |
499 for (Directive directive in file.unit.directives) { | |
500 if (directive is PartOfDirective) { | |
501 referencedUris.isLibrary = false; | |
502 } else if (directive is UriBasedDirective) { | |
503 String uri = directive.uri.stringValue; | |
504 if (directive is ImportDirective) { | |
505 referencedUris.imported.add(uri); | |
506 } else if (directive is ExportDirective) { | |
507 referencedUris.exported.add(uri); | |
508 } else if (directive is PartDirective) { | |
509 referencedUris.parted.add(uri); | |
510 } | |
511 } | |
512 } | |
513 | |
514 // Serialize into bytes. | |
515 List<int> bytes; | |
516 { | |
517 fb.Builder fbBuilder = new fb.Builder(); | |
518 var importedOffset = fbBuilder.writeList(referencedUris.imported | |
519 .map((uri) => fbBuilder.writeString(uri)) | |
520 .toList()); | |
521 var exportedOffset = fbBuilder.writeList(referencedUris.exported | |
522 .map((uri) => fbBuilder.writeString(uri)) | |
523 .toList()); | |
524 var partedOffset = fbBuilder.writeList(referencedUris.parted | |
525 .map((uri) => fbBuilder.writeString(uri)) | |
526 .toList()); | |
527 fbBuilder.startTable(); | |
528 fbBuilder.addBool(0, referencedUris.isLibrary); | |
529 fbBuilder.addOffset(1, importedOffset); | |
530 fbBuilder.addOffset(2, exportedOffset); | |
531 fbBuilder.addOffset(3, partedOffset); | |
532 var offset = fbBuilder.endTable(); | |
533 bytes = fbBuilder.finish(offset, 'SoRU'); | |
534 } | |
535 | |
536 // We read the content and recomputed the hash. | |
537 // So, we need to update the key. | |
538 String key = '${file.contentHash}.uris'; | |
539 _byteStore.put(key, bytes); | |
540 | |
541 return referencedUris; | |
542 } | |
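
A sketch of the direction the TODO above points at: when an [UnlinkedUnit] is already available, the referenced URIs can be read from the summary instead of re-parsing. The `imports` and `publicNamespace.exports` fields are the same ones the dependency code in this CL reads; `publicNamespace.parts` is assumed here to carry the part URIs.

    ReferencedUris _referencedUrisFromUnlinked(UnlinkedUnit unit) {
      ReferencedUris referenced = new ReferencedUris();
      for (UnlinkedImport import in unit.imports) {
        // The implicit dart:core import has no explicit URI.
        if (!import.isImplicit) {
          referenced.imported.add(import.uri);
        }
      }
      for (UnlinkedExportPublic export in unit.publicNamespace.exports) {
        referenced.exported.add(export.uri);
      }
      // Assumed IDL field: part URIs recorded in the public namespace.
      referenced.parted.addAll(unit.publicNamespace.parts);
      return referenced;
    }
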
543 | |
544 /** | |
545 * Return the unlinked bundle of [file] for the current file state. | |
546 * | |
547 * That is, if there is an existing bundle for the current content hash | |
548 * of the [file] in the [_byteStore], then it is returned. Otherwise, the | |
549 * [file] content is read, the content hash is computed and the current file | |
550 * state is updated accordingly. Then the content is parsed into the | |
551 * [CompilationUnit] and serialized into a new unlinked bundle. The bundle | |
552 * is then put into the [_byteStore] and returned. | |
553 */ | |
554 PackageBundle _getUnlinked(_File file) { | |
555 // Try to get bytes for file's unlinked bundle. | |
556 List<int> bytes; | |
557 { | |
558 String key = '${file.contentHash}.unlinked'; | |
559 bytes = _byteStore.get(key); | |
560 } | |
561 // If no cached unlinked bundle, compute it. | |
562 if (bytes == null) { | |
563 _logger.runTimed('Create unlinked for $file', () { | |
564 // We read the content and recomputed the hash. | |
565 // So, we need to update the key. | |
566 String key = '${file.contentHash}.unlinked'; | |
567 UnlinkedUnitBuilder unlinkedUnit = serializeAstUnlinked(file.unit); | |
568 PackageBundleAssembler assembler = new PackageBundleAssembler(); | |
569 assembler.addUnlinkedUnitWithHash( | |
570 file.uri.toString(), unlinkedUnit, key); | |
571 bytes = assembler.assemble().toBuffer(); | |
572 _byteStore.put(key, bytes); | |
573 }); | |
574 } | |
575 return new PackageBundle.fromBuffer(bytes); | |
170 } | 576 } |
171 } | 577 } |
172 | 578 |
173 /** | 579 /** |
174 * The result of analyzing a single file. | 580 * The result of analyzing a single file. |
175 * | 581 * |
176 * These results are self-consistent, i.e. [content], [contentHash], and | 582 * These results are self-consistent, i.e. [content], [contentHash], and |
177 * the resolved [unit] correspond to each other. All referenced elements, even | 583 * the resolved [unit] correspond to each other. All referenced elements, even |
178 * external ones, are also self-consistent. But none of the results is | 584 * external ones, are also self-consistent. But none of the results is |
179 * guaranteed to be consistent with the state of the files. | 585 * guaranteed to be consistent with the state of the files. |
(...skipping 30 matching lines...) | |
210 final CompilationUnit unit; | 616 final CompilationUnit unit; |
211 | 617 |
212 /** | 618 /** |
213 * The full list of computed analysis errors, both syntactic and semantic. | 619 * The full list of computed analysis errors, both syntactic and semantic. |
214 */ | 620 */ |
215 final List<AnalysisError> errors; | 621 final List<AnalysisError> errors; |
216 | 622 |
217 AnalysisResult(this.path, this.uri, this.content, this.contentHash, this.unit, | 623 AnalysisResult(this.path, this.uri, this.content, this.contentHash, this.unit, |
218 this.errors); | 624 this.errors); |
219 } | 625 } |
626 | |
627 class LibraryContext { | |
Paul Berry
2016/10/24 11:46:21
Please add doc comments to this class and its publ
scheglov
2016/10/24 17:23:48
Done.
| |
628 final _File file; | |
629 final _LibraryNode node; | |
630 final SummaryDataStore store; | |
631 LibraryContext(this.file, this.node, this.store); | |
632 } | |
633 | |
634 class PerformanceLog { | |
635 final StringSink sink; | |
636 int _level = 0; | |
637 | |
638 PerformanceLog(this.sink); | |
639 | |
640 /*=T*/ run/*<T>*/(String msg, /*=T*/ f()) { | |
641 Stopwatch timer = new Stopwatch()..start(); | |
642 try { | |
643 writeln('+++ $msg.'); | |
644 _level++; | |
645 return f(); | |
646 } finally { | |
647 _level--; | |
648 int ms = timer.elapsedMilliseconds; | |
649 writeln('--- $msg in $ms ms.'); | |
650 } | |
651 } | |
652 | |
653 /*=T*/ runTimed/*<T>*/(String msg, /*=T*/ f()) { | |
Paul Berry
2016/10/24 11:46:21
I'm not comfortable with the duplication between r
scheglov
2016/10/24 17:23:48
Acknowledged.
| |
654 _level++; | |
655 Stopwatch timer = new Stopwatch()..start(); | |
656 try { | |
657 return f(); | |
658 } finally { | |
659 _level--; | |
660 int ms = timer.elapsedMilliseconds; | |
661 writeln('$msg in $ms ms.'); | |
662 } | |
663 } | |
664 | |
665 runTimed2(f(), String getMsg()) { | |
Paul Berry
2016/10/24 11:46:21
Similar concern here. I would recommend getting r
scheglov
2016/10/24 17:23:48
Acknowledged.
I completely agree.
I changed the lo
| |
666 _level++; | |
667 Stopwatch timer = new Stopwatch()..start(); | |
668 try { | |
669 return f(); | |
670 } finally { | |
671 _level--; | |
672 int ms = timer.elapsedMilliseconds; | |
673 String msg = getMsg(); | |
674 writeln('$msg in $ms ms.'); | |
675 } | |
676 } | |
677 | |
678 void writeln(String msg) { | |
679 String indent = '\t' * _level; | |
680 sink.writeln('$indent$msg'); | |
681 } | |
682 } | |
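
One way to remove the duplication the review comments above call out: a single private helper that takes a message producer, so the message can be built either before or after the timed closure runs; `runTimed` and `runTimed2` become thin wrappers. This is a refactoring sketch in the class's own comment-generics style, not what the CL ultimately landed.

    /*=T*/ _runTimed/*<T>*/(String getMsg(), /*=T*/ f()) {
      _level++;
      Stopwatch timer = new Stopwatch()..start();
      try {
        return f();
      } finally {
        _level--;
        writeln('${getMsg()} in ${timer.elapsedMilliseconds} ms.');
      }
    }

    /*=T*/ runTimed/*<T>*/(String msg, /*=T*/ f()) => _runTimed(() => msg, f);

    runTimed2(f(), String getMsg()) => _runTimed(getMsg, f);
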
683 | |
684 class ReferencedUris { | |
685 bool isLibrary = true; | |
686 final List<String> imported = <String>[]; | |
687 final List<String> exported = <String>[]; | |
688 final List<String> parted = <String>[]; | |
689 } | |
690 | |
691 /** | |
692 * Information about a file being analyzed, explicitly or implicitly. | |
693 * | |
694 * It keeps a consistent view on its [content], [contentHash] and [unit]. | |
695 */ | |
696 class _File { | |
697 /** | |
698 * The driver instance that is used to access [SourceFactory] and caches. | |
699 */ | |
700 final AnalysisDriver driver; | |
701 | |
702 /** | |
703 * The [Source] this [_File] instance represent. | |
Brian Wilkerson
2016/10/26 06:31:48
nit: "represent" --> "represents"
| |
704 */ | |
705 final Source source; | |
706 | |
707 String _content; | |
708 String _contentHash; | |
709 CompilationUnit _unit; | |
710 | |
711 _File(this.driver, this.source); | |
712 | |
713 /** | |
714 * Return the current content of the file. | |
715 * | |
716 * If the [_content] field is still `null`, get the content from the | |
717 * content cache or from the [source]. If the content cannot be accessed | |
718 * because of an exception, it is considered to be an empty string. | |
719 * | |
720 * When new content is read, the new [_contentHash] is computed and the | |
721 * current file state is updated. | |
722 */ | |
723 String get content { | |
724 if (_content == null) { | |
725 _readContentAndComputeHash(); | |
726 } | |
727 return _content; | |
728 } | |
729 | |
730 /** | |
731 * Ensure that the [contentHash] is filled. | |
732 * | |
733 * If the hash is already in the current file state, return the current | |
734 * value. Otherwise, read the [content], compute the hash, put it into | |
735 * the current file state, and update the [contentHash] field. | |
736 * | |
737 * The client cannot remember values of this property, because its value | |
Paul Berry
2016/10/24 11:46:21
Change "cannot" to either "should not" or "cannot
scheglov
2016/10/24 17:23:48
Done.
| |
738 * might change when [content] is read and the hash is recomputed. | |
739 */ | |
740 String get contentHash { | |
741 _contentHash ??= driver._fileContentHashMap[path]; | |
742 if (_contentHash == null) { | |
743 _readContentAndComputeHash(); | |
744 } | |
745 return _contentHash; | |
746 } | |
747 | |
748 String get path => source.fullName; | |
749 | |
750 /** | |
751 * Return the [CompilationUnit] of the file. | |
752 * | |
753 * Current this unit is resolved, it is used to compute unlinked summaries | |
Paul Berry
2016/10/24 11:46:21
I had trouble understanding this paragraph. Do yo
scheglov
2016/10/24 17:23:48
I changed the documentation comments for this gett
| |
754 * and and URIs. We use a separate analysis context to perform resolution | |
755 * and computing errors. But this might change in the future. | |
756 */ | |
757 CompilationUnit get unit { | |
758 AnalysisErrorListener errorListener = AnalysisErrorListener.NULL_LISTENER; | |
759 | |
760 CharSequenceReader reader = new CharSequenceReader(content); | |
761 Scanner scanner = new Scanner(source, reader, errorListener); | |
762 scanner.scanGenericMethodComments = driver._analysisOptions.strongMode; | |
763 Token token = scanner.tokenize(); | |
764 LineInfo lineInfo = new LineInfo(scanner.lineStarts); | |
765 | |
766 Parser parser = new Parser(source, errorListener); | |
767 parser.parseGenericMethodComments = driver._analysisOptions.strongMode; | |
768 _unit = parser.parseCompilationUnit(token); | |
769 _unit.lineInfo = lineInfo; | |
770 | |
771 return _unit; | |
772 } | |
773 | |
774 Uri get uri => source.uri; | |
775 | |
776 /** | |
777 * Return the [_File] for the [uri] referenced in this file. | |
778 */ | |
779 _File resolveUri(String uri) { | |
780 Source uriSource = driver._sourceFactory.resolveUri(source, uri); | |
781 return new _File(driver, uriSource); | |
782 } | |
783 | |
784 @override | |
785 String toString() => uri.toString(); | |
786 | |
787 /** | |
788 * Fill the [_content] and [_contentHash] fields. | |
789 * | |
790 * If the [_content] field if it is still `null`, get the content from the | |
Paul Berry
2016/10/24 11:46:21
Drop the words "if it".
scheglov
2016/10/24 17:23:48
Done.
| |
791 * content cache or from the [source]. If the content cannot be accessed | |
792 * because of an exception, it considers to be an empty string. | |
793 * | |
794 * When a new content is read, the new [_contentHash] should be computed and | |
795 * the current file state should be updated. | |
796 */ | |
797 void _readContentAndComputeHash() { | |
798 try { | |
799 _content = driver._contentCache.getContents(source); | |
800 _content ??= source.contents.data; | |
801 } catch (_) { | |
802 _content = ''; | |
Paul Berry
2016/10/24 11:46:21
Do we need to record the fact that we couldn't rea
scheglov
2016/10/24 17:23:48
I don't know yet.
There is a bug with not existing
| |
803 } | |
804 // Compute the content hash. | |
805 List<int> textBytes = UTF8.encode(_content); | |
806 List<int> hashBytes = md5.convert(textBytes).bytes; | |
807 _contentHash = hex.encode(hashBytes); | |
808 // Update the current file state. | |
809 driver._fileContentHashMap[path] = _contentHash; | |
810 } | |
811 } | |
812 | |
813 class _LibraryNode { | |
814 final AnalysisDriver driver; | |
815 final Map<String, _LibraryNode> nodes; | |
816 final Uri uri; | |
817 final List<PackageBundle> unlinkedBundles = <PackageBundle>[]; | |
818 | |
819 Set<_LibraryNode> transitiveDependencies; | |
820 List<_LibraryNode> _dependencies; | |
821 String _linkedHash; | |
822 | |
823 List<int> linkedNewBytes; | |
824 PackageBundle linked; | |
825 | |
826 _LibraryNode(this.driver, this.nodes, this.uri); | |
827 | |
828 /** | |
829 * Retrieve the dependencies of this node. | |
830 */ | |
831 List<_LibraryNode> get dependencies { | |
832 if (_dependencies == null) { | |
833 Set<_LibraryNode> dependencies = new Set<_LibraryNode>(); | |
834 | |
835 void appendDependency(String uriStr) { | |
836 Uri uri = FastUri.parse(uriStr); | |
837 if (uri.scheme == 'dart') { | |
838 // Dependency on the SDK is implicit and always added. | |
839 // The SDK linked bundle is precomputed before linking packages. | |
840 } else { | |
841 if (!uri.isAbsolute) { | |
842 uri = resolveRelativeUri(this.uri, uri); | |
843 uriStr = uri.toString(); | |
844 } | |
845 _LibraryNode node = nodes[uriStr]; | |
846 if (node == null) { | |
847 throw new StateError('No node for: $uriStr'); | |
848 } | |
849 dependencies.add(node); | |
850 } | |
851 } | |
852 | |
853 for (PackageBundle unlinkedBundle in unlinkedBundles) { | |
854 for (UnlinkedUnit unit in unlinkedBundle.unlinkedUnits) { | |
855 for (UnlinkedImport import in unit.imports) { | |
856 if (!import.isImplicit) { | |
857 appendDependency(import.uri); | |
858 } | |
859 } | |
860 for (UnlinkedExportPublic export in unit.publicNamespace.exports) { | |
861 appendDependency(export.uri); | |
862 } | |
863 } | |
864 } | |
865 | |
866 _dependencies = dependencies.toList(); | |
867 } | |
868 return _dependencies; | |
869 } | |
870 | |
871 @override | |
872 int get hashCode => uri.hashCode; | |
873 | |
874 bool get isReady => linked != null; | |
Paul Berry
2016/10/24 11:46:21
It's not obvious to me why "ready" means "linked".
scheglov
2016/10/24 17:23:48
I removed this and couple other fields later.
| |
875 | |
876 String get linkedHash { | |
Paul Berry
2016/10/24 11:46:21
The name "linkedHash" sounds like it means a hash
scheglov
2016/10/24 17:23:48
Done.
| |
877 if (_linkedHash == null) { | |
878 if (transitiveDependencies == null) { | |
879 computeTransitiveDependencies(); | |
880 } | |
881 | |
882 // Add all unlinked API signatures. | |
883 List<String> signatures = <String>[]; | |
884 signatures.add(driver._sdkBundle.apiSignature); | |
885 transitiveDependencies | |
886 .map((node) => node.unlinkedBundles) | |
887 .expand((bundles) => bundles) | |
888 .map((bundle) => bundle.apiSignature) | |
889 .forEach(signatures.add); | |
890 signatures.sort(); | |
891 | |
892 // Combine into a single hash. | |
893 ApiSignature signature = new ApiSignature(); | |
894 signature.addString(uri.toString()); | |
895 signatures.forEach(signature.addString); | |
896 _linkedHash = signature.toHex(); | |
897 } | |
898 return _linkedHash; | |
899 } | |
900 | |
901 bool operator ==(other) { | |
902 return other is _LibraryNode && other.uri == uri; | |
903 } | |
904 | |
905 void computeTransitiveDependencies() { | |
906 if (transitiveDependencies == null) { | |
907 transitiveDependencies = new Set<_LibraryNode>(); | |
908 | |
909 void appendDependencies(_LibraryNode node) { | |
910 if (transitiveDependencies.add(node)) { | |
911 node.dependencies.forEach(appendDependencies); | |
912 } | |
913 } | |
914 | |
915 appendDependencies(this); | |
916 } | |
917 } | |
918 | |
919 @override | |
920 String toString() => uri.toString(); | |
921 } | |