Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(181)

Side by Side Diff: pkg/analyzer/lib/src/dart/analysis/driver.dart

Issue 2450483002: Fixes for review comments. (Closed)
Patch Set: Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 import 'dart:async'; 5 import 'dart:async';
6 import 'dart:collection'; 6 import 'dart:collection';
7 import 'dart:convert'; 7 import 'dart:convert';
8 8
9 import 'package:analyzer/dart/ast/ast.dart'; 9 import 'package:analyzer/dart/ast/ast.dart';
10 import 'package:analyzer/dart/ast/token.dart'; 10 import 'package:analyzer/dart/ast/token.dart';
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after
106 /** 106 /**
107 * The combined unlinked and linked package for the SDK, extracted from 107 * The combined unlinked and linked package for the SDK, extracted from
108 * the given [_sourceFactory]. 108 * the given [_sourceFactory].
109 */ 109 */
110 PackageBundle _sdkBundle; 110 PackageBundle _sdkBundle;
111 111
112 /** 112 /**
113 * The mapping from the files for which analysis was requested using 113 * The mapping from the files for which analysis was requested using
114 * [getResult] to the [Completer]s to report the result. 114 * [getResult] to the [Completer]s to report the result.
115 */ 115 */
116 final _requestedFiles = <String, Completer<AnalysisResult>>{}; 116 final _requestedFiles = <String, List<Completer<AnalysisResult>>>{};
117 117
118 /** 118 /**
119 * The set of explicitly analyzed files. 119 * The set of explicitly analyzed files.
120 */ 120 */
121 final _explicitFiles = new LinkedHashSet<String>(); 121 final _explicitFiles = new LinkedHashSet<String>();
122 122
123 /** 123 /**
124 * The set of files that are currently scheduled for analysis. 124 * The set of files that are currently scheduled for analysis.
125 */ 125 */
126 final _filesToAnalyze = new LinkedHashSet<String>(); 126 final _filesToAnalyze = new LinkedHashSet<String>();
127 127
128 /** 128 /**
129 * The mapping of [Uri]s to the mapping of textual URIs to the [Source] 129 * The mapping of [Uri]s to the mapping of textual URIs to the [Source]
130 * that correspond in the current [_sourceFactory]. 130 * that correspond in the current [_sourceFactory].
131 */ 131 */
132 final _uriResolutionCache = <Uri, Map<String, Source>>{}; 132 final _uriResolutionCache = <Uri, Map<String, Source>>{};
133 133
134 /** 134 /**
135 * The current file state. 135 * The current file state.
136 * 136 *
137 * It maps file paths to the MD5 hash of the file content. 137 * It maps file paths to the MD5 hash of the file content.
138 */ 138 */
139 final _fileContentHashMap = <String, String>{}; 139 final _fileContentHashMap = <String, String>{};
140 140
141 /** 141 /**
142 * Mapping from library URIs to the linked hash of the library. 142 * Mapping from library URIs to the dependency signature of the library.
143 */ 143 */
144 final _linkedHashMap = <Uri, String>{}; 144 final _dependencySignatureMap = <Uri, String>{};
145 145
146 /** 146 /**
147 * TODO(scheglov) document and improve 147 * TODO(scheglov) document and improve
148 */ 148 */
149 final _hasWorkStreamController = new StreamController<String>(); 149 final _hasWorkStreamController = new StreamController<String>();
150 150
151 AnalysisDriver(this._logger, this._resourceProvider, this._byteStore, 151 AnalysisDriver(this._logger, this._resourceProvider, this._byteStore,
152 this._contentCache, this._sourceFactory, this._analysisOptions) { 152 this._contentCache, this._sourceFactory, this._analysisOptions) {
153 _sdkBundle = _sourceFactory.dartSdk.getLinkedBundle(); 153 _sdkBundle = _sourceFactory.dartSdk.getLinkedBundle();
154 } 154 }
(...skipping 30 matching lines...) Expand all
185 * Results might be produced even for files that have never been added 185 * Results might be produced even for files that have never been added
186 * using [addFile], for example when [getResult] was called for a file. 186 * using [addFile], for example when [getResult] was called for a file.
187 */ 187 */
188 Stream<AnalysisResult> get results async* { 188 Stream<AnalysisResult> get results async* {
189 try { 189 try {
190 while (true) { 190 while (true) {
191 // TODO(scheglov) implement state transitioning 191 // TODO(scheglov) implement state transitioning
192 await for (String why in _hasWorkStreamController.stream) { 192 await for (String why in _hasWorkStreamController.stream) {
193 // Analyze the first file in the general queue. 193 // Analyze the first file in the general queue.
194 if (_filesToAnalyze.isNotEmpty) { 194 if (_filesToAnalyze.isNotEmpty) {
195 _logger.runTimed('Analyzed ${_filesToAnalyze.length} files', () { 195 _logger.run('Analyze ${_filesToAnalyze.length} files', () {
196 while (_filesToAnalyze.isNotEmpty) { 196 while (_filesToAnalyze.isNotEmpty) {
197 String path = _filesToAnalyze.first; 197 String path = _filesToAnalyze.first;
198 _filesToAnalyze.remove(path); 198 _filesToAnalyze.remove(path);
199 _File file = _fileForPath(path); 199 _File file = _fileForPath(path);
200 _computeAndPrintErrors(file); 200 _computeAndPrintErrors(file);
201 // TODO(scheglov) yield the result 201 // TODO(scheglov) yield the result
202 } 202 }
203 }); 203 });
204 } 204 }
205 } 205 }
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
237 * transitions to "idle". 237 * transitions to "idle".
238 * 238 *
239 * Invocation of this method will not prevent a [Future] returned from 239 * Invocation of this method will not prevent a [Future] returned from
240 * [getResult] from completing with a result, but the result is not 240 * [getResult] from completing with a result, but the result is not
241 * guaranteed to be consistent with the new current file state after this 241 * guaranteed to be consistent with the new current file state after this
242 * [changeFile] invocation. 242 * [changeFile] invocation.
243 */ 243 */
244 void changeFile(String path) { 244 void changeFile(String path) {
245 // TODO(scheglov) Don't clear, schedule API signature validation. 245 // TODO(scheglov) Don't clear, schedule API signature validation.
246 _fileContentHashMap.clear(); 246 _fileContentHashMap.clear();
247 _linkedHashMap.clear(); 247 _dependencySignatureMap.clear();
248 _filesToAnalyze.add(path); 248 _filesToAnalyze.add(path);
249 _filesToAnalyze.addAll(_explicitFiles); 249 _filesToAnalyze.addAll(_explicitFiles);
250 // TODO(scheglov) name?! 250 // TODO(scheglov) name?!
251 _hasWorkStreamController.add('do it!'); 251 _hasWorkStreamController.add('do it!');
252 } 252 }
253 253
254 /** 254 /**
255 * Return the [Future] that completes with a [AnalysisResult] for the file 255 * Return the [Future] that completes with a [AnalysisResult] for the file
256 * with the given [path]. 256 * with the given [path].
257 * 257 *
258 * The [path] must be absolute and normalized. 258 * The [path] must be absolute and normalized.
259 * 259 *
260 * The [path] can be any file - explicitly or implicitly analyzed, or neither. 260 * The [path] can be any file - explicitly or implicitly analyzed, or neither.
261 * 261 *
262 * Causes the analysis state to transition to "analyzing" (if it is not in 262 * Causes the analysis state to transition to "analyzing" (if it is not in
263 * that state already), the driver will read the file and produce the analysis 263 * that state already), the driver will read the file and produce the analysis
264 * result for it, which is consistent with the current file state (including 264 * result for it, which is consistent with the current file state (including
265 * the new state of the file), prior to the next time the analysis state 265 * the new state of the file), prior to the next time the analysis state
266 * transitions to "idle". 266 * transitions to "idle".
267 */ 267 */
268 Future<AnalysisResult> getResult(String path) { 268 Future<AnalysisResult> getResult(String path) {
269 var completer = new Completer<AnalysisResult>(); 269 var completer = new Completer<AnalysisResult>();
270 _requestedFiles[path] = completer; 270 _requestedFiles
271 .putIfAbsent(path, () => <Completer<AnalysisResult>>[])
272 .add(completer);
273 _hasWorkStreamController.add(path);
271 return completer.future; 274 return completer.future;
272 } 275 }
273 276
274 /** 277 /**
275 * Remove the file with the given [path] from the list of files to analyze. 278 * Remove the file with the given [path] from the list of files to analyze.
276 * 279 *
277 * The [path] must be absolute and normalized. 280 * The [path] must be absolute and normalized.
278 * 281 *
279 * The results of analysis of the file might still be produced by the 282 * The results of analysis of the file might still be produced by the
280 * [results] stream. The driver will try to stop producing these results, 283 * [results] stream. The driver will try to stop producing these results,
(...skipping 21 matching lines...) Expand all
302 file.path.endsWith('pkg/analyzer/lib/src/generated/error.dart') || 305 file.path.endsWith('pkg/analyzer/lib/src/generated/error.dart') ||
303 file.path.endsWith('pkg/analyzer/lib/src/generated/scanner.dart') || 306 file.path.endsWith('pkg/analyzer/lib/src/generated/scanner.dart') ||
304 file.path.endsWith('pkg/analyzer/lib/src/generated/sdk_io.dart') || 307 file.path.endsWith('pkg/analyzer/lib/src/generated/sdk_io.dart') ||
305 file.path.endsWith('pkg/analyzer/lib/src/generated/visitors.dart') || 308 file.path.endsWith('pkg/analyzer/lib/src/generated/visitors.dart') ||
306 file.path.endsWith('pkg/analyzer/test/generated/constant_test.dart') || 309 file.path.endsWith('pkg/analyzer/test/generated/constant_test.dart') ||
307 file.path.endsWith('pkg/analyzer/test/source/embedder_test.dart')) { 310 file.path.endsWith('pkg/analyzer/test/source/embedder_test.dart')) {
308 return []; 311 return [];
309 } 312 }
310 313
311 List<String> errorStrings = _logger.run('Compute errors $file', () { 314 List<String> errorStrings = _logger.run('Compute errors $file', () {
312 LibraryContext libraryContext = _createLibraryContext(file); 315 _LibraryContext libraryContext = _createLibraryContext(file);
313 316
314 String errorsKey; 317 String errorsKey;
315 { 318 {
316 ApiSignature signature = new ApiSignature(); 319 ApiSignature signature = new ApiSignature();
317 signature.addString(libraryContext.node.linkedHash); 320 signature.addString(libraryContext.node.dependencySignature);
318 signature.addString(file.contentHash); 321 signature.addString(file.contentHash);
319 errorsKey = '${signature.toHex()}.errors'; 322 errorsKey = '${signature.toHex()}.errors';
320 } 323 }
321 324
322 { 325 {
323 List<int> bytes = _byteStore.get(errorsKey); 326 List<int> bytes = _byteStore.get(errorsKey);
324 if (bytes != null) { 327 if (bytes != null) {
325 fb.BufferContext bp = new fb.BufferContext.fromBytes(bytes); 328 fb.BufferContext bp = new fb.BufferContext.fromBytes(bytes);
326 int table = bp.derefObject(0); 329 int table = bp.derefObject(0);
327 return const fb.ListReader<String>(const fb.StringReader()) 330 return const fb.ListReader<String>(const fb.StringReader())
328 .vTableGet(bp, table, 0); 331 .vTableGet(bp, table, 0);
329 } 332 }
330 } 333 }
331 334
332 AnalysisContext analysisContext = _createAnalysisContext(libraryContext); 335 AnalysisContext analysisContext = _createAnalysisContext(libraryContext);
333 analysisContext.setContents(file.source, file.content); 336 analysisContext.setContents(file.source, file.content);
334 try { 337 try {
335 // Compute resolved unit. 338 // Compute resolved unit.
336 // _logger.runTimed('Computed resolved unit', () { 339 // _logger.runTimed('Computed resolved unit', () {
337 // analysisContext.resolveCompilationUnit2( 340 // analysisContext.resolveCompilationUnit2(
338 // libraryContext.file.source, libraryContext.file.source); 341 // libraryContext.file.source, libraryContext.file.source);
339 // }); 342 // });
340 // Compute errors. 343 // Compute errors.
341 List<AnalysisError> errors; 344 List<AnalysisError> errors;
342 try { 345 try {
343 errors = _logger.runTimed('Computed errors', () { 346 errors = _logger.run('Compute errors', () {
344 return analysisContext.computeErrors(file.source); 347 return analysisContext.computeErrors(file.source);
345 }); 348 });
346 } catch (e, st) { 349 } catch (e, st) {
347 // TODO(scheglov) why does it fail? 350 // TODO(scheglov) why does it fail?
348 // Caused by Bad state: Unmatched TypeParameterElementImpl T 351 // Caused by Bad state: Unmatched TypeParameterElementImpl T
349 errors = []; 352 errors = [];
350 } 353 }
351 List<String> errorStrings = errors 354 List<String> errorStrings = errors
352 .where((error) => error.errorCode is! TodoCode) 355 .where((error) => error.errorCode is! TodoCode)
353 .map((error) => error.toString()) 356 .map((error) => error.toString())
(...skipping 17 matching lines...) Expand all
371 }); 374 });
372 375
373 if (errorStrings.isNotEmpty) { 376 if (errorStrings.isNotEmpty) {
374 errorStrings.forEach((errorString) => print('\t$errorString')); 377 errorStrings.forEach((errorString) => print('\t$errorString'));
375 } else { 378 } else {
376 print('\tNO ERRORS'); 379 print('\tNO ERRORS');
377 } 380 }
378 return errorStrings; 381 return errorStrings;
379 } 382 }
380 383
381 AnalysisContext _createAnalysisContext(LibraryContext libraryContext) { 384 AnalysisContext _createAnalysisContext(_LibraryContext libraryContext) {
382 AnalysisContextImpl analysisContext = 385 AnalysisContextImpl analysisContext =
383 AnalysisEngine.instance.createAnalysisContext(); 386 AnalysisEngine.instance.createAnalysisContext();
384 387
385 analysisContext.sourceFactory = 388 analysisContext.sourceFactory =
386 new SourceFactory((_sourceFactory as SourceFactoryImpl).resolvers); 389 new SourceFactory((_sourceFactory as SourceFactoryImpl).resolvers);
387 analysisContext.resultProvider = 390 analysisContext.resultProvider =
388 new InputPackagesResultProvider(analysisContext, libraryContext.store); 391 new InputPackagesResultProvider(analysisContext, libraryContext.store);
389 analysisContext 392 analysisContext
390 .applyChanges(new ChangeSet()..addedSource(libraryContext.file.source)); 393 .applyChanges(new ChangeSet()..addedSource(libraryContext.file.source));
391 return analysisContext; 394 return analysisContext;
392 } 395 }
393 396
394 /** 397 /**
395 * Return the content in which the library represented by the given 398 * Return the context in which the library represented by the given
 396 * [libraryFile] should be analyzed. 399 * [libraryFile] should be analyzed.
397 * 400 *
398 * TODO(scheglov) We often don't need [SummaryDataStore], only linked hash. 401 * TODO(scheglov) We often don't need [SummaryDataStore], only dependency
402 * signature.
399 */ 403 */
400 LibraryContext _createLibraryContext(_File libraryFile) { 404 _LibraryContext _createLibraryContext(_File libraryFile) {
401 Map<String, _LibraryNode> nodes = <String, _LibraryNode>{};
402
403 return _logger.run('Create library context', () { 405 return _logger.run('Create library context', () {
406 Map<String, _LibraryNode> nodes = <String, _LibraryNode>{};
404 SummaryDataStore store = new SummaryDataStore(const <String>[]); 407 SummaryDataStore store = new SummaryDataStore(const <String>[]);
405 store.addBundle(null, _sdkBundle); 408 store.addBundle(null, _sdkBundle);
406 409
407 void createLibraryNodes(_File libraryFile) { 410 _LibraryNode createLibraryNodes(_File libraryFile) {
408 Uri libraryUri = libraryFile.uri; 411 Uri libraryUri = libraryFile.uri;
412
413 // URIs with the 'dart:' scheme are served from the SDK bundle.
409 if (libraryUri.scheme == 'dart') { 414 if (libraryUri.scheme == 'dart') {
410 return; 415 return null;
411 } 416 }
412 String uriStr = libraryUri.toString(); 417
413 if (!nodes.containsKey(uriStr)) { 418 String libraryUriStr = libraryUri.toString();
414 _LibraryNode node = new _LibraryNode(this, nodes, libraryUri); 419 _LibraryNode node = nodes[libraryUriStr];
415 nodes[uriStr] = node; 420 if (node == null) {
416 ReferencedUris referenced = _getReferencedUris(libraryFile); 421 node = new _LibraryNode(this, nodes, libraryUri);
422 nodes[libraryUriStr] = node;
423 _ReferencedUris referenced = _getReferencedUris(libraryFile);
417 424
418 // Append unlinked bundles. 425 // Append unlinked bundles.
419 for (String uri in referenced.parted) { 426 for (String uri in referenced.parted) {
420 _File file = libraryFile.resolveUri(uri); 427 _File file = libraryFile.resolveUri(uri);
421 PackageBundle unlinked = _getUnlinked(file); 428 PackageBundle unlinked = _getUnlinked(file);
422 node.unlinkedBundles.add(unlinked); 429 node.unlinkedBundles.add(unlinked);
423 store.addBundle(null, unlinked); 430 store.addBundle(null, unlinked);
424 } 431 }
425 432
426 // Create nodes for referenced libraries. 433 // Create nodes for referenced libraries.
427 for (String uri in referenced.imported) { 434 for (String uri in referenced.imported) {
428 _File file = libraryFile.resolveUri(uri); 435 _File file = libraryFile.resolveUri(uri);
429 createLibraryNodes(file); 436 createLibraryNodes(file);
430 } 437 }
431 for (String uri in referenced.exported) { 438 for (String uri in referenced.exported) {
432 _File file = libraryFile.resolveUri(uri); 439 _File file = libraryFile.resolveUri(uri);
433 createLibraryNodes(file); 440 createLibraryNodes(file);
434 } 441 }
435 } 442 }
443
444 // Done with this node.
445 return node;
436 } 446 }
437 447
438 _logger.runTimed2(() { 448 _LibraryNode libraryNode = _logger.run('Compute library nodes', () {
439 createLibraryNodes(libraryFile); 449 return createLibraryNodes(libraryFile);
440 }, () => 'Computed ${nodes.length} nodes'); 450 });
441 _LibraryNode libraryNode = nodes[libraryFile.uri.toString()];
442 451
443 Set<String> libraryUrisToLink = new Set<String>(); 452 Set<String> libraryUrisToLink = new Set<String>();
444 int numberOfNodesWithLinked = 0; 453 _logger.run('Load linked bundles', () {
445 _logger.runTimed2(() {
446 for (_LibraryNode node in nodes.values) { 454 for (_LibraryNode node in nodes.values) {
447 String key = '${node.linkedHash}.linked'; 455 String key = '${node.dependencySignature}.linked';
448 List<int> bytes = _byteStore.get(key); 456 List<int> bytes = _byteStore.get(key);
449 if (bytes != null) { 457 if (bytes != null) {
450 PackageBundle linked = new PackageBundle.fromBuffer(bytes); 458 PackageBundle linked = new PackageBundle.fromBuffer(bytes);
451 store.addBundle(null, linked); 459 store.addBundle(null, linked);
452 numberOfNodesWithLinked++;
453 } else { 460 } else {
454 libraryUrisToLink.add(node.uri.toString()); 461 libraryUrisToLink.add(node.uri.toString());
455 } 462 }
456 } 463 }
457 }, () => 'Loaded $numberOfNodesWithLinked linked bundles'); 464 int numOfLoaded = nodes.length - libraryUrisToLink.length;
465 _logger.writeln('Loaded $numOfLoaded linked bundles.');
466 });
458 467
459 Map<String, LinkedLibraryBuilder> linkedLibraries = {}; 468 Map<String, LinkedLibraryBuilder> linkedLibraries = {};
460 _logger.runTimed2(() { 469 _logger.run('Link bundles', () {
461 linkedLibraries = link(libraryUrisToLink, (String uri) { 470 linkedLibraries = link(libraryUrisToLink, (String uri) {
462 LinkedLibrary linkedLibrary = store.linkedMap[uri]; 471 LinkedLibrary linkedLibrary = store.linkedMap[uri];
463 if (linkedLibrary == null) { 472 if (linkedLibrary == null) {
464 throw new StateError('No linked library for: $uri'); 473 throw new StateError('No linked library for: $uri');
465 } 474 }
466 return linkedLibrary; 475 return linkedLibrary;
467 }, (String uri) { 476 }, (String uri) {
468 UnlinkedUnit unlinkedUnit = store.unlinkedMap[uri]; 477 UnlinkedUnit unlinkedUnit = store.unlinkedMap[uri];
469 if (unlinkedUnit == null) { 478 if (unlinkedUnit == null) {
470 throw new StateError('No unlinked unit for: $uri'); 479 throw new StateError('No unlinked unit for: $uri');
471 } 480 }
472 return unlinkedUnit; 481 return unlinkedUnit;
473 }, (_) => null, _analysisOptions.strongMode); 482 }, (_) => null, _analysisOptions.strongMode);
474 }, () => 'Linked ${linkedLibraries.length} bundles'); 483 _logger.writeln('Linked ${linkedLibraries.length} bundles.');
484 });
475 485
476 linkedLibraries.forEach((uri, linkedBuilder) { 486 linkedLibraries.forEach((uri, linkedBuilder) {
477 _LibraryNode node = nodes[uri]; 487 _LibraryNode node = nodes[uri];
478 String key = '${node.linkedHash}.linked'; 488 String key = '${node.dependencySignature}.linked';
479 List<int> bytes; 489 List<int> bytes;
480 { 490 {
481 PackageBundleAssembler assembler = new PackageBundleAssembler(); 491 PackageBundleAssembler assembler = new PackageBundleAssembler();
482 assembler.addLinkedLibrary(uri, linkedBuilder); 492 assembler.addLinkedLibrary(uri, linkedBuilder);
483 bytes = assembler.assemble().toBuffer(); 493 bytes = assembler.assemble().toBuffer();
484 } 494 }
485 PackageBundle linked = new PackageBundle.fromBuffer(bytes); 495 PackageBundle linked = new PackageBundle.fromBuffer(bytes);
486 store.addBundle(null, linked); 496 store.addBundle(null, linked);
487 _byteStore.put(key, bytes); 497 _byteStore.put(key, bytes);
488 }); 498 });
489 499
490 return new LibraryContext(libraryFile, libraryNode, store); 500 return new _LibraryContext(libraryFile, libraryNode, store);
491 }); 501 });
492 } 502 }
493 503
494 /** 504 /**
495 * Return the [_File] for the given [path] in [_sourceFactory]. 505 * Return the [_File] for the given [path] in [_sourceFactory].
496 */ 506 */
497 _File _fileForPath(String path) { 507 _File _fileForPath(String path) {
498 Source fileSource = _resourceProvider.getFile(path).createSource(); 508 Source fileSource = _resourceProvider.getFile(path).createSource();
499 Uri uri = _sourceFactory.restoreUri(fileSource); 509 Uri uri = _sourceFactory.restoreUri(fileSource);
500 Source source = _resourceProvider.getFile(path).createSource(uri); 510 Source source = _resourceProvider.getFile(path).createSource(uri);
501 return new _File(this, source); 511 return new _File(this, source);
502 } 512 }
503 513
504 /** 514 /**
505 * TODO(scheglov) It would be nice to get URIs of "parts" from unlinked. 515 * TODO(scheglov) It would be nice to get URIs of "parts" from unlinked.
506 */ 516 */
507 ReferencedUris _getReferencedUris(_File file) { 517 _ReferencedUris _getReferencedUris(_File file) {
508 // Try to get from the store. 518 // Try to get from the store.
509 { 519 {
510 String key = '${file.contentHash}.uris'; 520 String key = '${file.contentHash}.uris';
511 List<int> bytes = _byteStore.get(key); 521 List<int> bytes = _byteStore.get(key);
512 if (bytes != null) { 522 if (bytes != null) {
513 fb.BufferContext bp = new fb.BufferContext.fromBytes(bytes); 523 fb.BufferContext bp = new fb.BufferContext.fromBytes(bytes);
514 int table = bp.derefObject(0); 524 int table = bp.derefObject(0);
515 const fb.ListReader<String> stringListReader = 525 const fb.ListReader<String> stringListReader =
516 const fb.ListReader<String>(const fb.StringReader()); 526 const fb.ListReader<String>(const fb.StringReader());
517 bool isLibrary = const fb.BoolReader().vTableGet(bp, table, 0); 527 bool isLibrary = const fb.BoolReader().vTableGet(bp, table, 0);
518 List<String> imported = stringListReader.vTableGet(bp, table, 1); 528 List<String> imported = stringListReader.vTableGet(bp, table, 1);
519 List<String> exported = stringListReader.vTableGet(bp, table, 2); 529 List<String> exported = stringListReader.vTableGet(bp, table, 2);
520 List<String> parted = stringListReader.vTableGet(bp, table, 3); 530 List<String> parted = stringListReader.vTableGet(bp, table, 3);
521 ReferencedUris referencedUris = new ReferencedUris(); 531 _ReferencedUris referencedUris = new _ReferencedUris();
522 referencedUris.isLibrary = isLibrary; 532 referencedUris.isLibrary = isLibrary;
523 referencedUris.imported.addAll(imported); 533 referencedUris.imported.addAll(imported);
524 referencedUris.exported.addAll(exported); 534 referencedUris.exported.addAll(exported);
525 referencedUris.parted.addAll(parted); 535 referencedUris.parted.addAll(parted);
526 return referencedUris; 536 return referencedUris;
527 } 537 }
528 } 538 }
529 539
530 // Compute URIs. 540 // Compute URIs.
531 ReferencedUris referencedUris = new ReferencedUris(); 541 _ReferencedUris referencedUris = new _ReferencedUris();
532 referencedUris.parted.add(file.uri.toString()); 542 referencedUris.parted.add(file.uri.toString());
533 for (Directive directive in file.unit.directives) { 543 for (Directive directive in file.unit.directives) {
534 if (directive is PartOfDirective) { 544 if (directive is PartOfDirective) {
535 referencedUris.isLibrary = false; 545 referencedUris.isLibrary = false;
536 } else if (directive is UriBasedDirective) { 546 } else if (directive is UriBasedDirective) {
537 String uri = directive.uri.stringValue; 547 String uri = directive.uri.stringValue;
538 if (directive is ImportDirective) { 548 if (directive is ImportDirective) {
539 referencedUris.imported.add(uri); 549 referencedUris.imported.add(uri);
540 } else if (directive is ExportDirective) { 550 } else if (directive is ExportDirective) {
541 referencedUris.exported.add(uri); 551 referencedUris.exported.add(uri);
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
587 */ 597 */
588 PackageBundle _getUnlinked(_File file) { 598 PackageBundle _getUnlinked(_File file) {
589 // Try to get bytes for file's unlinked bundle. 599 // Try to get bytes for file's unlinked bundle.
590 List<int> bytes; 600 List<int> bytes;
591 { 601 {
592 String key = '${file.contentHash}.unlinked'; 602 String key = '${file.contentHash}.unlinked';
593 bytes = _byteStore.get(key); 603 bytes = _byteStore.get(key);
594 } 604 }
595 // If no cached unlinked bundle, compute it. 605 // If no cached unlinked bundle, compute it.
596 if (bytes == null) { 606 if (bytes == null) {
597 _logger.runTimed('Create unlinked for $file', () { 607 _logger.run('Create unlinked for $file', () {
598 // We read the content and recomputed the hash. 608 // We read the content and recomputed the hash.
599 // So, we need to update the key. 609 // So, we need to update the key.
600 String key = '${file.contentHash}.unlinked'; 610 String key = '${file.contentHash}.unlinked';
601 UnlinkedUnitBuilder unlinkedUnit = serializeAstUnlinked(file.unit); 611 UnlinkedUnitBuilder unlinkedUnit = serializeAstUnlinked(file.unit);
602 PackageBundleAssembler assembler = new PackageBundleAssembler(); 612 PackageBundleAssembler assembler = new PackageBundleAssembler();
603 assembler.addUnlinkedUnitWithHash( 613 assembler.addUnlinkedUnitWithHash(
604 file.uri.toString(), unlinkedUnit, key); 614 file.uri.toString(), unlinkedUnit, key);
605 bytes = assembler.assemble().toBuffer(); 615 bytes = assembler.assemble().toBuffer();
606 _byteStore.put(key, bytes); 616 _byteStore.put(key, bytes);
607 }); 617 });
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
651 661
652 /** 662 /**
653 * The full list of computed analysis errors, both syntactic and semantic. 663 * The full list of computed analysis errors, both syntactic and semantic.
654 */ 664 */
655 final List<AnalysisError> errors; 665 final List<AnalysisError> errors;
656 666
657 AnalysisResult(this.path, this.uri, this.content, this.contentHash, this.unit, 667 AnalysisResult(this.path, this.uri, this.content, this.contentHash, this.unit,
658 this.errors); 668 this.errors);
659 } 669 }
660 670
661 class LibraryContext { 671 /**
662 final _File file; 672 * This class is used to gather and print performance information.
663 final _LibraryNode node; 673 */
664 final SummaryDataStore store;
665 LibraryContext(this.file, this.node, this.store);
666 }
667
668 class PerformanceLog { 674 class PerformanceLog {
669 final StringSink sink; 675 final StringSink sink;
670 int _level = 0; 676 int _level = 0;
671 677
672 PerformanceLog(this.sink); 678 PerformanceLog(this.sink);
673 679
680 /**
681 * Return the result of the function [f] invocation and log the elapsed time.
682 *
683 * Each invocation of [run] creates a new enclosed section in the log,
684 * which begins with printing [msg], then any log output produced during
685 * [f] invocation, and ends with printing [msg] with the elapsed time.
686 */
674 /*=T*/ run/*<T>*/(String msg, /*=T*/ f()) { 687 /*=T*/ run/*<T>*/(String msg, /*=T*/ f()) {
675 Stopwatch timer = new Stopwatch()..start(); 688 Stopwatch timer = new Stopwatch()..start();
676 try { 689 try {
677 writeln('+++ $msg.'); 690 writeln('+++ $msg.');
678 _level++; 691 _level++;
679 return f(); 692 return f();
680 } finally { 693 } finally {
681 _level--; 694 _level--;
682 int ms = timer.elapsedMilliseconds; 695 int ms = timer.elapsedMilliseconds;
683 writeln('--- $msg in $ms ms.'); 696 writeln('--- $msg in $ms ms.');
684 } 697 }
685 } 698 }
686 699
687 /*=T*/ runTimed/*<T>*/(String msg, /*=T*/ f()) { 700 /**
 688 _level++; 701 * Write a new line into the log.
689 Stopwatch timer = new Stopwatch()..start(); 702 */
690 try {
691 return f();
692 } finally {
693 _level--;
694 int ms = timer.elapsedMilliseconds;
695 writeln('$msg in $ms ms.');
696 }
697 }
698
699 runTimed2(f(), String getMsg()) {
700 _level++;
701 Stopwatch timer = new Stopwatch()..start();
702 try {
703 return f();
704 } finally {
705 _level--;
706 int ms = timer.elapsedMilliseconds;
707 String msg = getMsg();
708 writeln('$msg in $ms ms.');
709 }
710 }
711
712 void writeln(String msg) { 703 void writeln(String msg) {
713 String indent = '\t' * _level; 704 String indent = '\t' * _level;
714 sink.writeln('$indent$msg'); 705 sink.writeln('$indent$msg');
715 } 706 }
716 } 707 }
717 708
718 class ReferencedUris {
719 bool isLibrary = true;
720 final List<String> imported = <String>[];
721 final List<String> exported = <String>[];
722 final List<String> parted = <String>[];
723 }
724
725 /** 709 /**
726 * Information about a file being analyzed, explicitly or implicitly. 710 * Information about a file being analyzed, explicitly or implicitly.
727 * 711 *
728 * It keeps a consistent view on its [content], [contentHash] and [unit]. 712 * It keeps a consistent view on its [content], [contentHash] and [unit].
713 *
714 * Instances of this class may only be used during computing a single analysis
715 * result and should not be cached anywhere. We need this limitation to prevent
716 * references from caches to the resolved [unit], so to element models, etc.
717 * The data structures should be short lived - computed, returned to the client,
718 * processed there and quickly thrown away.
729 */ 719 */
730 class _File { 720 class _File {
731 /** 721 /**
732 * The driver instance that is used to access [SourceFactory] and caches. 722 * The driver instance that is used to access [SourceFactory] and caches.
733 */ 723 */
734 final AnalysisDriver driver; 724 final AnalysisDriver driver;
735 725
736 /** 726 /**
 737 * The [Source] this [_File] instance represents. 727 * The [Source] this [_File] instance represents.
738 */ 728 */
(...skipping 22 matching lines...) Expand all
761 return _content; 751 return _content;
762 } 752 }
763 753
  /**
   * Ensure that the [contentHash] is filled.
   *
   * If the hash is already known in the current file state, return that
   * value. Otherwise, read the [content], compute the hash, record it in
   * the current file state, and fill the [_contentHash] field.
   *
   * The client should not remember values of this property, because the
   * value might change when [content] is read and the hash is recomputed.
   */
  String get contentHash {
    // Try the per-driver cache first, so files are hashed at most once
    // per state.
    _contentHash ??= driver._fileContentHashMap[path];
    if (_contentHash == null) {
      _readContentAndComputeHash();
    }
    return _contentHash;
  }
781 771
782 String get path => source.fullName; 772 String get path => source.fullName;
783 773
784 /** 774 /**
785 * Return the [CompilationUnit] of the file. 775 * Return the unresolved [CompilationUnit] of the file.
786 * 776 *
787 * Current this unit is resolved, it is used to compute unlinked summaries 777 * Performing resolution and computing errors is done in a separate analysis
788 * and and URIs. We use a separate analysis context to perform resolution 778 * context. In the future we might push the existing unresolved unit into the
789 * and computing errors. But this might change in the future. 779 * analysis context, so at some point the unit might become resolved.
790 */ 780 */
791 CompilationUnit get unit { 781 CompilationUnit get unit {
792 AnalysisErrorListener errorListener = AnalysisErrorListener.NULL_LISTENER; 782 AnalysisErrorListener errorListener = AnalysisErrorListener.NULL_LISTENER;
793 783
794 CharSequenceReader reader = new CharSequenceReader(content); 784 CharSequenceReader reader = new CharSequenceReader(content);
795 Scanner scanner = new Scanner(source, reader, errorListener); 785 Scanner scanner = new Scanner(source, reader, errorListener);
796 scanner.scanGenericMethodComments = driver._analysisOptions.strongMode; 786 scanner.scanGenericMethodComments = driver._analysisOptions.strongMode;
797 Token token = scanner.tokenize(); 787 Token token = scanner.tokenize();
798 LineInfo lineInfo = new LineInfo(scanner.lineStarts); 788 LineInfo lineInfo = new LineInfo(scanner.lineStarts);
799 789
(...skipping 16 matching lines...) Expand all
816 .putIfAbsent(uri, () => driver._sourceFactory.resolveUri(source, uri)); 806 .putIfAbsent(uri, () => driver._sourceFactory.resolveUri(source, uri));
817 return new _File(driver, uriSource); 807 return new _File(driver, uriSource);
818 } 808 }
819 809
820 @override 810 @override
821 String toString() => uri.toString(); 811 String toString() => uri.toString();
822 812
  /**
   * Fill the [_content] and [_contentHash] fields.
   *
   * If the [_content] field is still `null`, get the content from the
   * content cache or from the [source]. If the content cannot be accessed
   * because of an exception, it is considered to be an empty string.
   *
   * When a new content is read, the new [_contentHash] is computed and
   * the current file state is updated.
   */
  void _readContentAndComputeHash() {
    try {
      // Overlay content (e.g. unsaved editor buffers) takes precedence
      // over the content of the file on disk.
      _content = driver._contentCache.getContents(source);
      _content ??= source.contents.data;
    } catch (_) {
      // TODO(scheglov) Fix the bug with not existing sources.
      // We should not put "self URI" into cached _ReferencedUris.
      // Otherwise such not-existing/empty sources all have the same hash,
      // but their "self URIs" must be all different.
      _content = '';
    }
    // Compute the content hash: hex-encoded MD5 of the UTF-8 bytes.
    List<int> textBytes = UTF8.encode(_content);
    List<int> hashBytes = md5.convert(textBytes).bytes;
    _contentHash = hex.encode(hashBytes);
    // Update the current file state.
    driver._fileContentHashMap[path] = _contentHash;
  }
847 } 841 }
848 842
/**
 * The information needed to analyze a single library: the [file] of the
 * library, the [node] that represents it in the library dependency walk,
 * and the [store] with the summary data required for its analysis.
 */
class _LibraryContext {
  final _File file;
  final _LibraryNode node;
  final SummaryDataStore store;
  _LibraryContext(this.file, this.node, this.store);
}
852
849 class _LibraryNode { 853 class _LibraryNode {
850 final AnalysisDriver driver; 854 final AnalysisDriver driver;
851 final Map<String, _LibraryNode> nodes; 855 final Map<String, _LibraryNode> nodes;
852 final Uri uri; 856 final Uri uri;
853 final List<PackageBundle> unlinkedBundles = <PackageBundle>[]; 857 final List<PackageBundle> unlinkedBundles = <PackageBundle>[];
854 858
855 Set<_LibraryNode> transitiveDependencies; 859 Set<_LibraryNode> transitiveDependencies;
856 List<_LibraryNode> _dependencies; 860 List<_LibraryNode> _dependencies;
857 String _linkedHash; 861 String _dependencySignature;
858 862
859 _LibraryNode(this.driver, this.nodes, this.uri); 863 _LibraryNode(this.driver, this.nodes, this.uri);
860 864
861 /** 865 /**
862 * Retrieve the dependencies of this node. 866 * Retrieve the dependencies of this node.
863 */ 867 */
864 List<_LibraryNode> get dependencies { 868 List<_LibraryNode> get dependencies {
865 if (_dependencies == null) { 869 if (_dependencies == null) {
866 Set<_LibraryNode> dependencies = new Set<_LibraryNode>(); 870 Set<_LibraryNode> dependencies = new Set<_LibraryNode>();
867 871
(...skipping 26 matching lines...) Expand all
894 appendDependency(export.uri); 898 appendDependency(export.uri);
895 } 899 }
896 } 900 }
897 } 901 }
898 902
899 _dependencies = dependencies.toList(); 903 _dependencies = dependencies.toList();
900 } 904 }
901 return _dependencies; 905 return _dependencies;
902 } 906 }
903 907
  /**
   * Return the signature that depends on the URI of this library and on the
   * API signatures of the SDK bundle and of the unlinked bundles of all the
   * transitive dependencies, so it changes whenever any API visible to this
   * library changes.
   *
   * The value is computed lazily and cached in the driver
   * ([AnalysisDriver._dependencySignatureMap]), keyed by [uri], so it is
   * shared between node instances for the same library.
   */
  String get dependencySignature {
    return _dependencySignature ??=
        driver._dependencySignatureMap.putIfAbsent(uri, () {
      computeTransitiveDependencies();

      // Add all unlinked API signatures.
      List<String> signatures = <String>[];
      signatures.add(driver._sdkBundle.apiSignature);
      transitiveDependencies
          .map((node) => node.unlinkedBundles)
          .expand((bundles) => bundles)
          .map((bundle) => bundle.apiSignature)
          .forEach(signatures.add);
      // Sort so that the signature does not depend on iteration order.
      signatures.sort();

      // Combine into a single hash.
      ApiSignature signature = new ApiSignature();
      signature.addString(uri.toString());
      signatures.forEach(signature.addString);
      return signature.toHex();
    });
  }
929 930
931 @override
932 int get hashCode => uri.hashCode;
933
930 bool operator ==(other) { 934 bool operator ==(other) {
931 return other is _LibraryNode && other.uri == uri; 935 return other is _LibraryNode && other.uri == uri;
932 } 936 }
933 937
934 void computeTransitiveDependencies() { 938 void computeTransitiveDependencies() {
935 if (transitiveDependencies == null) { 939 if (transitiveDependencies == null) {
936 transitiveDependencies = new Set<_LibraryNode>(); 940 transitiveDependencies = new Set<_LibraryNode>();
937 941
938 void appendDependencies(_LibraryNode node) { 942 void appendDependencies(_LibraryNode node) {
939 if (transitiveDependencies.add(node)) { 943 if (transitiveDependencies.add(node)) {
940 node.dependencies.forEach(appendDependencies); 944 node.dependencies.forEach(appendDependencies);
941 } 945 }
942 } 946 }
943 947
944 appendDependencies(this); 948 appendDependencies(this);
945 } 949 }
946 } 950 }
947 951
948 @override 952 @override
949 String toString() => uri.toString(); 953 String toString() => uri.toString();
950 } 954 }
955
/**
 * The URIs referenced by the directives of a single file.
 *
 * [imported], [exported] and [parted] hold the URIs listed in the file's
 * `import`, `export` and `part` directives respectively. [isLibrary] is
 * whether the file is a library, as opposed to being a part of another
 * library; it defaults to `true` and is cleared by the code that fills
 * this structure.
 */
class _ReferencedUris {
  bool isLibrary = true;
  final List<String> imported = <String>[];
  final List<String> exported = <String>[];
  final List<String> parted = <String>[];
}
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698