Chromium Code Reviews

Side by Side Diff: pkg/analysis_services/lib/src/index/store/split_store.dart

Issue 484733003: Import analysis_services.dart into analysis_server.dart. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 4 months ago
(The old version of this file is empty; the file is added in this patch.)
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file.
4
5 library services.src.index.store.split_store;
6
7 import 'dart:async';
8 import 'dart:collection';
9 import 'dart:io';
10 import 'dart:typed_data';
11
12 import 'package:analysis_services/index/index.dart';
13 import 'package:analysis_services/index/index_store.dart';
14 import 'package:analysis_services/src/index/store/codec.dart';
15 import 'package:analysis_services/src/index/store/collection.dart';
16 import 'package:analyzer/src/generated/element.dart';
17 import 'package:analyzer/src/generated/engine.dart';
18 import 'package:analyzer/src/generated/java_engine.dart';
19 import 'package:analyzer/src/generated/source.dart';
20
21
22 /**
23 * A manager for file contents.
24 */
25 abstract class FileManager {
26 /**
27 * Removes all files.
28 */
29 void clear();
30
31 /**
32 * Deletes the file with the given name.
33 */
34 void delete(String name);
35
36 /**
37 * Read the entire file contents as a list of bytes.
38 */
39 Future<List<int>> read(String name);
40
41 /**
42 * Write a list of bytes to a file.
43 */
44 Future write(String name, List<int> bytes);
45 }
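
For illustration only (not part of this patch), a FileManager could be backed by a directory via dart:io; the class name and error handling below are assumptions, shown as a sketch of how the interface is meant to be used.

// Sketch only: stores each node as a file under the given directory.
class _DirectoryFileManager implements FileManager {
  final Directory _directory;

  _DirectoryFileManager(this._directory);

  File _file(String name) => new File('${_directory.path}/${name}');

  @override
  void clear() {
    if (_directory.existsSync()) {
      for (FileSystemEntity entry in _directory.listSync()) {
        entry.deleteSync();
      }
    }
  }

  @override
  void delete(String name) {
    File file = _file(name);
    if (file.existsSync()) {
      file.deleteSync();
    }
  }

  @override
  Future<List<int>> read(String name) {
    // Callers treat null bytes as "no such node", so map read errors to null.
    return _file(name).readAsBytes().catchError((_) => null);
  }

  @override
  Future write(String name, List<int> bytes) {
    return _file(name).writeAsBytes(bytes);
  }
}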
46
47
48 /**
49 * A [FileManager] based [NodeManager].
50 */
51 class FileNodeManager implements NodeManager {
52 static int _VERSION = 1;
53
54 final FileManager _fileManager;
55 final Logger _logger;
56
57 final ContextCodec contextCodec;
58 final ElementCodec elementCodec;
59 final StringCodec stringCodec;
60 final RelationshipCodec _relationshipCodec;
61
62 int _locationCount = 0;
63
64 Map<String, int> _nodeLocationCounts = new HashMap<String, int>();
65
66 FileNodeManager(this._fileManager, this._logger, this.stringCodec,
67 this.contextCodec, this.elementCodec, this._relationshipCodec);
68
69 @override
70 int get locationCount => _locationCount;
71
72 @override
73 void clear() {
74 _fileManager.clear();
75 }
76
77 @override
78 Future<IndexNode> getNode(String name) {
79 return _fileManager.read(name).then((List<int> bytes) {
80 if (bytes == null) {
81 return null;
82 }
83 _DataInputStream stream = new _DataInputStream(bytes);
84 return _readNode(stream);
85 }).catchError((e, stackTrace) {
86 _logger.logError2(
87 'Exception during reading index file ${name}',
88 new CaughtException(e, stackTrace));
89 });
90 }
91
92 @override
93 IndexNode newNode(AnalysisContext context) =>
94 new IndexNode(context, elementCodec, _relationshipCodec);
95
96 @override
97 Future putNode(String name, IndexNode node) {
98 // update location count
99 {
100 _locationCount -= _getLocationCount(name);
101 int nodeLocationCount = node.locationCount;
102 _nodeLocationCounts[name] = nodeLocationCount;
103 _locationCount += nodeLocationCount;
104 }
105 // write the node
106 return new Future.microtask(() {
107 _DataOutputStream stream = new _DataOutputStream();
108 _writeNode(node, stream);
109 var bytes = stream.getBytes();
110 return _fileManager.write(name, bytes);
111 }).catchError((e, stackTrace) {
112 _logger.logError2(
113 'Exception during writing index file ${name}',
114 new CaughtException(e, stackTrace));
115 });
116 }
117
118 @override
119 void removeNode(String name) {
120 // update location count
121 _locationCount -= _getLocationCount(name);
122 _nodeLocationCounts.remove(name);
123 // remove node
124 _fileManager.delete(name);
125 }
126
127 int _getLocationCount(String name) {
128 int locationCount = _nodeLocationCounts[name];
129 return locationCount != null ? locationCount : 0;
130 }
131
132 RelationKeyData _readElementRelationKey(_DataInputStream stream) {
133 int elementId = stream.readInt();
134 int relationshipId = stream.readInt();
135 return new RelationKeyData.forData(elementId, relationshipId);
136 }
137
138 LocationData _readLocationData(_DataInputStream stream) {
139 int elementId = stream.readInt();
140 int offset = stream.readInt();
141 int length = stream.readInt();
142 int flags = stream.readInt();
143 return new LocationData.forData(elementId, offset, length, flags);
144 }
145
146 IndexNode _readNode(_DataInputStream stream) {
147 // check version
148 {
149 int version = stream.readInt();
150 if (version != _VERSION) {
151 throw new StateError(
152 'Version ${_VERSION} expected, but ${version} found.');
153 }
154 }
155 // context
156 int contextId = stream.readInt();
157 AnalysisContext context = contextCodec.decode(contextId);
158 if (context == null) {
159 return null;
160 }
161 // relations
162 Map<RelationKeyData, List<LocationData>> relations =
163 new HashMap<RelationKeyData, List<LocationData>>();
164 int numRelations = stream.readInt();
165 for (int i = 0; i < numRelations; i++) {
166 RelationKeyData key = _readElementRelationKey(stream);
167 int numLocations = stream.readInt();
168 List<LocationData> locations = new List<LocationData>();
169 for (int j = 0; j < numLocations; j++) {
170 locations.add(_readLocationData(stream));
171 }
172 relations[key] = locations;
173 }
174 // create IndexNode
175 IndexNode node = new IndexNode(context, elementCodec, _relationshipCodec);
176 node.relations = relations;
177 return node;
178 }
179
180 void _writeElementRelationKey(_DataOutputStream stream, RelationKeyData key) {
181 stream.writeInt(key.elementId);
182 stream.writeInt(key.relationshipId);
183 }
184
185 void _writeNode(IndexNode node, _DataOutputStream stream) {
186 // version
187 stream.writeInt(_VERSION);
188 // context
189 {
190 AnalysisContext context = node.context;
191 int contextId = contextCodec.encode(context);
192 stream.writeInt(contextId);
193 }
194 // relations
195 Map<RelationKeyData, List<LocationData>> relations = node.relations;
196 stream.writeInt(relations.length);
197 relations.forEach((key, locations) {
198 _writeElementRelationKey(stream, key);
199 stream.writeInt(locations.length);
200 for (LocationData location in locations) {
201 stream.writeInt(location.elementId);
202 stream.writeInt(location.offset);
203 stream.writeInt(location.length);
204 stream.writeInt(location.flags);
205 }
206 });
207 }
208 }
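
For reference, the on-disk node layout implied by _writeNode/_readNode above (every value is a 4-byte big-endian int, see _DataOutputStream/_DataInputStream at the bottom of this file):

// version                              (_VERSION, currently 1)
// contextId                            (ContextCodec)
// relationCount
// repeated relationCount times:
//   elementId, relationshipId          (RelationKeyData)
//   locationCount
//   repeated locationCount times:
//     elementId, offset, length, flags (LocationData)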
209
210
211 /**
212 * An in-memory representation of a single index file.
213 */
214 class IndexNode {
215 final AnalysisContext context;
216
217 final ElementCodec _elementCodec;
218 final RelationshipCodec _relationshipCodec;
219
220 Map<RelationKeyData, List<LocationData>> _relations =
221 new HashMap<RelationKeyData, List<LocationData>>();
222
223
224 IndexNode(this.context, this._elementCodec, this._relationshipCodec);
225
226 /**
227 * Returns the number of locations in this node.
228 */
229 int get locationCount {
230 int locationCount = 0;
231 for (List<LocationData> locations in _relations.values) {
232 locationCount += locations.length;
233 }
234 return locationCount;
235 }
236
237 /**
238 * Returns the recorded relations.
239 */
240 Map<RelationKeyData, List<LocationData>> get relations => _relations;
241
242 /**
243 * Sets relations data.
244 * This method is used when loading data from storage.
245 */
246 void set relations(Map<RelationKeyData, List<LocationData>> relations) {
247 _relations = relations;
248 }
249
250 /**
251 * Returns the locations of the elements that have the given relationship with
252 * the given element.
253 *
254 * [element] - the element that has the relationship with the locations to
255 * be returned.
256 * [relationship] - the [Relationship] between the given [element] and the
257 * locations to be returned
258 */
259 List<Location> getRelationships(Element element, Relationship relationship) {
260 // prepare key
261 RelationKeyData key =
262 new RelationKeyData.forObject(
263 _elementCodec,
264 _relationshipCodec,
265 element,
266 relationship);
267 // find LocationData(s)
268 List<LocationData> locationDatas = _relations[key];
269 if (locationDatas == null) {
270 return Location.EMPTY_ARRAY;
271 }
272 // convert to Location(s)
273 List<Location> locations = <Location>[];
274 for (LocationData locationData in locationDatas) {
275 Location location = locationData.getLocation(context, _elementCodec);
276 if (location != null) {
277 locations.add(location);
278 }
279 }
280 return locations;
281 }
282
283 /**
284 * Records that the given [element] and [location] have the given [relationship].
285 *
286 * [element] - the [Element] that is related to the location.
287 * [relationship] - the [Relationship] between [element] and [location].
288 * [location] - the [Location] where the relationship happens.
289 */
290 void recordRelationship(Element element, Relationship relationship,
291 Location location) {
292 RelationKeyData key =
293 new RelationKeyData.forObject(
294 _elementCodec,
295 _relationshipCodec,
296 element,
297 relationship);
298 // prepare LocationData(s)
299 List<LocationData> locationDatas = _relations[key];
300 if (locationDatas == null) {
301 locationDatas = <LocationData>[];
302 _relations[key] = locationDatas;
303 }
304 // add new LocationData
305 locationDatas.add(new LocationData.forObject(_elementCodec, location));
306 }
307 }
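
A minimal in-memory round trip through IndexNode, assuming codec instances and an element/relationship/location triple are already available (hypothetical variable names):

IndexNode node = new IndexNode(context, elementCodec, relationshipCodec);
node.recordRelationship(element, relationship, location);
List<Location> locations = node.getRelationships(element, relationship);
// locations now holds the location recorded above, decoded back through the codecs.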
308
309
310 /**
311 * A container with information about a [Location].
312 */
313 class LocationData {
314 static const int _FLAG_QUALIFIED = 1 << 0;
315 static const int _FLAG_RESOLVED = 1 << 1;
316
317 final int elementId;
318 final int offset;
319 final int length;
320 final int flags;
321
322 LocationData.forData(this.elementId, this.offset, this.length, this.flags);
323
324 LocationData.forObject(ElementCodec elementCodec, Location location)
325 : elementId = elementCodec.encode(location.element),
326 offset = location.offset,
327 length = location.length,
328 flags = (location.isQualified ? _FLAG_QUALIFIED : 0) |
329 (location.isResolved ? _FLAG_RESOLVED : 0);
330
331 @override
332 int get hashCode {
333 return 31 * (31 * elementId + offset) + length;
334 }
335
336 @override
337 bool operator ==(Object obj) {
338 if (obj is! LocationData) {
339 return false;
340 }
341 LocationData other = obj;
342 return other.elementId == elementId &&
343 other.offset == offset &&
344 other.length == length &&
345 other.flags == flags;
346 }
347
348 /**
349 * Returns a {@link Location} that is represented by this {@link LocationData}.
350 */
351 Location getLocation(AnalysisContext context, ElementCodec elementCodec) {
352 Element element = elementCodec.decode(context, elementId);
353 if (element == null) {
354 return null;
355 }
356 bool isQualified = (flags & _FLAG_QUALIFIED) != 0;
357 bool isResolved = (flags & _FLAG_RESOLVED) != 0;
358 return new Location(
359 element,
360 offset,
361 length,
362 isQualified: isQualified,
363 isResolved: isResolved);
364 }
365 }
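
For reference, the flags word above packs the two booleans of a Location:

// bit 0 (_FLAG_QUALIFIED) - location.isQualified
// bit 1 (_FLAG_RESOLVED)  - location.isResolved
// e.g. flags == 3 means the location is both qualified and resolved.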
366
367
368 /**
369 * A manager for [IndexNode]s.
370 */
371 abstract class NodeManager {
372 /**
373 * The shared {@link ContextCodec} instance.
374 */
375 ContextCodec get contextCodec;
376
377 /**
378 * The shared {@link ElementCodec} instance.
379 */
380 ElementCodec get elementCodec;
381
382 /**
383 * The number of locations in all nodes.
384 */
385 int get locationCount;
386
387 /**
388 * The shared {@link StringCodec} instance.
389 */
390 StringCodec get stringCodec;
391
392 /**
393 * Removes all nodes.
394 */
395 void clear();
396
397 /**
398 * Returns the {@link IndexNode} with the given name, or {@code null} if not found.
399 */
400 Future<IndexNode> getNode(String name);
401
402 /**
403 * Returns a new {@link IndexNode}.
404 */
405 IndexNode newNode(AnalysisContext context);
406
407 /**
408 * Associates the given {@link IndexNode} with the given name.
409 */
410 void putNode(String name, IndexNode node);
411
412 /**
413 * Removes the {@link IndexNode} with the given name.
414 */
415 void removeNode(String name);
416 }
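
For illustration (not part of this patch), a NodeManager can also be purely in-memory, e.g. for tests; this sketch takes the codecs as constructor parameters to avoid assuming their constructor signatures.

// Sketch only: keeps IndexNode(s) in a map instead of files.
class _MemoryNodeManager implements NodeManager {
  final ContextCodec contextCodec;
  final ElementCodec elementCodec;
  final StringCodec stringCodec;
  final RelationshipCodec _relationshipCodec;
  final Map<String, IndexNode> _nodes = new HashMap<String, IndexNode>();

  _MemoryNodeManager(this.contextCodec, this.elementCodec, this.stringCodec,
      this._relationshipCodec);

  @override
  int get locationCount {
    int count = 0;
    for (IndexNode node in _nodes.values) {
      count += node.locationCount;
    }
    return count;
  }

  @override
  void clear() {
    _nodes.clear();
  }

  @override
  Future<IndexNode> getNode(String name) => new Future.value(_nodes[name]);

  @override
  IndexNode newNode(AnalysisContext context) =>
      new IndexNode(context, elementCodec, _relationshipCodec);

  @override
  void putNode(String name, IndexNode node) {
    _nodes[name] = node;
  }

  @override
  void removeNode(String name) {
    _nodes.remove(name);
  }
}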
417
418
419 /**
420 * An [Element] to [Location] relation key.
421 */
422 class RelationKeyData {
423 final int elementId;
424 final int relationshipId;
425
426 RelationKeyData.forData(this.elementId, this.relationshipId);
427
428 RelationKeyData.forObject(ElementCodec elementCodec,
429 RelationshipCodec relationshipCodec, Element element, Relationship relationship)
430 : elementId = elementCodec.encode(element),
431 relationshipId = relationshipCodec.encode(relationship);
432
433 @override
434 int get hashCode {
435 return 31 * elementId + relationshipId;
436 }
437
438 @override
439 bool operator ==(Object obj) {
440 if (obj is! RelationKeyData) {
441 return false;
442 }
443 RelationKeyData other = obj;
444 return other.elementId == elementId &&
445 other.relationshipId == relationshipId;
446 }
447 }
448
449
450 /**
451 * An [IndexStore] which keeps index information in separate nodes for each unit.
452 */
453 class SplitIndexStore implements IndexStore {
454 /**
455 * The [ContextCodec] to encode/decode [AnalysisContext]s.
456 */
457 ContextCodec _contextCodec;
458
459 /**
460 * Information about "universe" elements.
461 * We keep them together to avoid loading all index nodes.
462 *
463 * Order of keys: contextId, nodeId, Relationship.
464 */
465 Map<int, Map<int, Map<Relationship, List<LocationData>>>>
466 _contextNodeRelations =
467 new HashMap<int, Map<int, Map<Relationship, List<LocationData>>>>();
468
469 /**
470 * The mapping of library [Source] to the [Source]s of part units.
471 */
472 Map<AnalysisContext, Map<Source, Set<Source>>> _contextToLibraryToUnits =
473 new HashMap<AnalysisContext, Map<Source, Set<Source>>>();
474
475 /**
476 * The mapping of unit [Source] to the [Source]s of libraries it is used in.
477 */
478 Map<AnalysisContext, Map<Source, Set<Source>>> _contextToUnitToLibraries =
479 new HashMap<AnalysisContext, Map<Source, Set<Source>>>();
480
481 int _currentContextId = 0;
482
483 IndexNode _currentNode;
484
485 String _currentNodeName;
486
487 int _currentNodeNameId = 0;
488
489 /**
490 * The [ElementCodec] to encode/decode [Element]s.
491 */
492 ElementCodec _elementCodec;
493
494 /**
495 * A table mapping element names to the node names that may have relations with elements with
496 * these names.
497 */
498 IntToIntSetMap _nameToNodeNames = new IntToIntSetMap();
499
500 /**
501 * The [NodeManager] to get/put [IndexNode]s.
502 */
503 final NodeManager _nodeManager;
504
505 /**
506 * The set of known [Source]s.
507 */
508 Set<Source> _sources = new HashSet<Source>();
509
510 /**
511 * The [StringCodec] to encode/decode [String]s.
512 */
513 StringCodec _stringCodec;
514
515 SplitIndexStore(this._nodeManager) {
516 this._contextCodec = _nodeManager.contextCodec;
517 this._elementCodec = _nodeManager.elementCodec;
518 this._stringCodec = _nodeManager.stringCodec;
519 }
520
521 @override
522 String get statistics =>
523 '[${_nodeManager.locationCount} locations, ${_sources.length} sources, ${_nameToNodeNames.length} names]';
524
525 @override
526 bool aboutToIndexDart(AnalysisContext context,
527 CompilationUnitElement unitElement) {
528 context = _unwrapContext(context);
529 // may be already disposed in other thread
530 if (context.isDisposed) {
531 return false;
532 }
533 // validate unit
534 if (unitElement == null) {
535 return false;
536 }
537 LibraryElement libraryElement = unitElement.library;
538 if (libraryElement == null) {
539 return false;
540 }
541 CompilationUnitElement definingUnitElement =
542 libraryElement.definingCompilationUnit;
543 if (definingUnitElement == null) {
544 return false;
545 }
546 // prepare sources
547 Source library = definingUnitElement.source;
548 Source unit = unitElement.source;
549 // special handling for the defining library unit
550 if (unit == library) {
551 // prepare new parts
552 HashSet<Source> newParts = new HashSet<Source>();
553 for (CompilationUnitElement part in libraryElement.parts) {
554 newParts.add(part.source);
555 }
556 // prepare old parts
557 Map<Source, Set<Source>> libraryToUnits =
558 _contextToLibraryToUnits[context];
559 if (libraryToUnits == null) {
560 libraryToUnits = new HashMap<Source, Set<Source>>();
561 _contextToLibraryToUnits[context] = libraryToUnits;
562 }
563 Set<Source> oldParts = libraryToUnits[library];
564 // check if some parts are not in the library now
565 if (oldParts != null) {
566 Set<Source> noParts = oldParts.difference(newParts);
567 for (Source noPart in noParts) {
568 _removeLocations(context, library, noPart);
569 }
570 }
571 // remember new parts
572 libraryToUnits[library] = newParts;
573 }
574 // remember library/unit relations
575 _recordUnitInLibrary(context, library, unit);
576 _recordLibraryWithUnit(context, library, unit);
577 _sources.add(library);
578 _sources.add(unit);
579 // prepare node
580 String libraryName = library.fullName;
581 String unitName = unit.fullName;
582 int libraryNameIndex = _stringCodec.encode(libraryName);
583 int unitNameIndex = _stringCodec.encode(unitName);
584 _currentNodeName = '${libraryNameIndex}_${unitNameIndex}.index';
585 _currentNodeNameId = _stringCodec.encode(_currentNodeName);
586 _currentNode = _nodeManager.newNode(context);
587 _currentContextId = _contextCodec.encode(context);
588 // remove Universe information for the current node
589 for (Map<int, dynamic> nodeRelations in _contextNodeRelations.values) {
590 nodeRelations.remove(_currentNodeNameId);
591 }
592 // done
593 return true;
594 }
595
596 @override
597 bool aboutToIndexHtml(AnalysisContext context, HtmlElement htmlElement) {
598 context = _unwrapContext(context);
599 // may be already disposed in other thread
600 if (context.isDisposed) {
601 return false;
602 }
603 // remove locations
604 Source source = htmlElement.source;
605 _removeLocations(context, null, source);
606 // remember library/unit relations
607 _recordUnitInLibrary(context, null, source);
608 // prepare node
609 String sourceName = source.fullName;
610 int sourceNameIndex = _stringCodec.encode(sourceName);
611 _currentNodeName = '${sourceNameIndex}.index';
612 _currentNodeNameId = _stringCodec.encode(_currentNodeName);
613 _currentNode = _nodeManager.newNode(context);
614 return true;
615 }
616
617 @override
618 void clear() {
619 _contextNodeRelations.clear();
620 _nodeManager.clear();
621 _nameToNodeNames.clear();
622 }
623
624 @override
625 void doneIndex() {
626 if (_currentNode != null) {
627 _nodeManager.putNode(_currentNodeName, _currentNode);
628 _currentNodeName = null;
629 _currentNodeNameId = -1;
630 _currentNode = null;
631 _currentContextId = -1;
632 }
633 }
634
635 Future<List<Location>> getRelationships(Element element,
636 Relationship relationship) {
637 // special support for UniverseElement
638 if (identical(element, UniverseElement.INSTANCE)) {
639 List<Location> locations = _getRelationshipsUniverse(relationship);
640 return new Future.value(locations);
641 }
642 // prepare node names
643 int nameId = _elementCodec.encodeHash(element);
644 List<int> nodeNameIds = _nameToNodeNames.get(nameId);
645 // prepare Future(s) for reading each IndexNode
646 List<Future<List<Location>>> nodeFutures = <Future<List<Location>>>[];
647 for (int nodeNameId in nodeNameIds) {
648 String nodeName = _stringCodec.decode(nodeNameId);
649 Future<IndexNode> nodeFuture = _nodeManager.getNode(nodeName);
650 Future<List<Location>> locationsFuture = nodeFuture.then((node) {
651 if (node == null) {
652 // TODO(scheglov) remove node
653 return Location.EMPTY_ARRAY;
654 }
655 return node.getRelationships(element, relationship);
656 });
657 nodeFutures.add(locationsFuture);
658 }
659 // return Future that merges separate IndexNode Location(s)
660 return Future.wait(nodeFutures).then((List<List<Location>> locationsList) {
661 List<Location> allLocations = <Location>[];
662 for (List<Location> locations in locationsList) {
663 allLocations.addAll(locations);
664 }
665 return allLocations;
666 });
667 }
668
669 @override
670 void recordRelationship(Element element, Relationship relationship,
671 Location location) {
672 if (element == null || location == null) {
673 return;
674 }
675 // special support for UniverseElement
676 if (identical(element, UniverseElement.INSTANCE)) {
677 _recordRelationshipUniverse(relationship, location);
678 return;
679 }
680 // other elements
681 _recordNodeNameForElement(element);
682 _currentNode.recordRelationship(element, relationship, location);
683 }
684
685 @override
686 void removeContext(AnalysisContext context) {
687 context = _unwrapContext(context);
688 if (context == null) {
689 return;
690 }
691 // remove sources
692 removeSources(context, null);
693 // remove context information
694 _contextToLibraryToUnits.remove(context);
695 _contextToUnitToLibraries.remove(context);
696 _contextNodeRelations.remove(_contextCodec.encode(context));
697 // remove context from codec
698 _contextCodec.remove(context);
699 }
700
701 @override
702 void removeSource(AnalysisContext context, Source source) {
703 context = _unwrapContext(context);
704 if (context == null) {
705 return;
706 }
707 // remove nodes for unit/library pairs
708 Map<Source, Set<Source>> unitToLibraries =
709 _contextToUnitToLibraries[context];
710 if (unitToLibraries != null) {
711 Set<Source> libraries = unitToLibraries.remove(source);
712 if (libraries != null) {
713 for (Source library in libraries) {
714 _removeLocations(context, library, source);
715 }
716 }
717 }
718 // remove nodes for library/unit pairs
719 Map<Source, Set<Source>> libraryToUnits = _contextToLibraryToUnits[context];
720 if (libraryToUnits != null) {
721 Set<Source> units = libraryToUnits.remove(source);
722 if (units != null) {
723 for (Source unit in units) {
724 _removeLocations(context, source, unit);
725 }
726 }
727 }
728 }
729
730 @override
731 void removeSources(AnalysisContext context, SourceContainer container) {
732 context = _unwrapContext(context);
733 if (context == null) {
734 return;
735 }
736 // remove nodes for unit/library pairs
737 Map<Source, Set<Source>> unitToLibraries =
738 _contextToUnitToLibraries[context];
739 if (unitToLibraries != null) {
740 List<Source> units = new List<Source>.from(unitToLibraries.keys);
741 for (Source source in units) {
742 if (container == null || container.contains(source)) {
743 removeSource(context, source);
744 }
745 }
746 }
747 // remove nodes for library/unit pairs
748 Map<Source, Set<Source>> libraryToUnits = _contextToLibraryToUnits[context];
749 if (libraryToUnits != null) {
750 List<Source> libraries = new List<Source>.from(libraryToUnits.keys);
751 for (Source source in libraries) {
752 if (container == null || container.contains(source)) {
753 removeSource(context, source);
754 }
755 }
756 }
757 }
758
759 String _getElementName(Element element) => element.name;
760
761 List<Location> _getRelationshipsUniverse(Relationship relationship) {
762 List<Location> locations = <Location>[];
763 _contextNodeRelations.forEach((contextId, contextRelations) {
764 AnalysisContext context = _contextCodec.decode(contextId);
765 if (context != null) {
766 for (Map<Relationship, List<LocationData>> nodeRelations in
767 contextRelations.values) {
768 List<LocationData> nodeLocations = nodeRelations[relationship];
769 if (nodeLocations != null) {
770 for (LocationData locationData in nodeLocations) {
771 Location location =
772 locationData.getLocation(context, _elementCodec);
773 if (location != null) {
774 locations.add(location);
775 }
776 }
777 }
778 }
779 }
780 });
781 return locations;
782 }
783
784 void _recordLibraryWithUnit(AnalysisContext context, Source library,
785 Source unit) {
786 Map<Source, Set<Source>> libraryToUnits = _contextToLibraryToUnits[context];
787 if (libraryToUnits == null) {
788 libraryToUnits = new HashMap<Source, Set<Source>>();
789 _contextToLibraryToUnits[context] = libraryToUnits;
790 }
791 Set<Source> units = libraryToUnits[library];
792 if (units == null) {
793 units = new HashSet<Source>();
794 libraryToUnits[library] = units;
795 }
796 units.add(unit);
797 }
798
799 void _recordNodeNameForElement(Element element) {
800 int nameId = _elementCodec.encodeHash(element);
801 _nameToNodeNames.add(nameId, _currentNodeNameId);
802 }
803
804 void _recordRelationshipUniverse(Relationship relationship,
805 Location location) {
806 // in current context
807 Map<int, Map<Relationship, List<LocationData>>> nodeRelations =
808 _contextNodeRelations[_currentContextId];
809 if (nodeRelations == null) {
810 nodeRelations = new HashMap<int, Map<Relationship, List<LocationData>>>();
811 _contextNodeRelations[_currentContextId] = nodeRelations;
812 }
813 // in current node
814 Map<Relationship, List<LocationData>> relations =
815 nodeRelations[_currentNodeNameId];
816 if (relations == null) {
817 relations = new HashMap<Relationship, List<LocationData>>();
818 nodeRelations[_currentNodeNameId] = relations;
819 }
820 // for the given relationship
821 List<LocationData> locations = relations[relationship];
822 if (locations == null) {
823 locations = <LocationData>[];
824 relations[relationship] = locations;
825 }
826 // record LocationData
827 locations.add(new LocationData.forObject(_elementCodec, location));
828 }
829
830 void _recordUnitInLibrary(AnalysisContext context, Source library,
831 Source unit) {
832 Map<Source, Set<Source>> unitToLibraries =
833 _contextToUnitToLibraries[context];
834 if (unitToLibraries == null) {
835 unitToLibraries = new HashMap<Source, Set<Source>>();
836 _contextToUnitToLibraries[context] = unitToLibraries;
837 }
838 Set<Source> libraries = unitToLibraries[unit];
839 if (libraries == null) {
840 libraries = new HashSet<Source>();
841 unitToLibraries[unit] = libraries;
842 }
843 libraries.add(library);
844 }
845
846 /**
847 * Removes locations recorded in the given library/unit pair.
848 */
849 void _removeLocations(AnalysisContext context, Source library, Source unit) {
850 // remove node
851 String libraryName = library != null ? library.fullName : null;
852 String unitName = unit.fullName;
853 int libraryNameIndex = _stringCodec.encode(libraryName);
854 int unitNameIndex = _stringCodec.encode(unitName);
855 String nodeName = '${libraryNameIndex}_${unitNameIndex}.index';
856 int nodeNameId = _stringCodec.encode(nodeName);
857 _nodeManager.removeNode(nodeName);
858 // remove source
859 _sources.remove(library);
860 _sources.remove(unit);
861 // remove universe relations
862 {
863 int contextId = _contextCodec.encode(context);
864 Map<int, Object> nodeRelations = _contextNodeRelations[contextId];
865 if (nodeRelations != null) {
866 nodeRelations.remove(nodeNameId);
867 }
868 }
869 }
870
871 /**
872 * When logging is on, [AnalysisEngine] actually creates
873 * [InstrumentedAnalysisContextImpl], which wraps the [AnalysisContextImpl] used to create
874 * actual [Element]s. So, in the index we have to unwrap [InstrumentedAnalysisContextImpl]
875 * before performing any operation.
876 */
877 AnalysisContext _unwrapContext(AnalysisContext context) {
878 if (context is InstrumentedAnalysisContextImpl) {
879 context = (context as InstrumentedAnalysisContextImpl).basis;
880 }
881 return context;
882 }
883 }
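
A usage sketch of the store's lifecycle as exposed above (hypothetical nodeManager/context/element values): index a unit between aboutToIndexDart() and doneIndex(), then query asynchronously.

SplitIndexStore store = new SplitIndexStore(nodeManager);
if (store.aboutToIndexDart(context, unitElement)) {
  store.recordRelationship(element, relationship, location);
  store.doneIndex();
}
store.getRelationships(element, relationship).then((List<Location> locations) {
  // all locations related to element, merged across the stored nodes
});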
884
885
886 class _DataInputStream {
887 ByteData _byteData;
888 int _byteOffset = 0;
889
890 _DataInputStream(List<int> bytes) {
891 ByteBuffer buffer = new Uint8List.fromList(bytes).buffer;
892 _byteData = new ByteData.view(buffer);
893 }
894
895 int readInt() {
896 int result = _byteData.getInt32(_byteOffset);
897 _byteOffset += 4;
898 return result;
899 }
900 }
901
902
903 class _DataOutputStream {
904 BytesBuilder _buffer = new BytesBuilder();
905
906 Uint8List getBytes() {
907 return new Uint8List.fromList(_buffer.takeBytes());
908 }
909
910 void writeInt(int value) {
911 _buffer.addByte((value & 0xFF000000) >> 24);
912 _buffer.addByte((value & 0x00FF0000) >> 16);
913 _buffer.addByte((value & 0x0000FF00) >> 8);
914 _buffer.addByte(value & 0xFF);
915 }
916 }
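
A small round-trip check of the two streams above (they are library-private, so this would live in this file or its test): writeInt() emits 4 big-endian bytes and readInt() uses getInt32(), which defaults to big-endian, so values survive the round trip.

_DataOutputStream out = new _DataOutputStream();
out.writeInt(2014);
out.writeInt(-1);
_DataInputStream input = new _DataInputStream(out.getBytes());
assert(input.readInt() == 2014);
assert(input.readInt() == -1);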