Chromium Code Reviews

Side by Side Diff: pkg/analysis_server/lib/src/index/store/split_store.dart

Issue 365193004: Move Index and IndexStore implementations into Engine. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 5 months ago
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file.
4
5 library index.split_store;
6
7 import 'dart:async';
8 import 'dart:collection';
9 import 'dart:io';
10 import 'dart:typed_data';
11
12 import 'package:analysis_server/src/index/store/collection.dart';
13 import 'package:analyzer/src/generated/element.dart';
14 import 'package:analyzer/src/generated/engine.dart';
15 import 'package:analyzer/src/generated/index.dart';
16 import 'package:analyzer/src/generated/java_engine.dart';
17 import 'package:analyzer/src/generated/source.dart';
18 import 'package:analysis_server/src/index/store/codec.dart';
19
20
21 /**
22 * A manager for file contents.
23 */
24 abstract class FileManager {
25 /**
26 * Removes all files.
27 */
28 void clear();
29
30 /**
31 * Deletes the file with the given name.
32 */
33 void delete(String name);
34
35 /**
36 * Read the entire file contents as a list of bytes.
37 */
38 Future<List<int>> read(String name);
39
40 /**
41 * Write a list of bytes to a file.
42 */
43 Future write(String name, List<int> bytes);
44 }
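For illustration only, a minimal in-memory implementation of this interface might look like the sketch below. The class name _MemoryFileManager is hypothetical and not part of this patch; it relies on the dart:async and dart:collection imports already present in this library, and it assumes read() should complete with null for an unknown file, which is how FileNodeManager.getNode() below treats missing data.

/**
 * A hypothetical in-memory [FileManager], shown only to illustrate the
 * contract above.
 */
class _MemoryFileManager implements FileManager {
  final Map<String, List<int>> _files = new HashMap<String, List<int>>();

  @override
  void clear() {
    _files.clear();
  }

  @override
  void delete(String name) {
    _files.remove(name);
  }

  @override
  Future<List<int>> read(String name) {
    // Completes with null when the file is unknown.
    return new Future.value(_files[name]);
  }

  @override
  Future write(String name, List<int> bytes) {
    _files[name] = bytes;
    return new Future.value();
  }
}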
45
46
47 /**
48 * A [FileManager] based [NodeManager].
49 */
50 class FileNodeManager implements NodeManager {
51 static int _VERSION = 1;
52
53 final ContextCodec contextCodec;
54
55 final ElementCodec elementCodec;
56
57 final StringCodec stringCodec;
58
59 final FileManager _fileManager;
60
61 int _locationCount = 0;
62
63 final Logger _logger;
64
65 Map<String, int> _nodeLocationCounts = new HashMap<String, int>();
66
67 final RelationshipCodec _relationshipCodec;
68
69 FileNodeManager(this._fileManager, this._logger, this.stringCodec,
70 this.contextCodec, this.elementCodec, this._relationshipCodec);
71
72 @override
73 int get locationCount => _locationCount;
74
75 @override
76 void clear() {
77 _fileManager.clear();
78 }
79
80 @override
81 Future<IndexNode> getNode(String name) {
82 return _fileManager.read(name).then((List<int> bytes) {
83 if (bytes == null) {
84 return null;
85 }
86 _DataInputStream stream = new _DataInputStream(bytes);
87 return _readNode(stream);
88 }).catchError((e, stackTrace) {
89 _logger.logError2('Exception during reading index file ${name}',
90 new CaughtException(e, stackTrace));
91 });
92 }
93
94 @override
95 IndexNode newNode(AnalysisContext context) => new IndexNode(context,
96 elementCodec, _relationshipCodec);
97
98 @override
99 Future putNode(String name, IndexNode node) {
100 // update location count
101 {
102 _locationCount -= _getLocationCount(name);
103 int nodeLocationCount = node.locationCount;
104 _nodeLocationCounts[name] = nodeLocationCount;
105 _locationCount += nodeLocationCount;
106 }
107 // write the node
108 return new Future.microtask(() {
109 _DataOutputStream stream = new _DataOutputStream();
110 _writeNode(node, stream);
111 var bytes = stream.getBytes();
112 return _fileManager.write(name, bytes);
113 }).catchError((e, stackTrace) {
114 _logger.logError2('Exception during writing index file ${name}',
115 new CaughtException(e, stackTrace));
116 });
117 }
118
119 @override
120 void removeNode(String name) {
121 // update location count
122 _locationCount -= _getLocationCount(name);
123 _nodeLocationCounts.remove(name);
124 // remove node
125 _fileManager.delete(name);
126 }
127
128 int _getLocationCount(String name) {
129 int locationCount = _nodeLocationCounts[name];
130 return locationCount != null ? locationCount : 0;
131 }
132
133 RelationKeyData _readElementRelationKey(_DataInputStream stream) {
134 int elementId = stream.readInt();
135 int relationshipId = stream.readInt();
136 return new RelationKeyData.forData(elementId, relationshipId);
137 }
138
139 LocationData _readLocationData(_DataInputStream stream) {
140 int elementId = stream.readInt();
141 int offset = stream.readInt();
142 int length = stream.readInt();
143 return new LocationData.forData(elementId, offset, length);
144 }
145
146 IndexNode _readNode(_DataInputStream stream) {
147 // check version
148 {
149 int version = stream.readInt();
150 if (version != _VERSION) {
151 throw new StateError(
152 'Version ${_VERSION} expected, but ${version} found.');
153 }
154 }
155 // context
156 int contextId = stream.readInt();
157 AnalysisContext context = contextCodec.decode(contextId);
158 if (context == null) {
159 return null;
160 }
161 // relations
162 Map<RelationKeyData, List<LocationData>> relations =
163 new HashMap<RelationKeyData, List<LocationData>>();
164 int numRelations = stream.readInt();
165 for (int i = 0; i < numRelations; i++) {
166 RelationKeyData key = _readElementRelationKey(stream);
167 int numLocations = stream.readInt();
168 List<LocationData> locations = new List<LocationData>();
169 for (int j = 0; j < numLocations; j++) {
170 locations.add(_readLocationData(stream));
171 }
172 relations[key] = locations;
173 }
174 // create IndexNode
175 IndexNode node = new IndexNode(context, elementCodec, _relationshipCodec);
176 node.relations = relations;
177 return node;
178 }
179
180 void _writeElementRelationKey(_DataOutputStream stream, RelationKeyData key) {
181 stream.writeInt(key.elementId);
182 stream.writeInt(key.relationshipId);
183 }
184
185 void _writeNode(IndexNode node, _DataOutputStream stream) {
186 // version
187 stream.writeInt(_VERSION);
188 // context
189 {
190 AnalysisContext context = node.context;
191 int contextId = contextCodec.encode(context);
192 stream.writeInt(contextId);
193 }
194 // relations
195 Map<RelationKeyData, List<LocationData>> relations = node.relations;
196 stream.writeInt(relations.length);
197 relations.forEach((key, locations) {
198 _writeElementRelationKey(stream, key);
199 stream.writeInt(locations.length);
200 for (LocationData location in locations) {
201 stream.writeInt(location.elementId);
202 stream.writeInt(location.offset);
203 stream.writeInt(location.length);
204 }
205 });
206 }
207 }
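For reference, the node file format produced by _writeNode (and consumed by _readNode) is a flat sequence of 32-bit big-endian integers. The summary below is derived from the code above, not from a separate specification:

// Node file layout (every value is one 32-bit big-endian integer written by
// _DataOutputStream.writeInt):
//
//   version                        _VERSION, currently 1
//   contextId                      ContextCodec encoding of the AnalysisContext
//   relationCount                  number of RelationKeyData entries
//   for each relation:
//     elementId, relationshipId    the RelationKeyData
//     locationCount                number of LocationData entries
//     for each location:
//       elementId, offset, length  one LocationData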
208
209
210 /**
211 * An in-memory presentation of a single index file.
212 */
213 class IndexNode {
214 final AnalysisContext context;
215
216 final ElementCodec _elementCodec;
217
218 Map<RelationKeyData, List<LocationData>> _relations =
219 new HashMap<RelationKeyData, List<LocationData>>();
220
221 final RelationshipCodec _relationshipCodec;
222
223 IndexNode(this.context, this._elementCodec, this._relationshipCodec);
224
225 /**
226 * Returns the number of locations in this node.
227 */
228 int get locationCount {
229 int locationCount = 0;
230 for (List<LocationData> locations in _relations.values) {
231 locationCount += locations.length;
232 }
233 return locationCount;
234 }
235
236 /**
237 * Returns the recorded relations.
238 */
239 Map<RelationKeyData, List<LocationData>> get relations => _relations;
240
241 /**
242 * Sets the relations data. This method is used when loading data from storage.
243 */
244 void set relations(Map<RelationKeyData, List<LocationData>> relations) {
245 this._relations.clear();
246 this._relations.addAll(relations);
247 }
248
249 /**
250 * Return the locations of the elements that have the given relationship with the given element.
251 *
252 * @param element the element that has the relationship with the locations to be returned
253 * @param relationship the [Relationship] between the given element and the locations to be
254 * returned
255 */
256 List<Location> getRelationships(Element element, Relationship relationship) {
257 // prepare key
258 RelationKeyData key = new RelationKeyData.forObject(_elementCodec,
259 _relationshipCodec, element, relationship);
260 // find LocationData(s)
261 List<LocationData> locationDatas = _relations[key];
262 if (locationDatas == null) {
263 return Location.EMPTY_ARRAY;
264 }
265 // convert to Location(s)
266 List<Location> locations = <Location>[];
267 for (LocationData locationData in locationDatas) {
268 Location location = locationData.getLocation(context, _elementCodec);
269 if (location != null) {
270 locations.add(location);
271 }
272 }
273 return locations;
274 }
275
276 /**
277 * Records that the given element and location have the given relationship.
278 *
279 * @param element the element that is related to the location
280 * @param relationship the [Relationship] between the element and the location
281 * @param location the [Location] where relationship happens
282 */
283 void recordRelationship(Element element, Relationship relationship,
284 Location location) {
285 RelationKeyData key = new RelationKeyData.forObject(_elementCodec,
286 _relationshipCodec, element, relationship);
287 // prepare LocationData(s)
288 List<LocationData> locationDatas = _relations[key];
289 if (locationDatas == null) {
290 locationDatas = <LocationData>[];
291 _relations[key] = locationDatas;
292 }
293 // add new LocationData
294 locationDatas.add(new LocationData.forObject(_elementCodec, location));
295 }
296 }
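As a brief usage sketch of this class, assuming a context, the shared codecs, and an element/relationship/location are already in scope (for example, supplied by a NodeManager and the index contributor):

// Sketch only: `context`, `elementCodec`, `relationshipCodec`, `element`,
// `relationship` and `location` are assumed to exist in the surrounding code.
IndexNode node = new IndexNode(context, elementCodec, relationshipCodec);
node.recordRelationship(element, relationship, location);
List<Location> locations = node.getRelationships(element, relationship);
// `locations` now holds one Location equivalent to `location`, provided the
// element can still be decoded in `context`.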
297
298
299 /**
300 * A container with information about a [Location].
301 */
302 class LocationData {
303 final int elementId;
304 final int length;
305 final int offset;
306
307 LocationData.forData(this.elementId, this.offset, this.length);
308
309 LocationData.forObject(ElementCodec elementCodec, Location location)
310 : elementId = elementCodec.encode(location.element),
311 offset = location.offset,
312 length = location.length;
313
314 @override
315 int get hashCode {
316 return 31 * (31 * elementId + offset) + length;
317 }
318
319 @override
320 bool operator ==(Object obj) {
321 if (obj is! LocationData) {
322 return false;
323 }
324 LocationData other = obj;
325 return other.elementId == elementId && other.offset == offset &&
326 other.length == length;
327 }
328
329 /**
330 * Returns a [Location] that is represented by this [LocationData].
331 */
332 Location getLocation(AnalysisContext context, ElementCodec elementCodec) {
333 Element element = elementCodec.decode(context, elementId);
334 if (element == null) {
335 return null;
336 }
337 return new Location(element, offset, length);
338 }
339 }
340
341
342 /**
343 * A manager for [IndexNode]s.
344 */
345 abstract class NodeManager {
346 /**
347 * The shared [ContextCodec] instance.
348 */
349 ContextCodec get contextCodec;
350
351 /**
352 * The shared [ElementCodec] instance.
353 */
354 ElementCodec get elementCodec;
355
356 /**
357 * The total number of locations in all nodes.
358 */
359 int get locationCount;
360
361 /**
362 * The shared [StringCodec] instance.
363 */
364 StringCodec get stringCodec;
365
366 /**
367 * Removes all nodes.
368 */
369 void clear();
370
371 /**
372 * Returns the [IndexNode] with the given name, or `null` if it is not found.
373 */
374 Future<IndexNode> getNode(String name);
375
376 /**
377 * Returns a new [IndexNode].
378 */
379 IndexNode newNode(AnalysisContext context);
380
381 /**
382 * Associates the given [IndexNode] with the given name.
383 */
384 void putNode(String name, IndexNode node);
385
386 /**
387 * Removes the [IndexNode] with the given name.
388 */
389 void removeNode(String name);
390 }
391
392
393 /**
394 * An [Element] to [Location] relation key.
395 */
396 class RelationKeyData {
397 final int elementId;
398 final int relationshipId;
399
400 RelationKeyData.forData(this.elementId, this.relationshipId);
401
402 RelationKeyData.forObject(ElementCodec elementCodec,
403 RelationshipCodec relationshipCodec, Element element, Relationship relationship)
404 : elementId = elementCodec.encode(element),
405 relationshipId = relationshipCodec.encode(relationship);
406
407 @override
408 int get hashCode {
409 return 31 * elementId + relationshipId;
410 }
411
412 @override
413 bool operator ==(Object obj) {
414 if (obj is! RelationKeyData) {
415 return false;
416 }
417 RelationKeyData other = obj;
418 return other.elementId == elementId && other.relationshipId ==
419 relationshipId;
420 }
421 }
422
423
424 /**
425 * An [IndexStore] which keeps index information in separate nodes for each unit.
426 */
427 class SplitIndexStore implements IndexStore {
428 /**
429 * The [ContextCodec] to encode/decode [AnalysisContext]s.
430 */
431 ContextCodec _contextCodec;
432
433 /**
434 * Information about "universe" elements.
435 * We need to keep them together to avoid loading all index nodes.
436 *
437 * Order of keys: contextId, nodeId, Relationship.
438 */
439 Map<int, Map<int, Map<Relationship, List<LocationData>>>>
440 _contextNodeRelations = new HashMap<int, Map<int, Map<Relationship,
441 List<LocationData>>>>();
442
443 /**
444 * The mapping of library [Source] to the [Source]s of part units.
445 */
446 Map<AnalysisContext, Map<Source, Set<Source>>> _contextToLibraryToUnits =
447 new HashMap<AnalysisContext, Map<Source, Set<Source>>>();
448
449 /**
450 * The mapping of unit [Source] to the [Source]s of libraries it is used in.
451 */
452 Map<AnalysisContext, Map<Source, Set<Source>>> _contextToUnitToLibraries =
453 new HashMap<AnalysisContext, Map<Source, Set<Source>>>();
454
455 int _currentContextId = 0;
456
457 IndexNode _currentNode;
458
459 String _currentNodeName;
460
461 int _currentNodeNameId = 0;
462
463 /**
464 * The [ElementCodec] to encode/decode [Element]s.
465 */
466 ElementCodec _elementCodec;
467
468 /**
469 * A table mapping element names to the node names that may have relations with elements with
470 * these names.
471 */
472 IntToIntSetMap _nameToNodeNames = new IntToIntSetMap();
473
474 /**
475 * The [NodeManager] to get/put [IndexNode]s.
476 */
477 final NodeManager _nodeManager;
478
479 /**
480 * The set of known [Source]s.
481 */
482 Set<Source> _sources = new HashSet<Source>();
483
484 /**
485 * The [StringCodec] to encode/decode [String]s.
486 */
487 StringCodec _stringCodec;
488
489 SplitIndexStore(this._nodeManager) {
490 this._contextCodec = _nodeManager.contextCodec;
491 this._elementCodec = _nodeManager.elementCodec;
492 this._stringCodec = _nodeManager.stringCodec;
493 }
494
495 @override
496 String get statistics =>
497 '[${_nodeManager.locationCount} locations, ${_sources.length} sources, ${_nameToNodeNames.length} names]';
498
499 @override
500 bool aboutToIndexDart(AnalysisContext context,
501 CompilationUnitElement unitElement) {
502 context = _unwrapContext(context);
503 // may already be disposed in another thread
504 if (context.isDisposed) {
505 return false;
506 }
507 // validate unit
508 if (unitElement == null) {
509 return false;
510 }
511 LibraryElement libraryElement = unitElement.library;
512 if (libraryElement == null) {
513 return false;
514 }
515 CompilationUnitElement definingUnitElement =
516 libraryElement.definingCompilationUnit;
517 if (definingUnitElement == null) {
518 return false;
519 }
520 // prepare sources
521 Source library = definingUnitElement.source;
522 Source unit = unitElement.source;
523 // special handling for the defining library unit
524 if (unit == library) {
525 // prepare new parts
526 HashSet<Source> newParts = new HashSet<Source>();
527 for (CompilationUnitElement part in libraryElement.parts) {
528 newParts.add(part.source);
529 }
530 // prepare old parts
531 Map<Source, Set<Source>> libraryToUnits =
532 _contextToLibraryToUnits[context];
533 if (libraryToUnits == null) {
534 libraryToUnits = new HashMap<Source, Set<Source>>();
535 _contextToLibraryToUnits[context] = libraryToUnits;
536 }
537 Set<Source> oldParts = libraryToUnits[library];
538 // check if some parts are not in the library now
539 if (oldParts != null) {
540 Set<Source> noParts = oldParts.difference(newParts);
541 for (Source noPart in noParts) {
542 _removeLocations(context, library, noPart);
543 }
544 }
545 // remember new parts
546 libraryToUnits[library] = newParts;
547 }
548 // remember library/unit relations
549 _recordUnitInLibrary(context, library, unit);
550 _recordLibraryWithUnit(context, library, unit);
551 _sources.add(library);
552 _sources.add(unit);
553 // prepare node
554 String libraryName = library.fullName;
555 String unitName = unit.fullName;
556 int libraryNameIndex = _stringCodec.encode(libraryName);
557 int unitNameIndex = _stringCodec.encode(unitName);
558 _currentNodeName = '${libraryNameIndex}_${unitNameIndex}.index';
559 _currentNodeNameId = _stringCodec.encode(_currentNodeName);
560 _currentNode = _nodeManager.newNode(context);
561 _currentContextId = _contextCodec.encode(context);
562 // remove Universe information for the current node
563 for (Map<int, dynamic> nodeRelations in _contextNodeRelations.values) {
564 nodeRelations.remove(_currentNodeNameId);
565 }
566 // done
567 return true;
568 }
569
570 @override
571 bool aboutToIndexHtml(AnalysisContext context, HtmlElement htmlElement) {
572 context = _unwrapContext(context);
573 // may already be disposed in another thread
574 if (context.isDisposed) {
575 return false;
576 }
577 // remove locations
578 Source source = htmlElement.source;
579 _removeLocations(context, null, source);
580 // remember library/unit relations
581 _recordUnitInLibrary(context, null, source);
582 // prepare node
583 String sourceName = source.fullName;
584 int sourceNameIndex = _stringCodec.encode(sourceName);
585 _currentNodeName = '${sourceNameIndex}.index';
586 _currentNodeNameId = _stringCodec.encode(_currentNodeName);
587 _currentNode = _nodeManager.newNode(context);
588 return true;
589 }
590
591 @override
592 void clear() {
593 _contextNodeRelations.clear();
594 _nodeManager.clear();
595 _nameToNodeNames.clear();
596 }
597
598 @override
599 void doneIndex() {
600 if (_currentNode != null) {
601 _nodeManager.putNode(_currentNodeName, _currentNode);
602 _currentNodeName = null;
603 _currentNodeNameId = -1;
604 _currentNode = null;
605 _currentContextId = -1;
606 }
607 }
608
609 @override
610 List<Location> getRelationships(Element element, Relationship relationship) {
611 // TODO(scheglov) make IndexStore interface async
612 return <Location>[];
613 }
614
615 Future<List<Location>> getRelationshipsAsync(Element element,
616 Relationship relationship) {
617 // special support for UniverseElement
618 if (identical(element, UniverseElement.INSTANCE)) {
619 List<Location> locations = _getRelationshipsUniverse(relationship);
620 return new Future.value(locations);
621 }
622 // prepare node names
623 String name = _getElementName(element);
624 int nameId = _stringCodec.encode(name);
625 List<int> nodeNameIds = _nameToNodeNames.get(nameId);
626 // prepare Future(s) for reading each IndexNode
627 List<Future<List<Location>>> nodeFutures = <Future<List<Location>>>[];
628 for (int nodeNameId in nodeNameIds) {
629 String nodeName = _stringCodec.decode(nodeNameId);
630 Future<IndexNode> nodeFuture = _nodeManager.getNode(nodeName);
631 Future<List<Location>> locationsFuture = nodeFuture.then((node) {
632 if (node == null) {
633 // TODO(scheglov) remove node
634 return Location.EMPTY_ARRAY;
635 }
636 return node.getRelationships(element, relationship);
637 });
638 nodeFutures.add(locationsFuture);
639 }
640 // return Future that merges separate IndexNode Location(s)
641 return Future.wait(nodeFutures).then((List<List<Location>> locationsList) {
642 List<Location> allLocations = <Location>[];
643 for (List<Location> locations in locationsList) {
644 allLocations.addAll(locations);
645 }
646 return allLocations;
647 });
648 }
649
650 @override
651 void recordRelationship(Element element, Relationship relationship,
652 Location location) {
653 if (element == null || location == null) {
654 return;
655 }
656 // special support for UniverseElement
657 if (identical(element, UniverseElement.INSTANCE)) {
658 _recordRelationshipUniverse(relationship, location);
659 return;
660 }
661 // other elements
662 _recordNodeNameForElement(element);
663 _currentNode.recordRelationship(element, relationship, location);
664 }
665
666 @override
667 void removeContext(AnalysisContext context) {
668 context = _unwrapContext(context);
669 if (context == null) {
670 return;
671 }
672 // remove sources
673 removeSources(context, null);
674 // remove context information
675 _contextToLibraryToUnits.remove(context);
676 _contextToUnitToLibraries.remove(context);
677 _contextNodeRelations.remove(_contextCodec.encode(context));
678 // remove context from codec
679 _contextCodec.remove(context);
680 }
681
682 @override
683 void removeSource(AnalysisContext context, Source source) {
684 context = _unwrapContext(context);
685 if (context == null) {
686 return;
687 }
688 // remove nodes for unit/library pairs
689 Map<Source, Set<Source>> unitToLibraries =
690 _contextToUnitToLibraries[context];
691 if (unitToLibraries != null) {
692 Set<Source> libraries = unitToLibraries.remove(source);
693 if (libraries != null) {
694 for (Source library in libraries) {
695 _removeLocations(context, library, source);
696 }
697 }
698 }
699 // remove nodes for library/unit pairs
700 Map<Source, Set<Source>> libraryToUnits = _contextToLibraryToUnits[context];
701 if (libraryToUnits != null) {
702 Set<Source> units = libraryToUnits.remove(source);
703 if (units != null) {
704 for (Source unit in units) {
705 _removeLocations(context, source, unit);
706 }
707 }
708 }
709 }
710
711 @override
712 void removeSources(AnalysisContext context, SourceContainer container) {
713 context = _unwrapContext(context);
714 if (context == null) {
715 return;
716 }
717 // remove nodes for unit/library pairs
718 Map<Source, Set<Source>> unitToLibraries =
719 _contextToUnitToLibraries[context];
720 if (unitToLibraries != null) {
721 List<Source> units = new List<Source>.from(unitToLibraries.keys);
722 for (Source source in units) {
723 if (container == null || container.contains(source)) {
724 removeSource(context, source);
725 }
726 }
727 }
728 // remove nodes for library/unit pairs
729 Map<Source, Set<Source>> libraryToUnits = _contextToLibraryToUnits[context];
730 if (libraryToUnits != null) {
731 List<Source> libraries = new List<Source>.from(libraryToUnits.keys);
732 for (Source source in libraries) {
733 if (container == null || container.contains(source)) {
734 removeSource(context, source);
735 }
736 }
737 }
738 }
739
740 String _getElementName(Element element) => element.name;
741
742 List<Location> _getRelationshipsUniverse(Relationship relationship) {
743 List<Location> locations = <Location>[];
744 _contextNodeRelations.forEach((contextId, contextRelations) {
745 AnalysisContext context = _contextCodec.decode(contextId);
746 if (context != null) {
747 for (Map<Relationship, List<LocationData>> nodeRelations in
748 contextRelations.values) {
749 List<LocationData> nodeLocations = nodeRelations[relationship];
750 if (nodeLocations != null) {
751 for (LocationData locationData in nodeLocations) {
752 Location location = locationData.getLocation(context,
753 _elementCodec);
754 if (location != null) {
755 locations.add(location);
756 }
757 }
758 }
759 }
760 }
761 });
762 return locations;
763 }
764
765 void _recordLibraryWithUnit(AnalysisContext context, Source library,
766 Source unit) {
767 Map<Source, Set<Source>> libraryToUnits = _contextToLibraryToUnits[context];
768 if (libraryToUnits == null) {
769 libraryToUnits = new HashMap<Source, Set<Source>>();
770 _contextToLibraryToUnits[context] = libraryToUnits;
771 }
772 Set<Source> units = libraryToUnits[library];
773 if (units == null) {
774 units = new HashSet<Source>();
775 libraryToUnits[library] = units;
776 }
777 units.add(unit);
778 }
779
780 void _recordNodeNameForElement(Element element) {
781 String name = _getElementName(element);
782 int nameId = _stringCodec.encode(name);
783 _nameToNodeNames.add(nameId, _currentNodeNameId);
784 }
785
786 void _recordRelationshipUniverse(Relationship relationship,
787 Location location) {
788 // in current context
789 Map<int, Map<Relationship, List<LocationData>>> nodeRelations =
790 _contextNodeRelations[_currentContextId];
791 if (nodeRelations == null) {
792 nodeRelations = new HashMap<int, Map<Relationship, List<LocationData>>>();
793 _contextNodeRelations[_currentContextId] = nodeRelations;
794 }
795 // in current node
796 Map<Relationship, List<LocationData>> relations =
797 nodeRelations[_currentNodeNameId];
798 if (relations == null) {
799 relations = new HashMap<Relationship, List<LocationData>>();
800 nodeRelations[_currentNodeNameId] = relations;
801 }
802 // for the given relationship
803 List<LocationData> locations = relations[relationship];
804 if (locations == null) {
805 locations = <LocationData>[];
806 relations[relationship] = locations;
807 }
808 // record LocationData
809 locations.add(new LocationData.forObject(_elementCodec, location));
810 }
811
812 void _recordUnitInLibrary(AnalysisContext context, Source library,
813 Source unit) {
814 Map<Source, Set<Source>> unitToLibraries =
815 _contextToUnitToLibraries[context];
816 if (unitToLibraries == null) {
817 unitToLibraries = new HashMap<Source, Set<Source>>();
818 _contextToUnitToLibraries[context] = unitToLibraries;
819 }
820 Set<Source> libraries = unitToLibraries[unit];
821 if (libraries == null) {
822 libraries = new HashSet<Source>();
823 unitToLibraries[unit] = libraries;
824 }
825 libraries.add(library);
826 }
827
828 /**
829 * Removes locations recorded in the given library/unit pair.
830 */
831 void _removeLocations(AnalysisContext context, Source library, Source unit) {
832 // remove node
833 String libraryName = library != null ? library.fullName : null;
834 String unitName = unit.fullName;
835 int libraryNameIndex = _stringCodec.encode(libraryName);
836 int unitNameIndex = _stringCodec.encode(unitName);
837 String nodeName = '${libraryNameIndex}_${unitNameIndex}.index';
838 int nodeNameId = _stringCodec.encode(nodeName);
839 _nodeManager.removeNode(nodeName);
840 // remove source
841 _sources.remove(library);
842 _sources.remove(unit);
843 // remove universe relations
844 {
845 int contextId = _contextCodec.encode(context);
846 Map<int, Object> nodeRelations = _contextNodeRelations[contextId];
847 if (nodeRelations != null) {
848 nodeRelations.remove(nodeNameId);
849 }
850 }
851 }
852
853 /**
854 * When logging is on, [AnalysisEngine] actually creates
855 * [InstrumentedAnalysisContextImpl], which wraps the [AnalysisContextImpl] used to create
856 * actual [Element]s. So, in the index we have to unwrap [InstrumentedAnalysisContextImpl]
857 * when performing any operation.
858 */
859 AnalysisContext _unwrapContext(AnalysisContext context) {
860 if (context is InstrumentedAnalysisContextImpl) {
861 context = (context as InstrumentedAnalysisContextImpl).basis;
862 }
863 return context;
864 }
865 }
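To make the node naming in aboutToIndexDart() and aboutToIndexHtml() concrete, here is a purely hypothetical example; the paths and the numeric ids assigned by StringCodec are invented:

// Hypothetical StringCodec ids:
//   encode('/project/lib/main.dart')     -> 12   (defining library unit)
//   encode('/project/lib/src/part.dart') -> 47   (part unit)
// aboutToIndexDart() then stores relations for that unit under the node name
// '12_47.index', while aboutToIndexHtml() would use just '<sourceNameIndex>.index'.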
866
867
868 class _DataInputStream {
869 ByteData _byteData;
870 int _byteOffset = 0;
871
872 _DataInputStream(List<int> bytes) {
873 ByteBuffer buffer = new Uint8List.fromList(bytes).buffer;
874 _byteData = new ByteData.view(buffer);
875 }
876
877 int readInt() {
878 int result = _byteData.getInt32(_byteOffset);
879 _byteOffset += 4;
880 return result;
881 }
882 }
883
884
885 class _DataOutputStream {
886 BytesBuilder _buffer = new BytesBuilder();
887
888 Uint8List getBytes() {
889 return new Uint8List.fromList(_buffer.takeBytes());
890 }
891
892 void writeInt(int value) {
893 _buffer.addByte((value & 0xFF000000) >> 24);
894 _buffer.addByte((value & 0x00FF0000) >> 16);
895 _buffer.addByte((value & 0x0000FF00) >> 8);
896 _buffer.addByte(value & 0xFF);
897 }
898 }
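Finally, a small round-trip sketch of the two private stream helpers; the value is arbitrary, and the snippet assumes it runs inside this library where both classes are visible:

// writeInt() stores each value as four big-endian bytes; readInt() consumes
// them in the same order.
_DataOutputStream out = new _DataOutputStream();
out.writeInt(0x01020304);
List<int> bytes = out.getBytes();          // [0x01, 0x02, 0x03, 0x04]
_DataInputStream input = new _DataInputStream(bytes);
assert(input.readInt() == 0x01020304);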