OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 171 matching lines...)
182 | 182 |
183 // It is very important to keep objects that form a heap snapshot | 183 // It is very important to keep objects that form a heap snapshot |
184 // as small as possible. | 184 // as small as possible. |
185 namespace { // Avoid littering the global namespace. | 185 namespace { // Avoid littering the global namespace. |
186 | 186 |
187 template <size_t ptr_size> struct SnapshotSizeConstants; | 187 template <size_t ptr_size> struct SnapshotSizeConstants; |
188 | 188 |
189 template <> struct SnapshotSizeConstants<4> { | 189 template <> struct SnapshotSizeConstants<4> { |
190 static const int kExpectedHeapGraphEdgeSize = 12; | 190 static const int kExpectedHeapGraphEdgeSize = 12; |
191 static const int kExpectedHeapEntrySize = 24; | 191 static const int kExpectedHeapEntrySize = 24; |
192 static const int kExpectedHeapSnapshotsCollectionSize = 100; | |
193 static const int kExpectedHeapSnapshotSize = 132; | |
194 }; | 192 }; |
195 | 193 |
196 template <> struct SnapshotSizeConstants<8> { | 194 template <> struct SnapshotSizeConstants<8> { |
197 static const int kExpectedHeapGraphEdgeSize = 24; | 195 static const int kExpectedHeapGraphEdgeSize = 24; |
198 static const int kExpectedHeapEntrySize = 32; | 196 static const int kExpectedHeapEntrySize = 32; |
199 static const int kExpectedHeapSnapshotsCollectionSize = 152; | |
200 static const int kExpectedHeapSnapshotSize = 160; | |
201 }; | 197 }; |
202 | 198 |
203 } // namespace | 199 } // namespace |
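
Note: the kExpected* constants above back STATIC_CHECKs elsewhere in this file, so accidental growth of the per-edge and per-entry structs fails at compile time; the HeapSnapshot and HeapSnapshotsCollection checks disappear on the NEW side because this change removes fields from those classes. A minimal sketch of the same guard pattern, assuming standard static_assert in place of V8's STATIC_CHECK macro (the struct layout is illustrative, not the real HeapGraphEdge):

    struct EdgeLike {
      unsigned bit_field_;  // packed edge type and flags
      int to_index_;        // index of the target entry
      const char* name_;    // pointer-sized payload: 4 bytes on 32-bit, 8 on 64-bit
    };

    // Compilation fails if a field is added without updating the expected size.
    static_assert(sizeof(EdgeLike) == (sizeof(void*) == 4 ? 12 : 16),
                  "heap snapshot edges must stay small");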
204 | 200 |
205 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, | 201 HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection, |
206 const char* title, | 202 const char* title, |
207 unsigned uid) | 203 unsigned uid) |
208 : collection_(collection), | 204 : collection_(collection), |
209 title_(title), | 205 title_(title), |
210 uid_(uid), | 206 uid_(uid), |
(...skipping 20 matching lines...)
231 | 227 |
232 | 228 |
233 void HeapSnapshot::RememberLastJSObjectId() { | 229 void HeapSnapshot::RememberLastJSObjectId() { |
234 max_snapshot_js_object_id_ = collection_->last_assigned_id(); | 230 max_snapshot_js_object_id_ = collection_->last_assigned_id(); |
235 } | 231 } |
236 | 232 |
237 | 233 |
238 HeapEntry* HeapSnapshot::AddRootEntry() { | 234 HeapEntry* HeapSnapshot::AddRootEntry() { |
239 ASSERT(root_index_ == HeapEntry::kNoEntry); | 235 ASSERT(root_index_ == HeapEntry::kNoEntry); |
240 ASSERT(entries_.is_empty()); // Root entry must be the first one. | 236 ASSERT(entries_.is_empty()); // Root entry must be the first one. |
241 HeapEntry* entry = AddEntry(HeapEntry::kObject, | 237 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic, |
242 "", | 238 "", |
243 HeapObjectsMap::kInternalRootObjectId, | 239 HeapObjectsMap::kInternalRootObjectId, |
244 0); | 240 0); |
245 root_index_ = entry->index(); | 241 root_index_ = entry->index(); |
246 ASSERT(root_index_ == 0); | 242 ASSERT(root_index_ == 0); |
247 return entry; | 243 return entry; |
248 } | 244 } |
249 | 245 |
250 | 246 |
251 HeapEntry* HeapSnapshot::AddGcRootsEntry() { | 247 HeapEntry* HeapSnapshot::AddGcRootsEntry() { |
252 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry); | 248 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry); |
253 HeapEntry* entry = AddEntry(HeapEntry::kObject, | 249 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic, |
254 "(GC roots)", | 250 "(GC roots)", |
255 HeapObjectsMap::kGcRootsObjectId, | 251 HeapObjectsMap::kGcRootsObjectId, |
256 0); | 252 0); |
257 gc_roots_index_ = entry->index(); | 253 gc_roots_index_ = entry->index(); |
258 return entry; | 254 return entry; |
259 } | 255 } |
260 | 256 |
261 | 257 |
262 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) { | 258 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) { |
263 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry); | 259 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry); |
264 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags); | 260 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags); |
265 HeapEntry* entry = AddEntry( | 261 HeapEntry* entry = AddEntry( |
266 HeapEntry::kObject, | 262 HeapEntry::kSynthetic, |
267 VisitorSynchronization::kTagNames[tag], | 263 VisitorSynchronization::kTagNames[tag], |
268 HeapObjectsMap::GetNthGcSubrootId(tag), | 264 HeapObjectsMap::GetNthGcSubrootId(tag), |
269 0); | 265 0); |
270 gc_subroot_indexes_[tag] = entry->index(); | 266 gc_subroot_indexes_[tag] = entry->index(); |
271 return entry; | 267 return entry; |
272 } | 268 } |
273 | 269 |
274 | 270 |
275 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type, | 271 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type, |
276 const char* name, | 272 const char* name, |
(...skipping 69 matching lines...)
346 } | 342 } |
347 | 343 |
348 | 344 |
349 template<typename T, class P> | 345 template<typename T, class P> |
350 static size_t GetMemoryUsedByList(const List<T, P>& list) { | 346 static size_t GetMemoryUsedByList(const List<T, P>& list) { |
351 return list.length() * sizeof(T) + sizeof(list); | 347 return list.length() * sizeof(T) + sizeof(list); |
352 } | 348 } |
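
Note: GetMemoryUsedByList charges for the elements in use plus the list header itself; it uses length() rather than the backing-store capacity, so allocator slack is deliberately left out. A standalone analogue using std::vector, for illustration only:

    #include <cstddef>
    #include <vector>

    // Payload of the live elements plus the container object itself;
    // capacity slack from over-allocation is not counted.
    template <typename T>
    size_t GetMemoryUsedByVector(const std::vector<T>& v) {
      return v.size() * sizeof(T) + sizeof(v);
    }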
353 | 349 |
354 | 350 |
355 size_t HeapSnapshot::RawSnapshotSize() const { | 351 size_t HeapSnapshot::RawSnapshotSize() const { |
356 STATIC_CHECK(SnapshotSizeConstants<kPointerSize>::kExpectedHeapSnapshotSize == | |
357 sizeof(HeapSnapshot)); // NOLINT | |
358 return | 352 return |
359 sizeof(*this) + | 353 sizeof(*this) + |
360 GetMemoryUsedByList(entries_) + | 354 GetMemoryUsedByList(entries_) + |
361 GetMemoryUsedByList(edges_) + | 355 GetMemoryUsedByList(edges_) + |
362 GetMemoryUsedByList(children_) + | 356 GetMemoryUsedByList(children_) + |
363 GetMemoryUsedByList(sorted_entries_); | 357 GetMemoryUsedByList(sorted_entries_); |
364 } | 358 } |
365 | 359 |
366 | 360 |
367 // We split IDs on evens for embedder objects (see | 361 // We split IDs on evens for embedder objects (see |
(...skipping 90 matching lines...)
458 entries_.Add(EntryInfo(id, addr, size)); | 452 entries_.Add(EntryInfo(id, addr, size)); |
459 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); | 453 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy()); |
460 return id; | 454 return id; |
461 } | 455 } |
462 | 456 |
463 | 457 |
464 void HeapObjectsMap::StopHeapObjectsTracking() { | 458 void HeapObjectsMap::StopHeapObjectsTracking() { |
465 time_intervals_.Clear(); | 459 time_intervals_.Clear(); |
466 } | 460 } |
467 | 461 |
| 462 |
468 void HeapObjectsMap::UpdateHeapObjectsMap() { | 463 void HeapObjectsMap::UpdateHeapObjectsMap() { |
469 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 464 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
470 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); | 465 "HeapSnapshotsCollection::UpdateHeapObjectsMap"); |
471 HeapIterator iterator(heap_); | 466 HeapIterator iterator(heap_); |
472 for (HeapObject* obj = iterator.next(); | 467 for (HeapObject* obj = iterator.next(); |
473 obj != NULL; | 468 obj != NULL; |
474 obj = iterator.next()) { | 469 obj = iterator.next()) { |
475 FindOrAddEntry(obj->address(), obj->Size()); | 470 FindOrAddEntry(obj->address(), obj->Size()); |
476 } | 471 } |
477 RemoveDeadEntries(); | 472 RemoveDeadEntries(); |
(...skipping 93 matching lines...)
571 return | 566 return |
572 sizeof(*this) + | 567 sizeof(*this) + |
573 sizeof(HashMap::Entry) * entries_map_.capacity() + | 568 sizeof(HashMap::Entry) * entries_map_.capacity() + |
574 GetMemoryUsedByList(entries_) + | 569 GetMemoryUsedByList(entries_) + |
575 GetMemoryUsedByList(time_intervals_); | 570 GetMemoryUsedByList(time_intervals_); |
576 } | 571 } |
577 | 572 |
578 | 573 |
579 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap) | 574 HeapSnapshotsCollection::HeapSnapshotsCollection(Heap* heap) |
580 : is_tracking_objects_(false), | 575 : is_tracking_objects_(false), |
581 snapshots_uids_(HeapSnapshotsMatch), | |
582 token_enumerator_(new TokenEnumerator()), | |
583 ids_(heap) { | 576 ids_(heap) { |
584 } | 577 } |
585 | 578 |
586 | 579 |
587 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { | 580 static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) { |
588 delete *snapshot_ptr; | 581 delete *snapshot_ptr; |
589 } | 582 } |
590 | 583 |
591 | 584 |
592 HeapSnapshotsCollection::~HeapSnapshotsCollection() { | 585 HeapSnapshotsCollection::~HeapSnapshotsCollection() { |
593 delete token_enumerator_; | |
594 snapshots_.Iterate(DeleteHeapSnapshot); | 586 snapshots_.Iterate(DeleteHeapSnapshot); |
595 } | 587 } |
596 | 588 |
597 | 589 |
598 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name, | 590 HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(const char* name, |
599 unsigned uid) { | 591 unsigned uid) { |
600 is_tracking_objects_ = true; // Start watching for heap object moves. | 592 is_tracking_objects_ = true; // Start watching for heap object moves. |
601 return new HeapSnapshot(this, name, uid); | 593 return new HeapSnapshot(this, name, uid); |
602 } | 594 } |
603 | 595 |
604 | 596 |
605 void HeapSnapshotsCollection::SnapshotGenerationFinished( | 597 void HeapSnapshotsCollection::SnapshotGenerationFinished( |
606 HeapSnapshot* snapshot) { | 598 HeapSnapshot* snapshot) { |
607 ids_.SnapshotGenerationFinished(); | 599 ids_.SnapshotGenerationFinished(); |
608 if (snapshot != NULL) { | 600 if (snapshot != NULL) { |
609 snapshots_.Add(snapshot); | 601 snapshots_.Add(snapshot); |
610 HashMap::Entry* entry = | |
611 snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()), | |
612 static_cast<uint32_t>(snapshot->uid()), | |
613 true); | |
614 ASSERT(entry->value == NULL); | |
615 entry->value = snapshot; | |
616 } | 602 } |
617 } | 603 } |
618 | 604 |
619 | 605 |
620 HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) { | |
621 HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid), | |
622 static_cast<uint32_t>(uid), | |
623 false); | |
624 return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL; | |
625 } | |
626 | |
627 | |
628 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) { | 606 void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) { |
629 snapshots_.RemoveElement(snapshot); | 607 snapshots_.RemoveElement(snapshot); |
630 unsigned uid = snapshot->uid(); | |
631 snapshots_uids_.Remove(reinterpret_cast<void*>(uid), | |
632 static_cast<uint32_t>(uid)); | |
633 } | 608 } |
634 | 609 |
635 | 610 |
636 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById( | 611 Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById( |
637 SnapshotObjectId id) { | 612 SnapshotObjectId id) { |
638 // First perform a full GC in order to avoid dead objects. | 613 // First perform a full GC in order to avoid dead objects. |
639 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, | 614 HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask, |
640 "HeapSnapshotsCollection::FindHeapObjectById"); | 615 "HeapSnapshotsCollection::FindHeapObjectById"); |
641 DisallowHeapAllocation no_allocation; | 616 DisallowHeapAllocation no_allocation; |
642 HeapObject* object = NULL; | 617 HeapObject* object = NULL; |
643 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable); | 618 HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable); |
644 // Make sure that the object with the given id is still reachable. | 619 // Make sure that the object with the given id is still reachable. |
645 for (HeapObject* obj = iterator.next(); | 620 for (HeapObject* obj = iterator.next(); |
646 obj != NULL; | 621 obj != NULL; |
647 obj = iterator.next()) { | 622 obj = iterator.next()) { |
648 if (ids_.FindEntry(obj->address()) == id) { | 623 if (ids_.FindEntry(obj->address()) == id) { |
649 ASSERT(object == NULL); | 624 ASSERT(object == NULL); |
650 object = obj; | 625 object = obj; |
651 // Can't break -- kFilterUnreachable requires full heap traversal. | 626 // Can't break -- kFilterUnreachable requires full heap traversal. |
652 } | 627 } |
653 } | 628 } |
654 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>(); | 629 return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>(); |
655 } | 630 } |
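
Note: the traversal above records the match and keeps iterating, since kFilterUnreachable is only correct when the iterator visits the whole heap. A self-contained sketch of that record-instead-of-break shape, in plain C++ rather than V8's heap API:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Find a unique id without an early exit, mirroring the loop above where
    // the filtering iterator requires a complete pass over all elements.
    const int* FindUniqueId(const std::vector<int>& ids, int wanted) {
      const int* found = NULL;
      for (size_t i = 0; i < ids.size(); ++i) {
        if (ids[i] == wanted) {
          assert(found == NULL);  // ids are unique, like SnapshotObjectId
          found = &ids[i];        // no break: keep scanning to the end
        }
      }
      return found;
    }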
656 | 631 |
657 | 632 |
658 size_t HeapSnapshotsCollection::GetUsedMemorySize() const { | 633 size_t HeapSnapshotsCollection::GetUsedMemorySize() const { |
659 STATIC_CHECK(SnapshotSizeConstants<kPointerSize>:: | |
660 kExpectedHeapSnapshotsCollectionSize == | |
661 sizeof(HeapSnapshotsCollection)); // NOLINT | |
662 size_t size = sizeof(*this); | 634 size_t size = sizeof(*this); |
663 size += names_.GetUsedMemorySize(); | 635 size += names_.GetUsedMemorySize(); |
664 size += ids_.GetUsedMemorySize(); | 636 size += ids_.GetUsedMemorySize(); |
665 size += sizeof(HashMap::Entry) * snapshots_uids_.capacity(); | |
666 size += GetMemoryUsedByList(snapshots_); | 637 size += GetMemoryUsedByList(snapshots_); |
667 for (int i = 0; i < snapshots_.length(); ++i) { | 638 for (int i = 0; i < snapshots_.length(); ++i) { |
668 size += snapshots_[i]->RawSnapshotSize(); | 639 size += snapshots_[i]->RawSnapshotSize(); |
669 } | 640 } |
670 return size; | 641 return size; |
671 } | 642 } |
672 | 643 |
673 | 644 |
674 HeapEntriesMap::HeapEntriesMap() | 645 HeapEntriesMap::HeapEntriesMap() |
675 : entries_(HeapThingsMatch) { | 646 : entries_(HeapThingsMatch) { |
(...skipping 278 matching lines...)
954 }; | 925 }; |
955 | 926 |
956 | 927 |
957 void V8HeapExplorer::ExtractReferences(HeapObject* obj) { | 928 void V8HeapExplorer::ExtractReferences(HeapObject* obj) { |
958 HeapEntry* heap_entry = GetEntry(obj); | 929 HeapEntry* heap_entry = GetEntry(obj); |
959 if (heap_entry == NULL) return; // No interest in this object. | 930 if (heap_entry == NULL) return; // No interest in this object. |
960 int entry = heap_entry->index(); | 931 int entry = heap_entry->index(); |
961 | 932 |
962 bool extract_indexed_refs = true; | 933 bool extract_indexed_refs = true; |
963 if (obj->IsJSGlobalProxy()) { | 934 if (obj->IsJSGlobalProxy()) { |
964 ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj)); | 935 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj)); |
965 } else if (obj->IsJSObject()) { | 936 } else if (obj->IsJSObject()) { |
966 ExtractJSObjectReferences(entry, JSObject::cast(obj)); | 937 ExtractJSObjectReferences(entry, JSObject::cast(obj)); |
967 } else if (obj->IsString()) { | 938 } else if (obj->IsString()) { |
968 ExtractStringReferences(entry, String::cast(obj)); | 939 ExtractStringReferences(entry, String::cast(obj)); |
969 } else if (obj->IsContext()) { | 940 } else if (obj->IsContext()) { |
970 ExtractContextReferences(entry, Context::cast(obj)); | 941 ExtractContextReferences(entry, Context::cast(obj)); |
971 } else if (obj->IsMap()) { | 942 } else if (obj->IsMap()) { |
972 ExtractMapReferences(entry, Map::cast(obj)); | 943 ExtractMapReferences(entry, Map::cast(obj)); |
973 } else if (obj->IsSharedFunctionInfo()) { | 944 } else if (obj->IsSharedFunctionInfo()) { |
974 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj)); | 945 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj)); |
(...skipping 14 matching lines...)
989 extract_indexed_refs = false; | 960 extract_indexed_refs = false; |
990 } | 961 } |
991 if (extract_indexed_refs) { | 962 if (extract_indexed_refs) { |
992 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset); | 963 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset); |
993 IndexedReferencesExtractor refs_extractor(this, obj, entry); | 964 IndexedReferencesExtractor refs_extractor(this, obj, entry); |
994 obj->Iterate(&refs_extractor); | 965 obj->Iterate(&refs_extractor); |
995 } | 966 } |
996 } | 967 } |
997 | 968 |
998 | 969 |
999 void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) { | 970 void V8HeapExplorer::ExtractJSGlobalProxyReferences( |
1000 // We need to reference JS global objects from snapshot's root. | 971 int entry, JSGlobalProxy* proxy) { |
1001 // We use JSGlobalProxy because this is what embedder (e.g. browser) | 972 SetInternalReference(proxy, entry, |
1002 // uses for the global object. | 973 "native_context", proxy->native_context(), |
1003 Object* object = proxy->map()->prototype(); | 974 JSGlobalProxy::kNativeContextOffset); |
1004 bool is_debug_object = false; | |
1005 #ifdef ENABLE_DEBUGGER_SUPPORT | |
1006 is_debug_object = object->IsGlobalObject() && | |
1007 Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object)); | |
1008 #endif | |
1009 if (!is_debug_object) { | |
1010 SetUserGlobalReference(object); | |
1011 } | |
1012 } | 975 } |
1013 | 976 |
1014 | 977 |
1015 void V8HeapExplorer::ExtractJSObjectReferences( | 978 void V8HeapExplorer::ExtractJSObjectReferences( |
1016 int entry, JSObject* js_obj) { | 979 int entry, JSObject* js_obj) { |
1017 HeapObject* obj = js_obj; | 980 HeapObject* obj = js_obj; |
1018 ExtractClosureReferences(js_obj, entry); | 981 ExtractClosureReferences(js_obj, entry); |
1019 ExtractPropertyReferences(js_obj, entry); | 982 ExtractPropertyReferences(js_obj, entry); |
1020 ExtractElementReferences(js_obj, entry); | 983 ExtractElementReferences(js_obj, entry); |
1021 ExtractInternalReferences(js_obj, entry); | 984 ExtractInternalReferences(js_obj, entry); |
(...skipping 749 matching lines...)
1771 HeapGraphEdge::kInternal, | 1734 HeapGraphEdge::kInternal, |
1772 snapshot_->gc_subroot(tag)->index(), | 1735 snapshot_->gc_subroot(tag)->index(), |
1773 name, | 1736 name, |
1774 child_entry); | 1737 child_entry); |
1775 } else { | 1738 } else { |
1776 filler_->SetIndexedAutoIndexReference( | 1739 filler_->SetIndexedAutoIndexReference( |
1777 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement, | 1740 is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement, |
1778 snapshot_->gc_subroot(tag)->index(), | 1741 snapshot_->gc_subroot(tag)->index(), |
1779 child_entry); | 1742 child_entry); |
1780 } | 1743 } |
| 1744 |
| 1745 // Add a shortcut to JS global object reference at snapshot root. |
| 1746 if (child_obj->IsNativeContext()) { |
| 1747 Context* context = Context::cast(child_obj); |
| 1748 GlobalObject* global = context->global_object(); |
| 1749 if (global->IsJSGlobalObject()) { |
| 1750 bool is_debug_object = false; |
| 1751 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 1752 is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global); |
| 1753 #endif |
| 1754 if (!is_debug_object && !user_roots_.Contains(global)) { |
| 1755 user_roots_.Insert(global); |
| 1756 SetUserGlobalReference(global); |
| 1757 } |
| 1758 } |
| 1759 } |
1781 } | 1760 } |
1782 } | 1761 } |
1783 | 1762 |
1784 | 1763 |
1785 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) { | 1764 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) { |
1786 if (strong_gc_subroot_names_.is_empty()) { | 1765 if (strong_gc_subroot_names_.is_empty()) { |
1787 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name); | 1766 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name); |
1788 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name) | 1767 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name) |
1789 STRONG_ROOT_LIST(ROOT_NAME) | 1768 STRONG_ROOT_LIST(ROOT_NAME) |
1790 #undef ROOT_NAME | 1769 #undef ROOT_NAME |
(...skipping 185 matching lines...)
1976 group->info = NULL; // Acquire info object ownership. | 1955 group->info = NULL; // Acquire info object ownership. |
1977 } | 1956 } |
1978 isolate->global_handles()->RemoveObjectGroups(); | 1957 isolate->global_handles()->RemoveObjectGroups(); |
1979 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type); | 1958 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type); |
1980 // Record objects that are not in ObjectGroups, but have class ID. | 1959 // Record objects that are not in ObjectGroups, but have class ID. |
1981 GlobalHandlesExtractor extractor(this); | 1960 GlobalHandlesExtractor extractor(this); |
1982 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor); | 1961 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor); |
1983 embedder_queried_ = true; | 1962 embedder_queried_ = true; |
1984 } | 1963 } |
1985 | 1964 |
| 1965 |
1986 void NativeObjectsExplorer::FillImplicitReferences() { | 1966 void NativeObjectsExplorer::FillImplicitReferences() { |
1987 Isolate* isolate = Isolate::Current(); | 1967 Isolate* isolate = Isolate::Current(); |
1988 List<ImplicitRefGroup*>* groups = | 1968 List<ImplicitRefGroup*>* groups = |
1989 isolate->global_handles()->implicit_ref_groups(); | 1969 isolate->global_handles()->implicit_ref_groups(); |
1990 for (int i = 0; i < groups->length(); ++i) { | 1970 for (int i = 0; i < groups->length(); ++i) { |
1991 ImplicitRefGroup* group = groups->at(i); | 1971 ImplicitRefGroup* group = groups->at(i); |
1992 HeapObject* parent = *group->parent; | 1972 HeapObject* parent = *group->parent; |
1993 int parent_entry = | 1973 int parent_entry = |
1994 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index(); | 1974 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index(); |
1995 ASSERT(parent_entry != HeapEntry::kNoEntry); | 1975 ASSERT(parent_entry != HeapEntry::kNoEntry); |
(...skipping 605 matching lines...)
2601 | 2581 |
2602 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { | 2582 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) { |
2603 static const char hex_chars[] = "0123456789ABCDEF"; | 2583 static const char hex_chars[] = "0123456789ABCDEF"; |
2604 w->AddString("\\u"); | 2584 w->AddString("\\u"); |
2605 w->AddCharacter(hex_chars[(u >> 12) & 0xf]); | 2585 w->AddCharacter(hex_chars[(u >> 12) & 0xf]); |
2606 w->AddCharacter(hex_chars[(u >> 8) & 0xf]); | 2586 w->AddCharacter(hex_chars[(u >> 8) & 0xf]); |
2607 w->AddCharacter(hex_chars[(u >> 4) & 0xf]); | 2587 w->AddCharacter(hex_chars[(u >> 4) & 0xf]); |
2608 w->AddCharacter(hex_chars[u & 0xf]); | 2588 w->AddCharacter(hex_chars[u & 0xf]); |
2609 } | 2589 } |
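
Note: WriteUChar escapes a 16-bit code unit as JSON \uXXXX by extracting its four hex nibbles, high to low. A runnable standalone demonstration of the same nibble arithmetic (not V8 code):

    #include <cstdio>

    int main() {
      static const char hex_chars[] = "0123456789ABCDEF";
      unsigned u = 0x2028;  // U+2028 LINE SEPARATOR, which must be escaped in JSON
      std::printf("\\u%c%c%c%c\n",
                  hex_chars[(u >> 12) & 0xF],
                  hex_chars[(u >> 8) & 0xF],
                  hex_chars[(u >> 4) & 0xF],
                  hex_chars[u & 0xF]);  // prints \u2028
      return 0;
    }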
2610 | 2590 |
| 2591 |
2611 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) { | 2592 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) { |
2612 writer_->AddCharacter('\n'); | 2593 writer_->AddCharacter('\n'); |
2613 writer_->AddCharacter('\"'); | 2594 writer_->AddCharacter('\"'); |
2614 for ( ; *s != '\0'; ++s) { | 2595 for ( ; *s != '\0'; ++s) { |
2615 switch (*s) { | 2596 switch (*s) { |
2616 case '\b': | 2597 case '\b': |
2617 writer_->AddString("\\b"); | 2598 writer_->AddString("\\b"); |
2618 continue; | 2599 continue; |
2619 case '\f': | 2600 case '\f': |
2620 writer_->AddString("\\f"); | 2601 writer_->AddString("\\f"); |
(...skipping 65 matching lines...)
2686 | 2667 |
2687 | 2668 |
2688 void HeapSnapshotJSONSerializer::SortHashMap( | 2669 void HeapSnapshotJSONSerializer::SortHashMap( |
2689 HashMap* map, List<HashMap::Entry*>* sorted_entries) { | 2670 HashMap* map, List<HashMap::Entry*>* sorted_entries) { |
2690 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) | 2671 for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) |
2691 sorted_entries->Add(p); | 2672 sorted_entries->Add(p); |
2692 sorted_entries->Sort(SortUsingEntryValue); | 2673 sorted_entries->Sort(SortUsingEntryValue); |
2693 } | 2674 } |
2694 | 2675 |
2695 } } // namespace v8::internal | 2676 } } // namespace v8::internal |