Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/global-handles.h" | 5 #include "src/global-handles.h" |
| 6 | 6 |
| 7 #include "src/api.h" | 7 #include "src/api.h" |
| 8 #include "src/v8.h" | 8 #include "src/v8.h" |
| 9 #include "src/vm-state-inl.h" | 9 #include "src/vm-state-inl.h" |
| 10 | 10 |
| (...skipping 36 matching lines...) | |
| 47 DCHECK(offsetof(Node, flags_) == Internals::kNodeFlagsOffset); | 47 DCHECK(offsetof(Node, flags_) == Internals::kNodeFlagsOffset); |
| 48 STATIC_ASSERT(static_cast<int>(NodeState::kMask) == | 48 STATIC_ASSERT(static_cast<int>(NodeState::kMask) == |
| 49 Internals::kNodeStateMask); | 49 Internals::kNodeStateMask); |
| 50 STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue); | 50 STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue); |
| 51 STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue); | 51 STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue); |
| 52 STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue); | 52 STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue); |
| 53 STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) == | 53 STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) == |
| 54 Internals::kNodeIsIndependentShift); | 54 Internals::kNodeIsIndependentShift); |
| 55 STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) == | 55 STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) == |
| 56 Internals::kNodeIsPartiallyDependentShift); | 56 Internals::kNodeIsPartiallyDependentShift); |
| 57 STATIC_ASSERT(static_cast<int>(IsActive::kShift) == | |
| 58 Internals::kNodeIsActiveShift); | |
| 57 } | 59 } |
| 58 | 60 |
| 59 #ifdef ENABLE_HANDLE_ZAPPING | 61 #ifdef ENABLE_HANDLE_ZAPPING |
| 60 ~Node() { | 62 ~Node() { |
| 61 // TODO(1428): if it's a weak handle we should have invoked its callback. | 63 // TODO(1428): if it's a weak handle we should have invoked its callback. |
| 62 // Zap the values for eager trapping. | 64 // Zap the values for eager trapping. |
| 63 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 65 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 64 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 66 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 65 index_ = 0; | 67 index_ = 0; |
| 66 set_independent(false); | 68 set_independent(false); |
| 67 set_partially_dependent(false); | 69 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 70 set_unmodified(false); | |
| 71 set_active(false); | |
| 72 } else { | |
| 73 set_partially_dependent(false); | |
| 74 } | |
| 68 set_in_new_space_list(false); | 75 set_in_new_space_list(false); |
| 69 parameter_or_next_free_.next_free = NULL; | 76 parameter_or_next_free_.next_free = NULL; |
| 70 weak_callback_ = NULL; | 77 weak_callback_ = NULL; |
| 71 } | 78 } |
| 72 #endif | 79 #endif |
| 73 | 80 |
| 74 void Initialize(int index, Node** first_free) { | 81 void Initialize(int index, Node** first_free) { |
| 75 index_ = static_cast<uint8_t>(index); | 82 index_ = static_cast<uint8_t>(index); |
| 76 DCHECK(static_cast<int>(index_) == index); | 83 DCHECK(static_cast<int>(index_) == index); |
| 77 set_state(FREE); | 84 set_state(FREE); |
| 78 set_weakness_type(NORMAL_WEAK); | 85 set_weakness_type(NORMAL_WEAK); |
| 79 set_in_new_space_list(false); | 86 set_in_new_space_list(false); |
| 87 if (FLAG_scavenge_reclaim_unmodified_objects) { | |
| 88 set_unmodified(false); | |
| 89 set_active(false); | |
| 90 } | |
| 80 parameter_or_next_free_.next_free = *first_free; | 91 parameter_or_next_free_.next_free = *first_free; |
| 81 *first_free = this; | 92 *first_free = this; |
| 82 } | 93 } |
| 83 | 94 |
| 84 void Acquire(Object* object) { | 95 void Acquire(Object* object) { |
| 85 DCHECK(state() == FREE); | 96 DCHECK(state() == FREE); |
| 86 object_ = object; | 97 object_ = object; |
| 87 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 98 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 88 set_independent(false); | 99 set_independent(false); |
| 89 set_partially_dependent(false); | 100 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 101 set_unmodified(false); | |
| 102 set_active(false); | |
| 103 } else { | |
| 104 set_partially_dependent(false); | |
| 105 } | |
| 90 set_state(NORMAL); | 106 set_state(NORMAL); |
| 91 parameter_or_next_free_.parameter = NULL; | 107 parameter_or_next_free_.parameter = NULL; |
| 92 weak_callback_ = NULL; | 108 weak_callback_ = NULL; |
| 93 IncreaseBlockUses(); | 109 IncreaseBlockUses(); |
| 94 } | 110 } |
| 95 | 111 |
| 96 void Zap() { | 112 void Zap() { |
| 97 DCHECK(IsInUse()); | 113 DCHECK(IsInUse()); |
| 98 // Zap the values for eager trapping. | 114 // Zap the values for eager trapping. |
| 99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 115 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 100 } | 116 } |
| 101 | 117 |
| 102 void Release() { | 118 void Release() { |
| 103 DCHECK(IsInUse()); | 119 DCHECK(IsInUse()); |
| 104 set_state(FREE); | 120 set_state(FREE); |
| 105 // Zap the values for eager trapping. | 121 // Zap the values for eager trapping. |
| 106 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 122 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 107 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 123 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 108 set_independent(false); | 124 set_independent(false); |
| 109 set_partially_dependent(false); | 125 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 126 set_unmodified(false); | |
| 127 set_active(false); | |
| 128 } else { | |
| 129 set_partially_dependent(false); | |
| 130 } | |
| 110 weak_callback_ = NULL; | 131 weak_callback_ = NULL; |
| 111 DecreaseBlockUses(); | 132 DecreaseBlockUses(); |
| 112 } | 133 } |
| 113 | 134 |
| 114 // Object slot accessors. | 135 // Object slot accessors. |
| 115 Object* object() const { return object_; } | 136 Object* object() const { return object_; } |
| 116 Object** location() { return &object_; } | 137 Object** location() { return &object_; } |
| 117 Handle<Object> handle() { return Handle<Object>(location()); } | 138 Handle<Object> handle() { return Handle<Object>(location()); } |
| 118 | 139 |
| 119 // Wrapper class ID accessors. | 140 // Wrapper class ID accessors. |
| (...skipping 13 matching lines...) | |
| 133 } | 154 } |
| 134 | 155 |
| 135 bool is_independent() { | 156 bool is_independent() { |
| 136 return IsIndependent::decode(flags_); | 157 return IsIndependent::decode(flags_); |
| 137 } | 158 } |
| 138 void set_independent(bool v) { | 159 void set_independent(bool v) { |
| 139 flags_ = IsIndependent::update(flags_, v); | 160 flags_ = IsIndependent::update(flags_, v); |
| 140 } | 161 } |
| 141 | 162 |
| 142 bool is_partially_dependent() { | 163 bool is_partially_dependent() { |
| 164 CHECK(!FLAG_scavenge_reclaim_unmodified_objects); | |
| 143 return IsPartiallyDependent::decode(flags_); | 165 return IsPartiallyDependent::decode(flags_); |
| 144 } | 166 } |
| 145 void set_partially_dependent(bool v) { | 167 void set_partially_dependent(bool v) { |
| 168 CHECK(!FLAG_scavenge_reclaim_unmodified_objects); | |
| 146 flags_ = IsPartiallyDependent::update(flags_, v); | 169 flags_ = IsPartiallyDependent::update(flags_, v); |
| 147 } | 170 } |
| 148 | 171 |
| 172 bool is_active() { return IsActive::decode(flags_); } | |
| 173 void set_active(bool v) { flags_ = IsActive::update(flags_, v); } | |
| 174 | |
| 175 bool is_unmodified() { | |
| 176 CHECK(FLAG_scavenge_reclaim_unmodified_objects); | |
| 177 return unmodified_flag; | |
| 178 } | |
| 179 void set_unmodified(bool v) { | |
| 180 CHECK(FLAG_scavenge_reclaim_unmodified_objects); | |
| 181 unmodified_flag = v; | |
| 182 } | |
| 183 | |
| 149 bool is_in_new_space_list() { | 184 bool is_in_new_space_list() { |
| 150 return IsInNewSpaceList::decode(flags_); | 185 return IsInNewSpaceList::decode(flags_); |
| 151 } | 186 } |
| 152 void set_in_new_space_list(bool v) { | 187 void set_in_new_space_list(bool v) { |
| 153 flags_ = IsInNewSpaceList::update(flags_, v); | 188 flags_ = IsInNewSpaceList::update(flags_, v); |
| 154 } | 189 } |
| 155 | 190 |
| 156 WeaknessType weakness_type() const { | 191 WeaknessType weakness_type() const { |
| 157 return NodeWeaknessType::decode(flags_); | 192 return NodeWeaknessType::decode(flags_); |
| 158 } | 193 } |
| (...skipping 183 matching lines...) | |
| 342 // Wrapper class ID. | 377 // Wrapper class ID. |
| 343 uint16_t class_id_; | 378 uint16_t class_id_; |
| 344 | 379 |
| 345 // Index in the containing handle block. | 380 // Index in the containing handle block. |
| 346 uint8_t index_; | 381 uint8_t index_; |
| 347 | 382 |
| 348 // This stores three flags (independent, partially_dependent and | 383 // This stores three flags (independent, partially_dependent and |
| 349 // in_new_space_list) and a State. | 384 // in_new_space_list) and a State. |
| 350 class NodeState : public BitField<State, 0, 3> {}; | 385 class NodeState : public BitField<State, 0, 3> {}; |
| 351 class IsIndependent : public BitField<bool, 3, 1> {}; | 386 class IsIndependent : public BitField<bool, 3, 1> {}; |
| 387 // The following two fields are mutually exclusive. | |
| 388 class IsActive : public BitField<bool, 4, 1> {}; | |
| 352 class IsPartiallyDependent : public BitField<bool, 4, 1> {}; | 389 class IsPartiallyDependent : public BitField<bool, 4, 1> {}; |
| 353 class IsInNewSpaceList : public BitField<bool, 5, 1> {}; | 390 class IsInNewSpaceList : public BitField<bool, 5, 1> {}; |
| 354 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; | 391 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; |
| 355 | 392 |
| 356 uint8_t flags_; | 393 uint8_t flags_; |
> rmcilroy, 2015/10/21 13:36:51:
> Maybe we could just make this a uint16_t and store
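For context on that suggestion: `flags_` already packs the node state (bits 0-2), `IsIndependent` (bit 3), `IsActive`/`IsPartiallyDependent` (shared bit 4), `IsInNewSpaceList` (bit 5) and `NodeWeaknessType` (bits 6-7) into a `uint8_t`, which leaves no free bit, hence the separate `bool unmodified_flag` below. A minimal, self-contained sketch of one way to read the suggestion, widening the storage to `uint16_t` so an extra bit can hold the unmodified state; the `BitField` stand-in, the abbreviated enums and the `IsUnmodified` field are illustrative assumptions, not V8's actual definitions or part of this patch:

```cpp
#include <cassert>
#include <cstdint>

// Minimal stand-in for v8::internal::BitField, just enough for this sketch.
template <class T, int kShift, int kSize>
struct BitField {
  static constexpr uint16_t kMask =
      static_cast<uint16_t>(((1u << kSize) - 1u) << kShift);
  static uint16_t update(uint16_t previous, T value) {
    return static_cast<uint16_t>((previous & ~kMask) |
                                 (static_cast<uint16_t>(value) << kShift));
  }
  static T decode(uint16_t value) {
    return static_cast<T>((value & kMask) >> kShift);
  }
};

// Abbreviated stand-ins for the enums used by the real Node class.
enum State { FREE = 0, NORMAL, WEAK, PENDING, NEAR_DEATH };
enum WeaknessType { NORMAL_WEAK = 0, PHANTOM_WEAK };

// Same bit layout as the patch, widened to 16 bits so one more bit is free.
using NodeState = BitField<State, 0, 3>;
using IsIndependent = BitField<bool, 3, 1>;
using IsActive = BitField<bool, 4, 1>;              // used when the flag is on
using IsPartiallyDependent = BitField<bool, 4, 1>;  // used when the flag is off
using IsInNewSpaceList = BitField<bool, 5, 1>;
using NodeWeaknessType = BitField<WeaknessType, 6, 2>;
using IsUnmodified = BitField<bool, 8, 1>;  // hypothetical extra bit

int main() {
  uint16_t flags = 0;
  flags = NodeState::update(flags, WEAK);
  flags = IsActive::update(flags, true);
  flags = IsUnmodified::update(flags, true);
  assert(NodeState::decode(flags) == WEAK);
  assert(IsActive::decode(flags));
  assert(IsUnmodified::decode(flags));
  return 0;
}
```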
| 394 bool unmodified_flag; | |
| 357 | 395 |
| 358 // Handle specific callback - might be a weak reference in disguise. | 396 // Handle specific callback - might be a weak reference in disguise. |
| 359 WeakCallback weak_callback_; | 397 WeakCallback weak_callback_; |
| 360 | 398 |
| 361 // Provided data for callback. In FREE state, this is used for | 399 // Provided data for callback. In FREE state, this is used for |
| 362 // the free list link. | 400 // the free list link. |
| 363 union { | 401 union { |
| 364 void* parameter; | 402 void* parameter; |
| 365 Node* next_free; | 403 Node* next_free; |
| 366 } parameter_or_next_free_; | 404 } parameter_or_next_free_; |
| (...skipping 272 matching lines...) | |
| 639 if (it.node()->IsWeak() && f(it.node()->location())) { | 677 if (it.node()->IsWeak() && f(it.node()->location())) { |
| 640 it.node()->MarkPending(); | 678 it.node()->MarkPending(); |
| 641 } | 679 } |
| 642 } | 680 } |
| 643 } | 681 } |
| 644 | 682 |
| 645 | 683 |
| 646 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) { | 684 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) { |
| 647 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 685 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 648 Node* node = new_space_nodes_[i]; | 686 Node* node = new_space_nodes_[i]; |
| 649 if (node->IsStrongRetainer() || | 687 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 650 (node->IsWeakRetainer() && !node->is_independent() && | 688 if (node->IsStrongRetainer() || |
| 651 !node->is_partially_dependent())) { | 689 (node->IsWeakRetainer() && !node->is_independent() && |
| 690 !node->is_unmodified())) { | |
| 652 v->VisitPointer(node->location()); | 691 v->VisitPointer(node->location()); |
| 692 } | |
| 693 } else { | |
| 694 if (node->IsStrongRetainer() || | |
| 695 (node->IsWeakRetainer() && !node->is_independent() && | |
| 696 !node->is_partially_dependent())) { | |
| 697 v->VisitPointer(node->location()); | |
| 698 } | |
| 653 } | 699 } |
| 654 } | 700 } |
| 655 } | 701 } |
| 656 | 702 |
| 657 | 703 |
| 658 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles( | 704 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles( |
| 659 WeakSlotCallbackWithHeap f) { | 705 WeakSlotCallbackWithHeap f) { |
| 660 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 706 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 661 Node* node = new_space_nodes_[i]; | 707 Node* node = new_space_nodes_[i]; |
| 662 DCHECK(node->is_in_new_space_list()); | 708 DCHECK(node->is_in_new_space_list()); |
| (...skipping 17 matching lines...) Expand all Loading... | |
| 680 node->CollectPhantomCallbackData(isolate(), | 726 node->CollectPhantomCallbackData(isolate(), |
| 681 &pending_phantom_callbacks_); | 727 &pending_phantom_callbacks_); |
| 682 } else { | 728 } else { |
| 683 v->VisitPointer(node->location()); | 729 v->VisitPointer(node->location()); |
| 684 } | 730 } |
| 685 } | 731 } |
| 686 } | 732 } |
| 687 } | 733 } |
| 688 | 734 |
| 689 | 735 |
| 736 void GlobalHandles::IdentifyWeakUnmodifiedObjects( | |
| 737 WeakSlotCallback is_unmodified) { | |
| 738 for (int i = 0; i < new_space_nodes_.length(); ++i) { | |
| 739 Node* node = new_space_nodes_[i]; | |
| 740 if (!node->is_active() && node->IsWeak() && | |
| 741 is_unmodified(node->location())) { | |
| 742 node->set_unmodified(true); | |
| 743 } | |
| 744 node->set_active(false); | |
| 745 } | |
| 746 } | |
| 747 | |
| 748 | |
| 749 void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending( | |
| 750 WeakSlotCallbackWithHeap is_unscavenged) { | |
| 751 for (int i = 0; i < new_space_nodes_.length(); ++i) { | |
| 752 Node* node = new_space_nodes_[i]; | |
| 753 DCHECK(node->is_in_new_space_list()); | |
| 754 if ((node->is_independent() || node->is_unmodified()) && node->IsWeak() && | |
| 755 is_unscavenged(isolate_->heap(), node->location())) { | |
| 756 node->MarkPending(); | |
| 757 } | |
| 758 } | |
| 759 } | |
| 760 | |
| 761 | |
| 762 void GlobalHandles::IterateNewSpaceWeakUnmodifiedRoots(ObjectVisitor* v) { | |
| 763 for (int i = 0; i < new_space_nodes_.length(); ++i) { | |
| 764 Node* node = new_space_nodes_[i]; | |
| 765 DCHECK(node->is_in_new_space_list()); | |
| 766 if ((node->is_independent() || node->is_unmodified()) && | |
| 767 node->IsWeakRetainer()) { | |
| 768 // Pending weak phantom handles die immediately. Everything else survives. | |
| 769 if (node->state() == Node::PENDING && | |
| 770 node->weakness_type() != NORMAL_WEAK) { | |
| 771 node->CollectPhantomCallbackData(isolate(), | |
| 772 &pending_phantom_callbacks_); | |
| 773 } else { | |
| 774 v->VisitPointer(node->location()); | |
| 775 } | |
| 776 } | |
| 777 } | |
| 778 } | |
| 779 | |
| 780 | |
| 690 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, | 781 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, |
| 691 WeakSlotCallbackWithHeap can_skip) { | 782 WeakSlotCallbackWithHeap can_skip) { |
| 692 ComputeObjectGroupsAndImplicitReferences(); | 783 ComputeObjectGroupsAndImplicitReferences(); |
| 693 int last = 0; | 784 int last = 0; |
| 694 bool any_group_was_visited = false; | 785 bool any_group_was_visited = false; |
| 695 for (int i = 0; i < object_groups_.length(); i++) { | 786 for (int i = 0; i < object_groups_.length(); i++) { |
| 696 ObjectGroup* entry = object_groups_.at(i); | 787 ObjectGroup* entry = object_groups_.at(i); |
| 697 DCHECK(entry != NULL); | 788 DCHECK(entry != NULL); |
| 698 | 789 |
| 699 Object*** objects = entry->objects; | 790 Object*** objects = entry->objects; |
| (...skipping 50 matching lines...) | |
| 750 const int initial_post_gc_processing_count) { | 841 const int initial_post_gc_processing_count) { |
| 751 int freed_nodes = 0; | 842 int freed_nodes = 0; |
| 752 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 843 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 753 Node* node = new_space_nodes_[i]; | 844 Node* node = new_space_nodes_[i]; |
| 754 DCHECK(node->is_in_new_space_list()); | 845 DCHECK(node->is_in_new_space_list()); |
| 755 if (!node->IsRetainer()) { | 846 if (!node->IsRetainer()) { |
| 756 // Free nodes do not have weak callbacks. Do not use them to compute | 847 // Free nodes do not have weak callbacks. Do not use them to compute |
| 757 // the freed_nodes. | 848 // the freed_nodes. |
| 758 continue; | 849 continue; |
| 759 } | 850 } |
| 760 // Skip dependent handles. Their weak callbacks might expect to be | 851 // Skip dependent or unmodified handles. Their weak callbacks might expect |
| 852 // to be | |
| 761 // called between two global garbage collection callbacks which | 853 // called between two global garbage collection callbacks which |
| 762 // are not called for minor collections. | 854 // are not called for minor collections. |
| 763 if (!node->is_independent() && !node->is_partially_dependent()) { | 855 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 764 continue; | 856 if (!node->is_independent() && !node->is_unmodified()) { |
| 857 continue; | |
| 858 } | |
| 859 node->set_active(false); | |
| 860 node->set_unmodified(false); | |
| 861 } else { | |
| 862 if (!node->is_independent() && !node->is_partially_dependent()) { | |
| 863 continue; | |
| 864 } | |
| 865 node->clear_partially_dependent(); | |
| 765 } | 866 } |
| 766 node->clear_partially_dependent(); | 867 |
| 767 if (node->PostGarbageCollectionProcessing(isolate_)) { | 868 if (node->PostGarbageCollectionProcessing(isolate_)) { |
| 768 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 869 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 769 // Weak callback triggered another GC and another round of | 870 // Weak callback triggered another GC and another round of |
| 770 // PostGarbageCollection processing. The current node might | 871 // PostGarbageCollection processing. The current node might |
| 771 // have been deleted in that round, so we need to bail out (or | 872 // have been deleted in that round, so we need to bail out (or |
| 772 // restart the processing). | 873 // restart the processing). |
| 773 return freed_nodes; | 874 return freed_nodes; |
| 774 } | 875 } |
| 775 } | 876 } |
| 776 if (!node->IsRetainer()) { | 877 if (!node->IsRetainer()) { |
| 777 freed_nodes++; | 878 freed_nodes++; |
| 778 } | 879 } |
| 779 } | 880 } |
| 780 return freed_nodes; | 881 return freed_nodes; |
| 781 } | 882 } |
| 782 | 883 |
| 783 | 884 |
| 784 int GlobalHandles::PostMarkSweepProcessing( | 885 int GlobalHandles::PostMarkSweepProcessing( |
| 785 const int initial_post_gc_processing_count) { | 886 const int initial_post_gc_processing_count) { |
| 786 int freed_nodes = 0; | 887 int freed_nodes = 0; |
| 787 for (NodeIterator it(this); !it.done(); it.Advance()) { | 888 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 788 if (!it.node()->IsRetainer()) { | 889 if (!it.node()->IsRetainer()) { |
| 789 // Free nodes do not have weak callbacks. Do not use them to compute | 890 // Free nodes do not have weak callbacks. Do not use them to compute |
| 790 // the freed_nodes. | 891 // the freed_nodes. |
| 791 continue; | 892 continue; |
| 792 } | 893 } |
| 793 it.node()->clear_partially_dependent(); | 894 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 895 it.node()->set_unmodified(false); | |
| 896 it.node()->set_active(false); | |
| 897 } else { | |
| 898 it.node()->clear_partially_dependent(); | |
| 899 } | |
| 794 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { | 900 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { |
| 795 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 901 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 796 // See the comment above. | 902 // See the comment above. |
| 797 return freed_nodes; | 903 return freed_nodes; |
| 798 } | 904 } |
| 799 } | 905 } |
| 800 if (!it.node()->IsRetainer()) { | 906 if (!it.node()->IsRetainer()) { |
| 801 freed_nodes++; | 907 freed_nodes++; |
| 802 } | 908 } |
| 803 } | 909 } |
| (...skipping 144 matching lines...) | |
| 948 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 1054 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 949 Node* node = new_space_nodes_[i]; | 1055 Node* node = new_space_nodes_[i]; |
| 950 if (node->IsRetainer() && node->has_wrapper_class_id()) { | 1056 if (node->IsRetainer() && node->has_wrapper_class_id()) { |
| 951 v->VisitEmbedderReference(node->location(), | 1057 v->VisitEmbedderReference(node->location(), |
| 952 node->wrapper_class_id()); | 1058 node->wrapper_class_id()); |
| 953 } | 1059 } |
| 954 } | 1060 } |
| 955 } | 1061 } |
| 956 | 1062 |
| 957 | 1063 |
| 1064 void GlobalHandles::IterateWeakRootsInNewSpaceWithClassIds(ObjectVisitor* v) { | |
| 1065 for (int i = 0; i < new_space_nodes_.length(); ++i) { | |
| 1066 Node* node = new_space_nodes_[i]; | |
| 1067 if (node->has_wrapper_class_id() && node->IsWeak()) { | |
| 1068 v->VisitEmbedderReference(node->location(), node->wrapper_class_id()); | |
| 1069 } | |
| 1070 } | |
| 1071 } | |
| 1072 | |
| 1073 | |
| 958 int GlobalHandles::NumberOfWeakHandles() { | 1074 int GlobalHandles::NumberOfWeakHandles() { |
| 959 int count = 0; | 1075 int count = 0; |
| 960 for (NodeIterator it(this); !it.done(); it.Advance()) { | 1076 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 961 if (it.node()->IsWeakRetainer()) { | 1077 if (it.node()->IsWeakRetainer()) { |
| 962 count++; | 1078 count++; |
| 963 } | 1079 } |
| 964 } | 1080 } |
| 965 return count; | 1081 return count; |
| 966 } | 1082 } |
| 967 | 1083 |
| (...skipping 306 matching lines...) | |
| 1274 blocks_[block][offset] = object; | 1390 blocks_[block][offset] = object; |
| 1275 if (isolate->heap()->InNewSpace(object)) { | 1391 if (isolate->heap()->InNewSpace(object)) { |
| 1276 new_space_indices_.Add(size_); | 1392 new_space_indices_.Add(size_); |
| 1277 } | 1393 } |
| 1278 *index = size_++; | 1394 *index = size_++; |
| 1279 } | 1395 } |
| 1280 | 1396 |
| 1281 | 1397 |
| 1282 } // namespace internal | 1398 } // namespace internal |
| 1283 } // namespace v8 | 1399 } // namespace v8 |
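Summarizing the behavioral split the patch introduces: with --scavenge_reclaim_unmodified_objects enabled, PostScavengeProcessing only considers new-space weak handles that are independent or were classified as unmodified by IdentifyWeakUnmodifiedObjects, and it resets the active/unmodified bits instead of clearing the partially-dependent bit. A small self-contained sketch of that filtering decision; the `Node` struct and the `ConsiderForPostScavengeProcessing` helper below are simplified stand-ins for illustration, not the real V8 classes:

```cpp
#include <iostream>

// Simplified stand-in for the relevant Node bits; not the real V8 class.
struct Node {
  bool is_retainer;
  bool is_independent;
  bool is_partially_dependent;  // meaningful only with the flag off
  bool is_unmodified;           // meaningful only with the flag on
};

// Mirrors the skip logic in GlobalHandles::PostScavengeProcessing: returns
// true if the node's weak callback may run after a minor GC (scavenge).
bool ConsiderForPostScavengeProcessing(const Node& node, bool flag_enabled) {
  if (!node.is_retainer) return false;  // free nodes have no weak callbacks
  if (flag_enabled) {
    return node.is_independent || node.is_unmodified;
  }
  return node.is_independent || node.is_partially_dependent;
}

int main() {
  Node modified_wrapper{true, false, false, false};
  Node untouched_wrapper{true, false, false, true};
  // With --scavenge_reclaim_unmodified_objects: prints 0 then 1.
  std::cout << ConsiderForPostScavengeProcessing(modified_wrapper, true)
            << ConsiderForPostScavengeProcessing(untouched_wrapper, true)
            << std::endl;
  return 0;
}
```

The CHECKs added to is_partially_dependent / set_partially_dependent and to is_unmodified / set_unmodified enforce at runtime that the two modes never mix on a single node.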