| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/global-handles.h" | 5 #include "src/global-handles.h" |
| 6 | 6 |
| 7 #include "src/api.h" | 7 #include "src/api.h" |
| 8 #include "src/v8.h" | 8 #include "src/v8.h" |
| 9 #include "src/vm-state-inl.h" | 9 #include "src/vm-state-inl.h" |
| 10 | 10 |
| (...skipping 34 matching lines...) | |
| 45 Node() { | 45 Node() { |
| 46 DCHECK(offsetof(Node, class_id_) == Internals::kNodeClassIdOffset); | 46 DCHECK(offsetof(Node, class_id_) == Internals::kNodeClassIdOffset); |
| 47 DCHECK(offsetof(Node, flags_) == Internals::kNodeFlagsOffset); | 47 DCHECK(offsetof(Node, flags_) == Internals::kNodeFlagsOffset); |
| 48 STATIC_ASSERT(static_cast<int>(NodeState::kMask) == | 48 STATIC_ASSERT(static_cast<int>(NodeState::kMask) == |
| 49 Internals::kNodeStateMask); | 49 Internals::kNodeStateMask); |
| 50 STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue); | 50 STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue); |
| 51 STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue); | 51 STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue); |
| 52 STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue); | 52 STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue); |
| 53 STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) == | 53 STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) == |
| 54 Internals::kNodeIsIndependentShift); | 54 Internals::kNodeIsIndependentShift); |
| 55 STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) == | |
| 56 Internals::kNodeIsPartiallyDependentShift); | |
| 57 STATIC_ASSERT(static_cast<int>(IsActive::kShift) == | 55 STATIC_ASSERT(static_cast<int>(IsActive::kShift) == |
| 58 Internals::kNodeIsActiveShift); | 56 Internals::kNodeIsActiveShift); |
| 59 } | 57 } |
| 60 | 58 |
| 61 #ifdef ENABLE_HANDLE_ZAPPING | 59 #ifdef ENABLE_HANDLE_ZAPPING |
| 62 ~Node() { | 60 ~Node() { |
| 63 // TODO(1428): if it's a weak handle we should have invoked its callback. | 61 // TODO(1428): if it's a weak handle we should have invoked its callback. |
| 64 // Zap the values for eager trapping. | 62 // Zap the values for eager trapping. |
| 65 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 63 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 66 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 64 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 67 index_ = 0; | 65 index_ = 0; |
| 68 set_independent(false); | 66 set_independent(false); |
| 69 if (FLAG_scavenge_reclaim_unmodified_objects) { | 67 set_active(false); |
| 70 set_active(false); | |
| 71 } else { | |
| 72 set_partially_dependent(false); | |
| 73 } | |
| 74 set_in_new_space_list(false); | 68 set_in_new_space_list(false); |
| 75 parameter_or_next_free_.next_free = NULL; | 69 parameter_or_next_free_.next_free = NULL; |
| 76 weak_callback_ = NULL; | 70 weak_callback_ = NULL; |
| 77 } | 71 } |
| 78 #endif | 72 #endif |
| 79 | 73 |
| 80 void Initialize(int index, Node** first_free) { | 74 void Initialize(int index, Node** first_free) { |
| 81 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 75 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 82 index_ = static_cast<uint8_t>(index); | 76 index_ = static_cast<uint8_t>(index); |
| 83 DCHECK(static_cast<int>(index_) == index); | 77 DCHECK(static_cast<int>(index_) == index); |
| 84 set_state(FREE); | 78 set_state(FREE); |
| 85 set_in_new_space_list(false); | 79 set_in_new_space_list(false); |
| 86 parameter_or_next_free_.next_free = *first_free; | 80 parameter_or_next_free_.next_free = *first_free; |
| 87 *first_free = this; | 81 *first_free = this; |
| 88 } | 82 } |
| 89 | 83 |
| 90 void Acquire(Object* object) { | 84 void Acquire(Object* object) { |
| 91 DCHECK(state() == FREE); | 85 DCHECK(state() == FREE); |
| 92 object_ = object; | 86 object_ = object; |
| 93 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 87 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 94 set_independent(false); | 88 set_independent(false); |
| 95 if (FLAG_scavenge_reclaim_unmodified_objects) { | 89 set_active(false); |
| 96 set_active(false); | |
| 97 } else { | |
| 98 set_partially_dependent(false); | |
| 99 } | |
| 100 set_state(NORMAL); | 90 set_state(NORMAL); |
| 101 parameter_or_next_free_.parameter = NULL; | 91 parameter_or_next_free_.parameter = NULL; |
| 102 weak_callback_ = NULL; | 92 weak_callback_ = NULL; |
| 103 IncreaseBlockUses(); | 93 IncreaseBlockUses(); |
| 104 } | 94 } |
| 105 | 95 |
| 106 void Zap() { | 96 void Zap() { |
| 107 DCHECK(IsInUse()); | 97 DCHECK(IsInUse()); |
| 108 // Zap the values for eager trapping. | 98 // Zap the values for eager trapping. |
| 109 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 110 } | 100 } |
| 111 | 101 |
| 112 void Release() { | 102 void Release() { |
| 113 DCHECK(IsInUse()); | 103 DCHECK(IsInUse()); |
| 114 set_state(FREE); | 104 set_state(FREE); |
| 115 // Zap the values for eager trapping. | 105 // Zap the values for eager trapping. |
| 116 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 106 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 117 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 107 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 118 set_independent(false); | 108 set_independent(false); |
| 119 if (FLAG_scavenge_reclaim_unmodified_objects) { | 109 set_active(false); |
| 120 set_active(false); | |
| 121 } else { | |
| 122 set_partially_dependent(false); | |
| 123 } | |
| 124 weak_callback_ = NULL; | 110 weak_callback_ = NULL; |
| 125 DecreaseBlockUses(); | 111 DecreaseBlockUses(); |
| 126 } | 112 } |
| 127 | 113 |
| 128 // Object slot accessors. | 114 // Object slot accessors. |
| 129 Object* object() const { return object_; } | 115 Object* object() const { return object_; } |
| 130 Object** location() { return &object_; } | 116 Object** location() { return &object_; } |
| 131 Handle<Object> handle() { return Handle<Object>(location()); } | 117 Handle<Object> handle() { return Handle<Object>(location()); } |
| 132 | 118 |
| 133 // Wrapper class ID accessors. | 119 // Wrapper class ID accessors. |
| (...skipping 12 matching lines...) | |
| 146 flags_ = NodeState::update(flags_, state); | 132 flags_ = NodeState::update(flags_, state); |
| 147 } | 133 } |
| 148 | 134 |
| 149 bool is_independent() { | 135 bool is_independent() { |
| 150 return IsIndependent::decode(flags_); | 136 return IsIndependent::decode(flags_); |
| 151 } | 137 } |
| 152 void set_independent(bool v) { | 138 void set_independent(bool v) { |
| 153 flags_ = IsIndependent::update(flags_, v); | 139 flags_ = IsIndependent::update(flags_, v); |
| 154 } | 140 } |
| 155 | 141 |
| 156 bool is_partially_dependent() { | |
| 157 CHECK(!FLAG_scavenge_reclaim_unmodified_objects); | |
| 158 return IsPartiallyDependent::decode(flags_); | |
| 159 } | |
| 160 void set_partially_dependent(bool v) { | |
| 161 CHECK(!FLAG_scavenge_reclaim_unmodified_objects); | |
| 162 flags_ = IsPartiallyDependent::update(flags_, v); | |
| 163 } | |
| 164 | |
| 165 bool is_active() { | 142 bool is_active() { |
| 166 CHECK(FLAG_scavenge_reclaim_unmodified_objects); | |
| 167 return IsActive::decode(flags_); | 143 return IsActive::decode(flags_); |
| 168 } | 144 } |
| 169 void set_active(bool v) { | 145 void set_active(bool v) { |
| 170 CHECK(FLAG_scavenge_reclaim_unmodified_objects); | |
| 171 flags_ = IsActive::update(flags_, v); | 146 flags_ = IsActive::update(flags_, v); |
| 172 } | 147 } |
| 173 | 148 |
| 174 bool is_in_new_space_list() { | 149 bool is_in_new_space_list() { |
| 175 return IsInNewSpaceList::decode(flags_); | 150 return IsInNewSpaceList::decode(flags_); |
| 176 } | 151 } |
| 177 void set_in_new_space_list(bool v) { | 152 void set_in_new_space_list(bool v) { |
| 178 flags_ = IsInNewSpaceList::update(flags_, v); | 153 flags_ = IsInNewSpaceList::update(flags_, v); |
| 179 } | 154 } |
| 180 | 155 |
| (...skipping 39 matching lines...) | |
| 220 DCHECK(state() == WEAK); | 195 DCHECK(state() == WEAK); |
| 221 set_state(PENDING); | 196 set_state(PENDING); |
| 222 } | 197 } |
| 223 | 198 |
| 224 // Independent flag accessors. | 199 // Independent flag accessors. |
| 225 void MarkIndependent() { | 200 void MarkIndependent() { |
| 226 DCHECK(IsInUse()); | 201 DCHECK(IsInUse()); |
| 227 set_independent(true); | 202 set_independent(true); |
| 228 } | 203 } |
| 229 | 204 |
| 230 void MarkPartiallyDependent() { | |
| 231 DCHECK(IsInUse()); | |
| 232 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) { | |
| 233 set_partially_dependent(true); | |
| 234 } | |
| 235 } | |
| 236 void clear_partially_dependent() { set_partially_dependent(false); } | |
| 237 | |
| 238 // Callback accessor. | 205 // Callback accessor. |
| 239 // TODO(svenpanne) Re-enable or nuke later. | 206 // TODO(svenpanne) Re-enable or nuke later. |
| 240 // WeakReferenceCallback callback() { return callback_; } | 207 // WeakReferenceCallback callback() { return callback_; } |
| 241 | 208 |
| 242 // Callback parameter accessors. | 209 // Callback parameter accessors. |
| 243 void set_parameter(void* parameter) { | 210 void set_parameter(void* parameter) { |
| 244 DCHECK(IsInUse()); | 211 DCHECK(IsInUse()); |
| 245 parameter_or_next_free_.parameter = parameter; | 212 parameter_or_next_free_.parameter = parameter; |
| 246 } | 213 } |
| 247 void* parameter() const { | 214 void* parameter() const { |
| (...skipping 143 matching lines...) | |
| 391 | 358 |
| 392 // Index in the containing handle block. | 359 // Index in the containing handle block. |
| 393 uint8_t index_; | 360 uint8_t index_; |
| 394 | 361 |
| 395 // This stores three flags (independent, partially_dependent and | 362 // This stores three flags (independent, active and |
| 396 // in_new_space_list) and a State. | 363 // in_new_space_list) and a State. |
| 397 class NodeState : public BitField<State, 0, 3> {}; | 364 class NodeState : public BitField<State, 0, 3> {}; |
| 398 class IsIndependent : public BitField<bool, 3, 1> {}; | 365 class IsIndependent : public BitField<bool, 3, 1> {}; |
| 399 // The following two fields are mutually exclusive | 366 // IsActive takes over the bit formerly shared with IsPartiallyDependent. |
| 400 class IsActive : public BitField<bool, 4, 1> {}; | 367 class IsActive : public BitField<bool, 4, 1> {}; |
| 401 class IsPartiallyDependent : public BitField<bool, 4, 1> {}; | |
| 402 class IsInNewSpaceList : public BitField<bool, 5, 1> {}; | 368 class IsInNewSpaceList : public BitField<bool, 5, 1> {}; |
| 403 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; | 369 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; |
| 404 | 370 |
| 405 uint8_t flags_; | 371 uint8_t flags_; |
| 406 | 372 |
| 407 // Handle specific callback - might be a weak reference in disguise. | 373 // Handle specific callback - might be a weak reference in disguise. |
| 408 WeakCallbackInfo<void>::Callback weak_callback_; | 374 WeakCallbackInfo<void>::Callback weak_callback_; |
| 409 | 375 |
| 410 // Provided data for callback. In FREE state, this is used for | 376 // Provided data for callback. In FREE state, this is used for |
| 411 // the free list link. | 377 // the free list link. |
| (...skipping 223 matching lines...) | |
| 635 | 601 |
| 636 void* GlobalHandles::ClearWeakness(Object** location) { | 602 void* GlobalHandles::ClearWeakness(Object** location) { |
| 637 return Node::FromLocation(location)->ClearWeakness(); | 603 return Node::FromLocation(location)->ClearWeakness(); |
| 638 } | 604 } |
| 639 | 605 |
| 640 | 606 |
| 641 void GlobalHandles::MarkIndependent(Object** location) { | 607 void GlobalHandles::MarkIndependent(Object** location) { |
| 642 Node::FromLocation(location)->MarkIndependent(); | 608 Node::FromLocation(location)->MarkIndependent(); |
| 643 } | 609 } |
| 644 | 610 |
| 645 | |
| 646 void GlobalHandles::MarkPartiallyDependent(Object** location) { | |
| 647 Node::FromLocation(location)->MarkPartiallyDependent(); | |
| 648 } | |
| 649 | |
| 650 | |
| 651 bool GlobalHandles::IsIndependent(Object** location) { | 611 bool GlobalHandles::IsIndependent(Object** location) { |
| 652 return Node::FromLocation(location)->is_independent(); | 612 return Node::FromLocation(location)->is_independent(); |
| 653 } | 613 } |
| 654 | 614 |
| 655 | 615 |
| 656 bool GlobalHandles::IsNearDeath(Object** location) { | 616 bool GlobalHandles::IsNearDeath(Object** location) { |
| 657 return Node::FromLocation(location)->IsNearDeath(); | 617 return Node::FromLocation(location)->IsNearDeath(); |
| 658 } | 618 } |
| 659 | 619 |
| 660 | 620 |
| (...skipping 26 matching lines...) | |
| 687 if (it.node()->IsWeak() && f(it.node()->location())) { | 647 if (it.node()->IsWeak() && f(it.node()->location())) { |
| 688 it.node()->MarkPending(); | 648 it.node()->MarkPending(); |
| 689 } | 649 } |
| 690 } | 650 } |
| 691 } | 651 } |
| 692 | 652 |
| 693 | 653 |
| 694 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) { | 654 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) { |
| 695 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 655 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 696 Node* node = new_space_nodes_[i]; | 656 Node* node = new_space_nodes_[i]; |
| 697 if (FLAG_scavenge_reclaim_unmodified_objects) { | 657 if (node->IsStrongRetainer() || |
| 698 if (node->IsStrongRetainer() || | 658 (node->IsWeakRetainer() && !node->is_independent() && |
| 699 (node->IsWeakRetainer() && !node->is_independent() && | 659 node->is_active())) { |
| 700 node->is_active())) { | 660 v->VisitPointer(node->location()); |
| 701 v->VisitPointer(node->location()); | |
| 702 } | |
| 703 } else { | |
| 704 if (node->IsStrongRetainer() || | |
| 705 (node->IsWeakRetainer() && !node->is_independent() && | |
| 706 !node->is_partially_dependent())) { | |
| 707 v->VisitPointer(node->location()); | |
| 708 } | |
| 709 } | 661 } |
| 710 } | 662 } |
| 711 } | 663 } |
| 712 | 664 |
| 713 | 665 |
| 714 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles( | 666 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles( |
| 715 WeakSlotCallbackWithHeap f) { | 667 WeakSlotCallbackWithHeap f) { |
| 716 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 668 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 717 Node* node = new_space_nodes_[i]; | 669 Node* node = new_space_nodes_[i]; |
| 718 DCHECK(node->is_in_new_space_list()); | 670 DCHECK(node->is_in_new_space_list()); |
| 719 if ((node->is_independent() || node->is_partially_dependent()) && | 671 if (node->is_independent() && node->IsWeak() && |
| 720 node->IsWeak() && f(isolate_->heap(), node->location())) { | 672 f(isolate_->heap(), node->location())) { |
| 721 node->MarkPending(); | 673 node->MarkPending(); |
| 722 } | 674 } |
| 723 } | 675 } |
| 724 } | 676 } |
| 725 | 677 |
| 726 | 678 |
| 727 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { | 679 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { |
| 728 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 680 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 729 Node* node = new_space_nodes_[i]; | 681 Node* node = new_space_nodes_[i]; |
| 730 DCHECK(node->is_in_new_space_list()); | 682 DCHECK(node->is_in_new_space_list()); |
| 731 if ((node->is_independent() || node->is_partially_dependent()) && | 683 if (node->is_independent() && node->IsWeakRetainer()) { |
| 732 node->IsWeakRetainer()) { | |
| 733 // Pending weak phantom handles die immediately. Everything else survives. | 684 // Pending weak phantom handles die immediately. Everything else survives. |
| 734 if (node->IsPendingPhantomResetHandle()) { | 685 if (node->IsPendingPhantomResetHandle()) { |
| 735 node->ResetPhantomHandle(); | 686 node->ResetPhantomHandle(); |
| 736 ++number_of_phantom_handle_resets_; | 687 ++number_of_phantom_handle_resets_; |
| 737 } else if (node->IsPendingPhantomCallback()) { | 688 } else if (node->IsPendingPhantomCallback()) { |
| 738 node->CollectPhantomCallbackData(isolate(), | 689 node->CollectPhantomCallbackData(isolate(), |
| 739 &pending_phantom_callbacks_); | 690 &pending_phantom_callbacks_); |
| 740 } else { | 691 } else { |
| 741 v->VisitPointer(node->location()); | 692 v->VisitPointer(node->location()); |
| 742 } | 693 } |
| (...skipping 218 matching lines...) | |
| 961 DCHECK(node->is_in_new_space_list()); | 912 DCHECK(node->is_in_new_space_list()); |
| 962 if (!node->IsRetainer()) { | 913 if (!node->IsRetainer()) { |
| 963 // Free nodes do not have weak callbacks. Do not use them to compute | 914 // Free nodes do not have weak callbacks. Do not use them to compute |
| 964 // the freed_nodes. | 915 // the freed_nodes. |
| 965 continue; | 916 continue; |
| 966 } | 917 } |
| 967 // Skip dependent or unmodified handles. Their weak callbacks | 918 // Skip dependent or unmodified handles. Their weak callbacks |
| 968 // might expect to be called between two global garbage | 919 // might expect to be called between two global garbage |
| 969 // collection callbacks, which are not called for minor | 920 // collection callbacks, which are not called for minor |
| 970 // collections. | 921 // collections. |
| 971 if (FLAG_scavenge_reclaim_unmodified_objects) { | |
| 972 if (!node->is_independent() && (node->is_active())) { | 922 if (!node->is_independent() && (node->is_active())) { |
| 973 node->set_active(false); | 923 node->set_active(false); |
| 974 continue; | 924 continue; |
| 975 } | 925 } |
| 976 node->set_active(false); | 926 node->set_active(false); |
| 977 } else { | |
| 978 if (!node->is_independent() && !node->is_partially_dependent()) { | |
| 979 continue; | |
| 980 } | |
| 981 node->clear_partially_dependent(); | |
| 982 } | |
| 983 | 927 |
| 984 if (node->PostGarbageCollectionProcessing(isolate_)) { | 928 if (node->PostGarbageCollectionProcessing(isolate_)) { |
| 985 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 929 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 986 // Weak callback triggered another GC and another round of | 930 // Weak callback triggered another GC and another round of |
| 987 // PostGarbageCollection processing. The current node might | 931 // PostGarbageCollection processing. The current node might |
| 988 // have been deleted in that round, so we need to bail out (or | 932 // have been deleted in that round, so we need to bail out (or |
| 989 // restart the processing). | 933 // restart the processing). |
| 990 return freed_nodes; | 934 return freed_nodes; |
| 991 } | 935 } |
| 992 } | 936 } |
| 993 if (!node->IsRetainer()) { | 937 if (!node->IsRetainer()) { |
| 994 freed_nodes++; | 938 freed_nodes++; |
| 995 } | 939 } |
| 996 } | 940 } |
| 997 return freed_nodes; | 941 return freed_nodes; |
| 998 } | 942 } |
| 999 | 943 |
| 1000 | 944 |
| 1001 int GlobalHandles::PostMarkSweepProcessing( | 945 int GlobalHandles::PostMarkSweepProcessing( |
| 1002 const int initial_post_gc_processing_count) { | 946 const int initial_post_gc_processing_count) { |
| 1003 int freed_nodes = 0; | 947 int freed_nodes = 0; |
| 1004 for (NodeIterator it(this); !it.done(); it.Advance()) { | 948 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 1005 if (!it.node()->IsRetainer()) { | 949 if (!it.node()->IsRetainer()) { |
| 1006 // Free nodes do not have weak callbacks. Do not use them to compute | 950 // Free nodes do not have weak callbacks. Do not use them to compute |
| 1007 // the freed_nodes. | 951 // the freed_nodes. |
| 1008 continue; | 952 continue; |
| 1009 } | 953 } |
| 1010 if (FLAG_scavenge_reclaim_unmodified_objects) { | 954 it.node()->set_active(false); |
| 1011 it.node()->set_active(false); | |
| 1012 } else { | |
| 1013 it.node()->clear_partially_dependent(); | |
| 1014 } | |
| 1015 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { | 955 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { |
| 1016 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 956 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 1017 // See the comment above. | 957 // See the comment above. |
| 1018 return freed_nodes; | 958 return freed_nodes; |
| 1019 } | 959 } |
| 1020 } | 960 } |
| 1021 if (!it.node()->IsRetainer()) { | 961 if (!it.node()->IsRetainer()) { |
| 1022 freed_nodes++; | 962 freed_nodes++; |
| 1023 } | 963 } |
| 1024 } | 964 } |
| (...skipping 486 matching lines...) | |
| 1511 blocks_[block][offset] = object; | 1451 blocks_[block][offset] = object; |
| 1512 if (isolate->heap()->InNewSpace(object)) { | 1452 if (isolate->heap()->InNewSpace(object)) { |
| 1513 new_space_indices_.Add(size_); | 1453 new_space_indices_.Add(size_); |
| 1514 } | 1454 } |
| 1515 *index = size_++; | 1455 *index = size_++; |
| 1516 } | 1456 } |
| 1517 | 1457 |
| 1518 | 1458 |
| 1519 } // namespace internal | 1459 } // namespace internal |
| 1520 } // namespace v8 | 1460 } // namespace v8 |
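
For readers following the flag cleanup above: the sketch below is a minimal, self-contained illustration of how the Node flag byte is laid out after this change, with IsActive as the sole occupant of bit 4 that was previously shared with IsPartiallyDependent. It is not V8 source; the BitField here is a simplified stand-in for V8's BitField template in src/utils.h, and the enum/alias names merely mirror the ones in the diff.

// Illustrative sketch only -- simplified stand-in for V8's BitField helper.
#include <cassert>
#include <cstdint>

template <class T, int kShift, int kSize>
struct BitField {
  static constexpr uint8_t kMask = ((1u << kSize) - 1) << kShift;
  static uint8_t update(uint8_t flags, T value) {
    return static_cast<uint8_t>(
        (flags & ~kMask) | ((static_cast<uint8_t>(value) << kShift) & kMask));
  }
  static T decode(uint8_t flags) {
    return static_cast<T>((flags & kMask) >> kShift);
  }
};

enum State : uint8_t { FREE, NORMAL, WEAK, PENDING, NEAR_DEATH };

// Post-change layout of Node::flags_: bit 4 no longer doubles as
// IsPartiallyDependent. (NodeWeaknessType occupies bits 6-7 in the real code.)
using NodeState = BitField<State, 0, 3>;        // bits 0-2
using IsIndependent = BitField<bool, 3, 1>;     // bit 3
using IsActive = BitField<bool, 4, 1>;          // bit 4
using IsInNewSpaceList = BitField<bool, 5, 1>;  // bit 5

int main() {
  uint8_t flags = 0;
  flags = NodeState::update(flags, WEAK);
  flags = IsActive::update(flags, true);
  assert(NodeState::decode(flags) == WEAK);
  assert(IsActive::decode(flags));
  assert(!IsIndependent::decode(flags));
  return 0;
}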