Chromium Code Reviews
| Index: src/global-handles.cc |
| diff --git a/src/global-handles.cc b/src/global-handles.cc |
| index 574b2489ea2bc1e21dc8d917a2935700a050bdee..692ea56c97f7dfd8bb0cc3a8fe6edb6690926e75 100644 |
| --- a/src/global-handles.cc |
| +++ b/src/global-handles.cc |
| @@ -30,10 +30,17 @@ class GlobalHandles::Node { |
| // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE } |
| enum State { |
| FREE = 0, |
| - NORMAL, // Normal global handle. |
| - WEAK, // Flagged as weak but not yet finalized. |
| - PENDING, // Has been recognized as only reachable by weak handles. |
| - NEAR_DEATH // Callback has informed the handle is near death. |
| + NORMAL, // Normal global handle. |
| + WEAK, // Flagged as weak but not yet finalized. |
| + PENDING, // Has been recognized as only reachable by weak handles. |
| + NEAR_DEATH, // Callback has informed the handle is near death. |
| + NUMBER_OF_NODE_STATES |
| + }; |
| + |
| + enum WeaknessType { |
| + NORMAL_WEAK, // Embedder gets a handle to the dying object. |
| + PHANTOM_WEAK, // Embedder gets the parameter they passed in earlier. |
| + INTERNAL_FIELDS_WEAK // Embedder gets 2 internal fields from dying object. |
|
jochen (gone - plz use gerrit)
2014/12/02 10:25:59
Embedder
Erik Corry
2014/12/02 10:40:49
Done.
|
| }; |
| // Maps handle location (slot) to the containing node. |
| @@ -92,8 +99,14 @@ class GlobalHandles::Node { |
| IncreaseBlockUses(); |
| } |
| + void Zap() { |
| + DCHECK(IsInUse()); |
| + // Zap the values for eager trapping. |
| + object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| + } |
| + |
| void Release() { |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| set_state(FREE); |
| // Zap the values for eager trapping. |
| object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| @@ -146,11 +159,11 @@ class GlobalHandles::Node { |
| flags_ = IsInNewSpaceList::update(flags_, v); |
| } |
| - bool is_zapped_during_weak_callback() { |
| - return IsZappedDuringWeakCallback::decode(flags_); |
| + WeaknessType weakness_type() const { |
| + return NodeWeaknessType::decode(flags_); |
| } |
| - void set_is_zapped_during_weak_callback(bool v) { |
| - flags_ = IsZappedDuringWeakCallback::update(flags_, v); |
| + void set_weakness_type(WeaknessType weakness_type) { |
| + flags_ = NodeWeaknessType::update(flags_, weakness_type); |
| } |
| bool IsNearDeath() const { |
| @@ -160,6 +173,8 @@ class GlobalHandles::Node { |
| bool IsWeak() const { return state() == WEAK; } |
| + bool IsInUse() const { return state() != FREE; } |
| + |
| bool IsRetainer() const { return state() != FREE; } |
| bool IsStrongRetainer() const { return state() == NORMAL; } |
| @@ -175,12 +190,12 @@ class GlobalHandles::Node { |
| // Independent flag accessors. |
| void MarkIndependent() { |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| set_independent(true); |
| } |
| void MarkPartiallyDependent() { |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) { |
| set_partially_dependent(true); |
| } |
| @@ -193,14 +208,38 @@ class GlobalHandles::Node { |
| // Callback parameter accessors. |
| void set_parameter(void* parameter) { |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| + DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK); |
| parameter_or_next_free_.parameter = parameter; |
| } |
| void* parameter() const { |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| return parameter_or_next_free_.parameter; |
| } |
| + void set_internal_fields( |
| + int internal_field_index1, int internal_field_index2) { |
| + DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| + DCHECK(internal_field_index1 <= INT16_MAX); |
| + DCHECK(internal_field_index1 >= INT16_MIN); |
| + DCHECK(internal_field_index2 <= INT16_MAX); |
| + DCHECK(internal_field_index2 >= INT16_MIN); |
| + parameter_or_next_free_.internal_field_indeces.internal_field1 = |
| + static_cast<int16_t>(internal_field_index1); |
| + parameter_or_next_free_.internal_field_indeces.internal_field2 = |
| + static_cast<int16_t>(internal_field_index2); |
| + } |
| + |
| + int internal_field1() const { |
| + DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| + return parameter_or_next_free_.internal_field_indeces.internal_field1; |
| + } |
| + |
| + int internal_field2() const { |
| + DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| + return parameter_or_next_free_.internal_field_indeces.internal_field2; |
| + } |
| + |
| // Accessors for next free node in the free list. |
| Node* next_free() { |
| DCHECK(state() == FREE); |
| @@ -211,67 +250,107 @@ class GlobalHandles::Node { |
| parameter_or_next_free_.next_free = value; |
| } |
| - void MakeWeak(void* parameter, WeakCallback weak_callback, |
| - bool is_zapped_during_weak_callback = false) { |
| + void MakeWeak(void* parameter, WeakCallback weak_callback) { |
| DCHECK(weak_callback != NULL); |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| CHECK(object_ != NULL); |
| set_state(WEAK); |
| + set_weakness_type(Node::NORMAL_WEAK); |
| set_parameter(parameter); |
| - set_is_zapped_during_weak_callback(is_zapped_during_weak_callback); |
| weak_callback_ = weak_callback; |
| } |
| + void MakePhantom(void* parameter, PhantomCallback phantom_callback, |
| + int16_t internal_field_index1, |
| + int16_t internal_field_index2) { |
| + DCHECK(phantom_callback != NULL); |
| + DCHECK(IsInUse()); |
| + CHECK(object_ != NULL); |
| + set_state(WEAK); |
| + if (parameter == NULL) { |
| + set_weakness_type(Node::INTERNAL_FIELDS_WEAK); |
| + set_internal_fields(internal_field_index1, internal_field_index2); |
|
jochen (gone - plz use gerrit)
2014/12/02 10:25:59
is there a check somewhere that the object actually has that many internal fields?
Erik Corry
2014/12/02 10:40:49
Yes, it's in InternalFieldOK, called by SlowGetInternalField.
|
| + } else { |
| + DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex); |
| + DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex); |
| + set_weakness_type(Node::PHANTOM_WEAK); |
| + set_parameter(parameter); |
| + } |
| + weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback); |
| + } |
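
The NULL-parameter convention above is the only thing that routes a
registration to the internal-fields path. A sketch of the two call shapes
(illustrative; node, MyPhantomCallback and my_data are hypothetical, while
MakePhantom and kNoInternalFieldIndex are from this patch):

    // Phantom with a parameter: the callback will later receive only
    // my_data, never a handle to the object.
    node->MakePhantom(my_data, &MyPhantomCallback,
                      v8::Object::kNoInternalFieldIndex,
                      v8::Object::kNoInternalFieldIndex);

    // NULL parameter selects INTERNAL_FIELDS_WEAK: the values of internal
    // fields 0 and 1 are read off the dying object and passed to the
    // callback instead of a parameter.
    node->MakePhantom(NULL, &MyPhantomCallback, 0, 1);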
| + |
| void* ClearWeakness() { |
| - DCHECK(state() != FREE); |
| + DCHECK(IsInUse()); |
| void* p = parameter(); |
| set_state(NORMAL); |
| set_parameter(NULL); |
| return p; |
| } |
| + void CollectPhantomCallbackData( |
| + Isolate* isolate, List<PendingPhantomCallback>* pending_callbacks) { |
| + DCHECK(state() != Node::NEAR_DEATH); |
| + if (state() != Node::PENDING) return; |
| + if (weak_callback_ != NULL) { |
| + if (weakness_type() == Node::NORMAL_WEAK) return; |
| + |
| + v8::PhantomCallbackData<void>::Callback callback = |
| + reinterpret_cast<v8::PhantomCallbackData<void>::Callback>( |
| + weak_callback_); |
| + |
| + if (weakness_type() == Node::PHANTOM_WEAK) { |
| + // Phantom weak pointer case. |
| + DCHECK(*location() == Smi::FromInt(kPhantomReferenceZap)); |
| + // Make data with a null handle. |
| + v8::PhantomCallbackData<void> data( |
| + reinterpret_cast<v8::Isolate*>(isolate), parameter()); |
| + pending_callbacks->Add(PendingPhantomCallback(data, callback)); |
| + } else { |
| + DCHECK(weakness_type() == Node::INTERNAL_FIELDS_WEAK); |
| + // Phantom weak pointer case, passing internal fields instead of |
| + // parameter. |
| + Handle<Object> handle(object(), isolate); |
| + Handle<JSObject> jsobject = Handle<JSObject>::cast(handle); |
| + v8::PhantomCallbackData<void> data( |
| + reinterpret_cast<v8::Isolate*>(isolate), |
| + jsobject->GetInternalField(internal_field1()), |
| + jsobject->GetInternalField(internal_field2())); |
| + // In the future, we want to delay the callback. In that case we will |
| + // zap when we queue up, to stop the C++ side accessing the dead V8 |
| + // object, but we will call Release only after the callback (allowing |
| + // the node to be reused). |
| + pending_callbacks->Add(PendingPhantomCallback(data, callback)); |
| + } |
| + } |
| + Release(); |
| + } |
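
CollectPhantomCallbackData packages everything the callback will need and
queues it, which is why the node can be Release()d here even though the
callback has not yet run; the queue is drained later by
DispatchPendingPhantomCallbacks. PendingPhantomCallback itself is declared
in global-handles.h, which is not part of this diff; judging from the call
sites (Add(...) above, invoke() below) it is presumably a plain
data/callback pair along these lines:

    // Guess at the helper's shape -- not taken from the patch.
    class PendingPhantomCallback {
     public:
      typedef v8::PhantomCallbackData<void> Data;
      PendingPhantomCallback(Data data, Data::Callback callback)
          : data_(data), callback_(callback) {}
      void invoke() { callback_(data_); }

     private:
      Data data_;
      Data::Callback callback_;
    };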
| + |
| bool PostGarbageCollectionProcessing(Isolate* isolate) { |
| if (state() != Node::PENDING) return false; |
| if (weak_callback_ == NULL) { |
| Release(); |
| return false; |
| } |
| - void* param = parameter(); |
| set_state(NEAR_DEATH); |
| - set_parameter(NULL); |
| + // Check that we are not passing a finalized external string to |
| + // the callback. |
| + DCHECK(!object_->IsExternalOneByteString() || |
| + ExternalOneByteString::cast(object_)->resource() != NULL); |
| + DCHECK(!object_->IsExternalTwoByteString() || |
| + ExternalTwoByteString::cast(object_)->resource() != NULL); |
| + // Leaving V8. |
| + VMState<EXTERNAL> vmstate(isolate); |
| + HandleScope handle_scope(isolate); |
| + DCHECK(weakness_type() == Node::NORMAL_WEAK); |
| Object** object = location(); |
| - { |
| - // Check that we are not passing a finalized external string to |
| - // the callback. |
| - DCHECK(!object_->IsExternalOneByteString() || |
| - ExternalOneByteString::cast(object_)->resource() != NULL); |
| - DCHECK(!object_->IsExternalTwoByteString() || |
| - ExternalTwoByteString::cast(object_)->resource() != NULL); |
| - // Leaving V8. |
| - VMState<EXTERNAL> vmstate(isolate); |
| - HandleScope handle_scope(isolate); |
| - if (is_zapped_during_weak_callback()) { |
| - // Phantom weak pointer case. |
| - DCHECK(*object == Smi::FromInt(kPhantomReferenceZap)); |
| - // Make data with a null handle. |
| - v8::WeakCallbackData<v8::Value, void> data( |
| - reinterpret_cast<v8::Isolate*>(isolate), v8::Local<v8::Object>(), |
| - param); |
| - weak_callback_(data); |
| - if (state() != FREE) { |
| - // Callback does not have to clear the global handle if it is a |
| - // phantom handle. |
| - Release(); |
| - } |
| - } else { |
| - Handle<Object> handle(*object, isolate); |
| - v8::WeakCallbackData<v8::Value, void> data( |
| - reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle), |
| - param); |
| - weak_callback_(data); |
| - } |
| - } |
| + Handle<Object> handle(*object, isolate); |
| + v8::WeakCallbackData<v8::Value, void> data( |
| + reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle), |
| + parameter()); |
| + set_parameter(NULL); |
| + weak_callback_(data); |
| + |
| // Absence of explicit cleanup or revival of weak handle |
| // in most of the cases would lead to memory leak. |
| CHECK(state() != NEAR_DEATH); |
| @@ -300,11 +379,11 @@ class GlobalHandles::Node { |
| // This stores three flags (independent, partially_dependent and |
| // in_new_space_list) and a State. |
| - class NodeState : public BitField<State, 0, 4> {}; |
| - class IsIndependent : public BitField<bool, 4, 1> {}; |
| - class IsPartiallyDependent : public BitField<bool, 5, 1> {}; |
| - class IsInNewSpaceList : public BitField<bool, 6, 1> {}; |
| - class IsZappedDuringWeakCallback : public BitField<bool, 7, 1> {}; |
| + class NodeState : public BitField<State, 0, 3> {}; |
| + class IsIndependent : public BitField<bool, 3, 1> {}; |
| + class IsPartiallyDependent : public BitField<bool, 4, 1> {}; |
| + class IsInNewSpaceList : public BitField<bool, 5, 1> {}; |
| + class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; |
| uint8_t flags_; |
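
The repacking is possible because three bits now suffice for the state (the
largest value, the NUMBER_OF_NODE_STATES sentinel, is 5), freeing room
inside the single byte for the two-bit weakness type. For reference, a
minimal re-implementation of the BitField template in the spirit of V8's
(the real one lives elsewhere in the tree):

    template <class T, int shift, int size>
    struct BitFieldSketch {
      static const uint32_t kMask = ((1u << size) - 1) << shift;
      static uint32_t update(uint32_t previous, T value) {
        return (previous & ~kMask) |
               (static_cast<uint32_t>(value) << shift);
      }
      static T decode(uint32_t value) {
        return static_cast<T>((value & kMask) >> shift);
      }
    };

    // Layout of the one-byte flags_ after this patch:
    //   bits 0-2  NodeState            (values 0..4, sentinel 5)
    //   bit  3    IsIndependent
    //   bit  4    IsPartiallyDependent
    //   bit  5    IsInNewSpaceList
    //   bits 6-7  NodeWeaknessType     (values 0..2)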
| @@ -315,6 +394,10 @@ class GlobalHandles::Node { |
| // the free list link. |
| union { |
| void* parameter; |
| + struct { |
| + int16_t internal_field1; |
| + int16_t internal_field2; |
| + } internal_field_indeces; |
| Node* next_free; |
| } parameter_or_next_free_; |
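
The union is also why set_internal_fields() range-checks against INT16_MIN
and INT16_MAX above: the two int16_t indices overlay the storage of the
parameter pointer, occupying four bytes, so they fit in the existing slot on
both 32- and 64-bit targets and Node does not grow. The invariant, expressed
as a sketch (C++11 static_assert used purely for illustration):

    static_assert(2 * sizeof(int16_t) <= sizeof(void*),
                  "internal field indices must fit in the parameter slot");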
| @@ -500,9 +583,26 @@ void GlobalHandles::Destroy(Object** location) { |
| void GlobalHandles::MakeWeak(Object** location, void* parameter, |
| - WeakCallback weak_callback, PhantomState phantom) { |
| + WeakCallback weak_callback) { |
| + Node::FromLocation(location)->MakeWeak(parameter, weak_callback); |
| +} |
| + |
| + |
| +void GlobalHandles::MakePhantom(Object** location, void* parameter, |
| + PhantomCallback phantom_callback, |
| + int16_t internal_field_index1, |
| + int16_t internal_field_index2) { |
| Node::FromLocation(location) |
| - ->MakeWeak(parameter, weak_callback, phantom == Phantom); |
| + ->MakePhantom(parameter, phantom_callback, internal_field_index1, |
| + internal_field_index2); |
| +} |
| + |
| + |
| +void GlobalHandles::CollectPhantomCallbackData() { |
| + for (NodeIterator it(this); !it.done(); it.Advance()) { |
| + Node* node = it.node(); |
| + node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_); |
| + } |
| } |
| @@ -540,10 +640,27 @@ void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { |
| for (NodeIterator it(this); !it.done(); it.Advance()) { |
| Node* node = it.node(); |
| if (node->IsWeakRetainer()) { |
| - if (node->state() == Node::PENDING && |
| - node->is_zapped_during_weak_callback()) { |
| - *(node->location()) = Smi::FromInt(kPhantomReferenceZap); |
| + // Weakness type can be normal, phantom or internal fields. |
| + // For normal weakness we mark through the handle so that |
| + // the object and things reachable from it are available |
| + // to the callback. |
| + // In the phantom case the callback never needs the object, so |
| + // we can zap the handle now and need not mark through it. |
| + // In the internal fields case we will need the internal |
| + // fields, so we can't zap the handle, but we don't need to |
| + // mark through it, because it will die in this GC round. |
| + if (node->state() == Node::PENDING) { |
| + if (node->weakness_type() == Node::PHANTOM_WEAK) { |
| + *(node->location()) = Smi::FromInt(kPhantomReferenceZap); |
| + } else if (node->weakness_type() == Node::NORMAL_WEAK) { |
| + v->VisitPointer(node->location()); |
| + } else { |
| + DCHECK(node->weakness_type() == Node::INTERNAL_FIELDS_WEAK); |
| + } |
| } else { |
| + // The node is not pending, so the object survived. |
| + // We still need to visit the pointer in case the object moved, |
|
rmcilroy
2014/12/03 11:56:35
/s/stil/still
Erik Corry
2014/12/15 15:12:42
Done.
|
| + // e.g. because of compaction. |
| v->VisitPointer(node->location()); |
| } |
| } |
| @@ -591,10 +708,18 @@ void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { |
| DCHECK(node->is_in_new_space_list()); |
| if ((node->is_independent() || node->is_partially_dependent()) && |
| node->IsWeakRetainer()) { |
| - if (node->is_zapped_during_weak_callback()) { |
| + if (node->weakness_type() == Node::PHANTOM_WEAK) { |
| *(node->location()) = Smi::FromInt(kPhantomReferenceZap); |
| - } else { |
| + } else if (node->weakness_type() == Node::NORMAL_WEAK) { |
| v->VisitPointer(node->location()); |
| + } else { |
| + DCHECK(node->weakness_type() == Node::INTERNAL_FIELDS_WEAK); |
| + // For this case we only need to trace if it's alive: tracing an |
| + // object that is already alive merely updates the pointer to the |
| + // object's new location. |
| + if (!node->IsNearDeath()) { |
| + v->VisitPointer(node->location()); |
| + } |
| } |
| } |
| } |
| @@ -647,63 +772,66 @@ bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, |
| } |
| -int GlobalHandles::PostGarbageCollectionProcessing( |
| - GarbageCollector collector) { |
| - // Process weak global handle callbacks. This must be done after the |
| - // GC is completely done, because the callbacks may invoke arbitrary |
| - // API functions. |
| - DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); |
| - const int initial_post_gc_processing_count = ++post_gc_processing_count_; |
| +int GlobalHandles::PostScavengeProcessing( |
| + const int initial_post_gc_processing_count) { |
| int freed_nodes = 0; |
| - if (collector == SCAVENGER) { |
| - for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| - Node* node = new_space_nodes_[i]; |
| - DCHECK(node->is_in_new_space_list()); |
| - if (!node->IsRetainer()) { |
| - // Free nodes do not have weak callbacks. Do not use them to compute |
| - // the freed_nodes. |
| - continue; |
| - } |
| - // Skip dependent handles. Their weak callbacks might expect to be |
| - // called between two global garbage collection callbacks which |
| - // are not called for minor collections. |
| - if (!node->is_independent() && !node->is_partially_dependent()) { |
| - continue; |
| - } |
| - node->clear_partially_dependent(); |
| - if (node->PostGarbageCollectionProcessing(isolate_)) { |
| - if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| - // Weak callback triggered another GC and another round of |
| - // PostGarbageCollection processing. The current node might |
| - // have been deleted in that round, so we need to bail out (or |
| - // restart the processing). |
| - return freed_nodes; |
| - } |
| - } |
| - if (!node->IsRetainer()) { |
| - freed_nodes++; |
| - } |
| + for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| + Node* node = new_space_nodes_[i]; |
| + DCHECK(node->is_in_new_space_list()); |
| + if (!node->IsRetainer()) { |
| + // Free nodes do not have weak callbacks. Do not use them to compute |
| + // the freed_nodes. |
| + continue; |
| } |
| - } else { |
| - for (NodeIterator it(this); !it.done(); it.Advance()) { |
| - if (!it.node()->IsRetainer()) { |
| - // Free nodes do not have weak callbacks. Do not use them to compute |
| - // the freed_nodes. |
| - continue; |
| - } |
| - it.node()->clear_partially_dependent(); |
| - if (it.node()->PostGarbageCollectionProcessing(isolate_)) { |
| - if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| - // See the comment above. |
| - return freed_nodes; |
| - } |
| + // Skip dependent handles. Their weak callbacks might expect to be |
| + // called between two global garbage collection callbacks which |
| + // are not called for minor collections. |
| + if (!node->is_independent() && !node->is_partially_dependent()) { |
| + continue; |
| + } |
| + node->clear_partially_dependent(); |
| + if (node->PostGarbageCollectionProcessing(isolate_)) { |
| + if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| + // Weak callback triggered another GC and another round of |
| + // PostGarbageCollection processing. The current node might |
| + // have been deleted in that round, so we need to bail out (or |
| + // restart the processing). |
| + return freed_nodes; |
| } |
| - if (!it.node()->IsRetainer()) { |
| - freed_nodes++; |
| + } |
| + if (!node->IsRetainer()) { |
| + freed_nodes++; |
| + } |
| + } |
| + return freed_nodes; |
| +} |
| + |
| + |
| +int GlobalHandles::PostMarkSweepProcessing( |
| + const int initial_post_gc_processing_count) { |
| + int freed_nodes = 0; |
| + for (NodeIterator it(this); !it.done(); it.Advance()) { |
| + if (!it.node()->IsRetainer()) { |
| + // Free nodes do not have weak callbacks. Do not use them to compute |
| + // the freed_nodes. |
| + continue; |
| + } |
| + it.node()->clear_partially_dependent(); |
| + if (it.node()->PostGarbageCollectionProcessing(isolate_)) { |
| + if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| + // See the comment above. |
| + return freed_nodes; |
| } |
| } |
| + if (!it.node()->IsRetainer()) { |
| + freed_nodes++; |
| + } |
| } |
| - // Update the list of new space nodes. |
| + return freed_nodes; |
| +} |
| + |
| + |
| +void GlobalHandles::UpdateListOfNewSpaceNodes() { |
| int last = 0; |
| for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| Node* node = new_space_nodes_[i]; |
| @@ -722,6 +850,42 @@ int GlobalHandles::PostGarbageCollectionProcessing( |
| } |
| } |
| new_space_nodes_.Rewind(last); |
| +} |
| + |
| + |
| +int GlobalHandles::DispatchPendingPhantomCallbacks() { |
| + int freed_nodes = 0; |
| + while (pending_phantom_callbacks_.length() != 0) { |
| + PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast(); |
| + callback.invoke(); |
| + freed_nodes++; |
| + } |
| + return freed_nodes; |
| +} |
| + |
| + |
| +int GlobalHandles::PostGarbageCollectionProcessing( |
| + GarbageCollector collector) { |
| + // Process weak global handle callbacks. This must be done after the |
| + // GC is completely done, because the callbacks may invoke arbitrary |
| + // API functions. |
| + DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); |
| + const int initial_post_gc_processing_count = ++post_gc_processing_count_; |
| + int freed_nodes = 0; |
| + if (collector == SCAVENGER) { |
| + freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count); |
| + } else { |
| + freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count); |
| + } |
| + if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| + // If the callbacks caused a nested GC, then return. See comment in |
| + // PostScavengeProcessing. |
| + return freed_nodes; |
| + } |
| + freed_nodes += DispatchPendingPhantomCallbacks(); |
| + if (initial_post_gc_processing_count == post_gc_processing_count_) { |
| + UpdateListOfNewSpaceNodes(); |
| + } |
| return freed_nodes; |
| } |
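
Taken together, the refactoring splits the old monolithic
PostGarbageCollectionProcessing into focused steps: PostScavengeProcessing
or PostMarkSweepProcessing runs the classic NORMAL_WEAK callbacks and bails
out early if one of them triggers a nested GC; DispatchPendingPhantomCallbacks
then drains the queue filled by CollectPhantomCallbackData during the GC
(note it drains with RemoveLast(), i.e. in reverse order of collection, and
counts each invocation as a freed node because the node itself was already
Release()d when its callback data was collected); finally the new-space node
list is compacted only if no nested GC intervened.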