OLD | NEW |
---|---|
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/api.h" | 7 #include "src/api.h" |
8 #include "src/global-handles.h" | 8 #include "src/global-handles.h" |
9 | 9 |
10 #include "src/vm-state-inl.h" | 10 #include "src/vm-state-inl.h" |
(...skipping 246 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
257 return p; | 257 return p; |
258 } | 258 } |
259 | 259 |
260 void CollectPhantomCallbackData( | 260 void CollectPhantomCallbackData( |
261 Isolate* isolate, | 261 Isolate* isolate, |
262 List<PendingPhantomCallback>* pending_phantom_callbacks) { | 262 List<PendingPhantomCallback>* pending_phantom_callbacks) { |
263 if (state() != PENDING) return; | 263 if (state() != PENDING) return; |
264 if (weak_callback_ != NULL) { | 264 if (weak_callback_ != NULL) { |
265 if (weakness_type() == NORMAL_WEAK) return; | 265 if (weakness_type() == NORMAL_WEAK) return; |
266 | 266 |
267 v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate); | |
268 | |
269 DCHECK(weakness_type() == PHANTOM_WEAK || | 267 DCHECK(weakness_type() == PHANTOM_WEAK || |
270 weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS); | 268 weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS); |
271 | 269 |
272 Object* internal_field0 = nullptr; | 270 Object* internal_field0 = nullptr; |
273 Object* internal_field1 = nullptr; | 271 Object* internal_field1 = nullptr; |
274 if (weakness_type() != PHANTOM_WEAK) { | 272 if (weakness_type() != PHANTOM_WEAK) { |
275 if (object()->IsJSObject()) { | 273 if (object()->IsJSObject()) { |
276 JSObject* jsobject = JSObject::cast(object()); | 274 JSObject* jsobject = JSObject::cast(object()); |
277 int field_count = jsobject->GetInternalFieldCount(); | 275 int field_count = jsobject->GetInternalFieldCount(); |
278 if (field_count > 0) { | 276 if (field_count > 0) { |
279 internal_field0 = jsobject->GetInternalField(0); | 277 internal_field0 = jsobject->GetInternalField(0); |
280 if (!internal_field0->IsSmi()) internal_field0 = nullptr; | 278 if (!internal_field0->IsSmi()) internal_field0 = nullptr; |
281 } | 279 } |
282 if (field_count > 1) { | 280 if (field_count > 1) { |
283 internal_field1 = jsobject->GetInternalField(1); | 281 internal_field1 = jsobject->GetInternalField(1); |
284 if (!internal_field1->IsSmi()) internal_field1 = nullptr; | 282 if (!internal_field1->IsSmi()) internal_field1 = nullptr; |
285 } | 283 } |
286 } | 284 } |
287 } | 285 } |
288 | 286 |
289 // Zap with harmless value. | 287 // Zap with something dangerous. |
290 *location() = Smi::FromInt(0); | 288 *location() = reinterpret_cast<Object*>(0x1); |
Erik Corry
2015/03/26 10:07:11
I would prefer some zap value that you can grep for.
dcarney
2015/03/26 10:26:56
Done.
| |
291 typedef v8::WeakCallbackInfo<void> Data; | 289 typedef v8::WeakCallbackInfo<void> Data; |
292 | 290 |
293 Data data(api_isolate, parameter(), internal_field0, internal_field1); | |
294 Data::Callback callback = | 291 Data::Callback callback = |
295 reinterpret_cast<Data::Callback>(weak_callback_); | 292 reinterpret_cast<Data::Callback>(weak_callback_); |
296 | 293 |
297 pending_phantom_callbacks->Add( | 294 pending_phantom_callbacks->Add(PendingPhantomCallback( |
298 PendingPhantomCallback(this, data, callback)); | 295 this, callback, parameter(), internal_field0, internal_field1)); |
299 DCHECK(IsInUse()); | 296 DCHECK(IsInUse()); |
300 set_state(NEAR_DEATH); | 297 set_state(NEAR_DEATH); |
301 } | 298 } |
302 } | 299 } |
303 | 300 |
304 bool PostGarbageCollectionProcessing(Isolate* isolate) { | 301 bool PostGarbageCollectionProcessing(Isolate* isolate) { |
305 // Handles only weak handles (not phantom) that are dying. | 302 // Handles only weak handles (not phantom) that are dying. |
306 if (state() != Node::PENDING) return false; | 303 if (state() != Node::PENDING) return false; |
307 if (weak_callback_ == NULL) { | 304 if (weak_callback_ == NULL) { |
308 Release(); | 305 Release(); |
(...skipping 522 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
831 node->set_in_new_space_list(false); | 828 node->set_in_new_space_list(false); |
832 isolate_->heap()->IncrementNodesDiedInNewSpace(); | 829 isolate_->heap()->IncrementNodesDiedInNewSpace(); |
833 } | 830 } |
834 } | 831 } |
835 new_space_nodes_.Rewind(last); | 832 new_space_nodes_.Rewind(last); |
836 } | 833 } |
837 | 834 |
838 | 835 |
839 int GlobalHandles::DispatchPendingPhantomCallbacks() { | 836 int GlobalHandles::DispatchPendingPhantomCallbacks() { |
840 int freed_nodes = 0; | 837 int freed_nodes = 0; |
838 { | |
839 // The initial pass callbacks must simply clear the nodes. | |
840 for (auto i = pending_phantom_callbacks_.begin(); | |
841 i != pending_phantom_callbacks_.end(); ++i) { | |
842 auto callback = i; | |
843 // Skip callbacks that have already been processed once. | |
844 if (callback->node() == nullptr) continue; | |
845 callback->Invoke(isolate()); | |
846 freed_nodes++; | |
847 } | |
848 } | |
849 // The second pass empties the list. | |
841 while (pending_phantom_callbacks_.length() != 0) { | 850 while (pending_phantom_callbacks_.length() != 0) { |
842 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast(); | 851 auto callback = pending_phantom_callbacks_.RemoveLast(); |
843 DCHECK(callback.node()->IsInUse()); | 852 DCHECK(callback.node() == nullptr); |
844 callback.invoke(); | 853 // No second pass callback required. |
845 DCHECK(!callback.node()->IsInUse()); | 854 if (callback.callback() == nullptr) continue; |
846 freed_nodes++; | 855 // Fire second pass callback. |
856 callback.Invoke(isolate()); | |
847 } | 857 } |
848 return freed_nodes; | 858 return freed_nodes; |
849 } | 859 } |
850 | 860 |
851 | 861 |
862 void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate) { | |
863 Data::Callback* callback_addr = nullptr; | |
864 if (node_ != nullptr) { | |
865 // Initialize for first pass callback. | |
866 DCHECK(node_->state() == Node::NEAR_DEATH); | |
867 callback_addr = &callback_; | |
868 } | |
869 Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_, | |
870 internal_fields_, callback_addr); | |
871 Data::Callback callback = callback_; | |
872 callback_ = nullptr; | |
873 callback(data); | |
874 if (node_ != nullptr) { | |
875 // Transition to second pass state. | |
876 DCHECK(node_->state() == Node::FREE); | |
877 node_ = nullptr; | |
878 } | |
879 } | |
880 | |
881 | |
852 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) { | 882 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) { |
853 // Process weak global handle callbacks. This must be done after the | 883 // Process weak global handle callbacks. This must be done after the |
854 // GC is completely done, because the callbacks may invoke arbitrary | 884 // GC is completely done, because the callbacks may invoke arbitrary |
855 // API functions. | 885 // API functions. |
856 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); | 886 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); |
857 const int initial_post_gc_processing_count = ++post_gc_processing_count_; | 887 const int initial_post_gc_processing_count = ++post_gc_processing_count_; |
858 int freed_nodes = 0; | 888 int freed_nodes = 0; |
859 freed_nodes += DispatchPendingPhantomCallbacks(); | 889 freed_nodes += DispatchPendingPhantomCallbacks(); |
860 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 890 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
861 // If the callbacks caused a nested GC, then return. See comment in | 891 // If the callbacks caused a nested GC, then return. See comment in |
(...skipping 10 matching lines...) Expand all Loading... | |
872 // PostScavengeProcessing. | 902 // PostScavengeProcessing. |
873 return freed_nodes; | 903 return freed_nodes; |
874 } | 904 } |
875 if (initial_post_gc_processing_count == post_gc_processing_count_) { | 905 if (initial_post_gc_processing_count == post_gc_processing_count_) { |
876 UpdateListOfNewSpaceNodes(); | 906 UpdateListOfNewSpaceNodes(); |
877 } | 907 } |
878 return freed_nodes; | 908 return freed_nodes; |
879 } | 909 } |
880 | 910 |
881 | 911 |
882 void GlobalHandles::PendingPhantomCallback::invoke() { | |
883 if (node_->state() == Node::FREE) return; | |
884 DCHECK(node_->state() == Node::NEAR_DEATH); | |
885 callback_(data_); | |
886 if (node_->state() != Node::FREE) node_->Release(); | |
887 } | |
888 | |
889 | |
890 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) { | 912 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) { |
891 for (NodeIterator it(this); !it.done(); it.Advance()) { | 913 for (NodeIterator it(this); !it.done(); it.Advance()) { |
892 if (it.node()->IsStrongRetainer()) { | 914 if (it.node()->IsStrongRetainer()) { |
893 v->VisitPointer(it.node()->location()); | 915 v->VisitPointer(it.node()->location()); |
894 } | 916 } |
895 } | 917 } |
896 } | 918 } |
897 | 919 |
898 | 920 |
899 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) { | 921 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) { |
(...skipping 344 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1244 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); | 1266 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); |
1245 blocks_[block][offset] = object; | 1267 blocks_[block][offset] = object; |
1246 if (isolate->heap()->InNewSpace(object)) { | 1268 if (isolate->heap()->InNewSpace(object)) { |
1247 new_space_indices_.Add(size_); | 1269 new_space_indices_.Add(size_); |
1248 } | 1270 } |
1249 *index = size_++; | 1271 *index = size_++; |
1250 } | 1272 } |
1251 | 1273 |
1252 | 1274 |
1253 } } // namespace v8::internal | 1275 } } // namespace v8::internal |
OLD | NEW |