Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/api.h" | 7 #include "src/api.h" |
| 8 #include "src/global-handles.h" | 8 #include "src/global-handles.h" |
| 9 | 9 |
| 10 #include "src/vm-state-inl.h" | 10 #include "src/vm-state-inl.h" |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 23 delete[] children; | 23 delete[] children; |
| 24 } | 24 } |
| 25 | 25 |
| 26 | 26 |
| 27 class GlobalHandles::Node { | 27 class GlobalHandles::Node { |
| 28 public: | 28 public: |
| 29 // State transition diagram: | 29 // State transition diagram: |
| 30 // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE } | 30 // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE } |
| 31 enum State { | 31 enum State { |
| 32 FREE = 0, | 32 FREE = 0, |
| 33 NORMAL, // Normal global handle. | 33 NORMAL, // Normal global handle. |
| 34 WEAK, // Flagged as weak but not yet finalized. | 34 WEAK, // Flagged as weak but not yet finalized. |
| 35 PENDING, // Has been recognized as only reachable by weak handles. | 35 PENDING, // Has been recognized as only reachable by weak handles. |
| 36 NEAR_DEATH // Callback has informed the handle is near death. | 36 NEAR_DEATH, // Callback has informed the handle is near death. |
| 37 NUMBER_OF_NODE_STATES | |
| 38 }; | |
| 39 | |
| 40 enum WeaknessType { | |
| 41 NORMAL_WEAK, // Embedder gets a handle to the dying object. | |
| 42 PHANTOM_WEAK, // Embedder gets the parameter they passed in earlier. | |
| 43 INTERNAL_FIELDS_WEAK // Embedder gets 2 internal fields from dying object. |
|
jochen (gone - plz use gerrit)
2014/12/02 10:25:59
Embedder
Erik Corry
2014/12/02 10:40:49
Done.
| |
| 37 }; | 44 }; |
| 38 | 45 |
| 39 // Maps handle location (slot) to the containing node. | 46 // Maps handle location (slot) to the containing node. |
| 40 static Node* FromLocation(Object** location) { | 47 static Node* FromLocation(Object** location) { |
| 41 DCHECK(OFFSET_OF(Node, object_) == 0); | 48 DCHECK(OFFSET_OF(Node, object_) == 0); |
| 42 return reinterpret_cast<Node*>(location); | 49 return reinterpret_cast<Node*>(location); |
| 43 } | 50 } |
| 44 | 51 |
| 45 Node() { | 52 Node() { |
| 46 DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset); | 53 DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset); |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 85 object_ = object; | 92 object_ = object; |
| 86 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 93 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 87 set_independent(false); | 94 set_independent(false); |
| 88 set_partially_dependent(false); | 95 set_partially_dependent(false); |
| 89 set_state(NORMAL); | 96 set_state(NORMAL); |
| 90 parameter_or_next_free_.parameter = NULL; | 97 parameter_or_next_free_.parameter = NULL; |
| 91 weak_callback_ = NULL; | 98 weak_callback_ = NULL; |
| 92 IncreaseBlockUses(); | 99 IncreaseBlockUses(); |
| 93 } | 100 } |
| 94 | 101 |
| 102 void Zap() { | |
| 103 DCHECK(IsInUse()); | |
| 104 // Zap the values for eager trapping. | |
| 105 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | |
| 106 } | |
| 107 | |
| 95 void Release() { | 108 void Release() { |
| 96 DCHECK(state() != FREE); | 109 DCHECK(IsInUse()); |
| 97 set_state(FREE); | 110 set_state(FREE); |
| 98 // Zap the values for eager trapping. | 111 // Zap the values for eager trapping. |
| 99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 112 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 100 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 113 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
| 101 set_independent(false); | 114 set_independent(false); |
| 102 set_partially_dependent(false); | 115 set_partially_dependent(false); |
| 103 weak_callback_ = NULL; | 116 weak_callback_ = NULL; |
| 104 DecreaseBlockUses(); | 117 DecreaseBlockUses(); |
| 105 } | 118 } |
| 106 | 119 |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 139 flags_ = IsPartiallyDependent::update(flags_, v); | 152 flags_ = IsPartiallyDependent::update(flags_, v); |
| 140 } | 153 } |
| 141 | 154 |
| 142 bool is_in_new_space_list() { | 155 bool is_in_new_space_list() { |
| 143 return IsInNewSpaceList::decode(flags_); | 156 return IsInNewSpaceList::decode(flags_); |
| 144 } | 157 } |
| 145 void set_in_new_space_list(bool v) { | 158 void set_in_new_space_list(bool v) { |
| 146 flags_ = IsInNewSpaceList::update(flags_, v); | 159 flags_ = IsInNewSpaceList::update(flags_, v); |
| 147 } | 160 } |
| 148 | 161 |
| 149 bool is_zapped_during_weak_callback() { | 162 WeaknessType weakness_type() const { |
| 150 return IsZappedDuringWeakCallback::decode(flags_); | 163 return NodeWeaknessType::decode(flags_); |
| 151 } | 164 } |
| 152 void set_is_zapped_during_weak_callback(bool v) { | 165 void set_weakness_type(WeaknessType weakness_type) { |
| 153 flags_ = IsZappedDuringWeakCallback::update(flags_, v); | 166 flags_ = NodeWeaknessType::update(flags_, weakness_type); |
| 154 } | 167 } |
| 155 | 168 |
| 156 bool IsNearDeath() const { | 169 bool IsNearDeath() const { |
| 157 // Check for PENDING to ensure correct answer when processing callbacks. | 170 // Check for PENDING to ensure correct answer when processing callbacks. |
| 158 return state() == PENDING || state() == NEAR_DEATH; | 171 return state() == PENDING || state() == NEAR_DEATH; |
| 159 } | 172 } |
| 160 | 173 |
| 161 bool IsWeak() const { return state() == WEAK; } | 174 bool IsWeak() const { return state() == WEAK; } |
| 162 | 175 |
| 176 bool IsInUse() const { return state() != FREE; } | |
| 177 | |
| 163 bool IsRetainer() const { return state() != FREE; } | 178 bool IsRetainer() const { return state() != FREE; } |
| 164 | 179 |
| 165 bool IsStrongRetainer() const { return state() == NORMAL; } | 180 bool IsStrongRetainer() const { return state() == NORMAL; } |
| 166 | 181 |
| 167 bool IsWeakRetainer() const { | 182 bool IsWeakRetainer() const { |
| 168 return state() == WEAK || state() == PENDING || state() == NEAR_DEATH; | 183 return state() == WEAK || state() == PENDING || state() == NEAR_DEATH; |
| 169 } | 184 } |
| 170 | 185 |
| 171 void MarkPending() { | 186 void MarkPending() { |
| 172 DCHECK(state() == WEAK); | 187 DCHECK(state() == WEAK); |
| 173 set_state(PENDING); | 188 set_state(PENDING); |
| 174 } | 189 } |
| 175 | 190 |
| 176 // Independent flag accessors. | 191 // Independent flag accessors. |
| 177 void MarkIndependent() { | 192 void MarkIndependent() { |
| 178 DCHECK(state() != FREE); | 193 DCHECK(IsInUse()); |
| 179 set_independent(true); | 194 set_independent(true); |
| 180 } | 195 } |
| 181 | 196 |
| 182 void MarkPartiallyDependent() { | 197 void MarkPartiallyDependent() { |
| 183 DCHECK(state() != FREE); | 198 DCHECK(IsInUse()); |
| 184 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) { | 199 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) { |
| 185 set_partially_dependent(true); | 200 set_partially_dependent(true); |
| 186 } | 201 } |
| 187 } | 202 } |
| 188 void clear_partially_dependent() { set_partially_dependent(false); } | 203 void clear_partially_dependent() { set_partially_dependent(false); } |
| 189 | 204 |
| 190 // Callback accessor. | 205 // Callback accessor. |
| 191 // TODO(svenpanne) Re-enable or nuke later. | 206 // TODO(svenpanne) Re-enable or nuke later. |
| 192 // WeakReferenceCallback callback() { return callback_; } | 207 // WeakReferenceCallback callback() { return callback_; } |
| 193 | 208 |
| 194 // Callback parameter accessors. | 209 // Callback parameter accessors. |
| 195 void set_parameter(void* parameter) { | 210 void set_parameter(void* parameter) { |
| 196 DCHECK(state() != FREE); | 211 DCHECK(IsInUse()); |
| 212 DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK); | |
| 197 parameter_or_next_free_.parameter = parameter; | 213 parameter_or_next_free_.parameter = parameter; |
| 198 } | 214 } |
| 199 void* parameter() const { | 215 void* parameter() const { |
| 200 DCHECK(state() != FREE); | 216 DCHECK(IsInUse()); |
| 201 return parameter_or_next_free_.parameter; | 217 return parameter_or_next_free_.parameter; |
| 202 } | 218 } |
| 203 | 219 |
| 220 void set_internal_fields( | |
| 221 int internal_field_index1, int internal_field_index2) { | |
| 222 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); | |
| 223 DCHECK(internal_field_index1 <= INT16_MAX); | |
| 224 DCHECK(internal_field_index1 >= INT16_MIN); | |
| 225 DCHECK(internal_field_index2 <= INT16_MAX); | |
| 226 DCHECK(internal_field_index2 >= INT16_MIN); | |
| 227 parameter_or_next_free_.internal_field_indeces.internal_field1 = | |
| 228 static_cast<int16_t>(internal_field_index1); | |
| 229 parameter_or_next_free_.internal_field_indeces.internal_field2 = | |
| 230 static_cast<int16_t>(internal_field_index2); | |
| 231 } | |
| 232 | |
| 233 int internal_field1() const { | |
| 234 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); | |
| 235 return parameter_or_next_free_.internal_field_indeces.internal_field1; | |
| 236 } | |
| 237 | |
| 238 int internal_field2() const { | |
| 239 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); | |
| 240 return parameter_or_next_free_.internal_field_indeces.internal_field2; | |
| 241 } | |
| 242 | |
| 204 // Accessors for next free node in the free list. | 243 // Accessors for next free node in the free list. |
| 205 Node* next_free() { | 244 Node* next_free() { |
| 206 DCHECK(state() == FREE); | 245 DCHECK(state() == FREE); |
| 207 return parameter_or_next_free_.next_free; | 246 return parameter_or_next_free_.next_free; |
| 208 } | 247 } |
| 209 void set_next_free(Node* value) { | 248 void set_next_free(Node* value) { |
| 210 DCHECK(state() == FREE); | 249 DCHECK(state() == FREE); |
| 211 parameter_or_next_free_.next_free = value; | 250 parameter_or_next_free_.next_free = value; |
| 212 } | 251 } |
| 213 | 252 |
| 214 void MakeWeak(void* parameter, WeakCallback weak_callback, | 253 void MakeWeak(void* parameter, WeakCallback weak_callback) { |
| 215 bool is_zapped_during_weak_callback = false) { | |
| 216 DCHECK(weak_callback != NULL); | 254 DCHECK(weak_callback != NULL); |
| 217 DCHECK(state() != FREE); | 255 DCHECK(IsInUse()); |
| 218 CHECK(object_ != NULL); | 256 CHECK(object_ != NULL); |
| 219 set_state(WEAK); | 257 set_state(WEAK); |
| 258 set_weakness_type(Node::NORMAL_WEAK); | |
| 220 set_parameter(parameter); | 259 set_parameter(parameter); |
| 221 set_is_zapped_during_weak_callback(is_zapped_during_weak_callback); | |
| 222 weak_callback_ = weak_callback; | 260 weak_callback_ = weak_callback; |
| 223 } | 261 } |
| 224 | 262 |
| 263 void MakePhantom(void* parameter, PhantomCallback phantom_callback, | |
| 264 int16_t internal_field_index1, | |
| 265 int16_t internal_field_index2) { | |
| 266 DCHECK(phantom_callback != NULL); | |
| 267 DCHECK(IsInUse()); | |
| 268 CHECK(object_ != NULL); | |
| 269 set_state(WEAK); | |
| 270 if (parameter == NULL) { | |
| 271 set_weakness_type(Node::INTERNAL_FIELDS_WEAK); | |
| 272 set_internal_fields(internal_field_index1, internal_field_index2); | |
|
jochen (gone - plz use gerrit)
2014/12/02 10:25:59
is there a check somewhere that the object actuall
Erik Corry
2014/12/02 10:40:49
Yes, it's in InternalFieldOK, called by SlowGetInt
| |
| 273 } else { | |
| 274 DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex); | |
| 275 DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex); | |
| 276 set_weakness_type(Node::PHANTOM_WEAK); | |
| 277 set_parameter(parameter); | |
| 278 } | |
| 279 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback); | |
| 280 } | |
| 281 | |
| 225 void* ClearWeakness() { | 282 void* ClearWeakness() { |
| 226 DCHECK(state() != FREE); | 283 DCHECK(IsInUse()); |
| 227 void* p = parameter(); | 284 void* p = parameter(); |
| 228 set_state(NORMAL); | 285 set_state(NORMAL); |
| 229 set_parameter(NULL); | 286 set_parameter(NULL); |
| 230 return p; | 287 return p; |
| 231 } | 288 } |
| 232 | 289 |
| 290 void CollectPhantomCallbackData( | |
| 291 Isolate* isolate, List<PendingPhantomCallback>* pending_callbacks) { | |
| 292 DCHECK(state() != Node::NEAR_DEATH); | |
| 293 if (state() != Node::PENDING) return; | |
| 294 if (weak_callback_ != NULL) { | |
| 295 if (weakness_type() == Node::NORMAL_WEAK) return; | |
| 296 | |
| 297 v8::PhantomCallbackData<void>::Callback callback = | |
| 298 reinterpret_cast<v8::PhantomCallbackData<void>::Callback>( | |
| 299 weak_callback_); | |
| 300 | |
| 301 if (weakness_type() == Node::PHANTOM_WEAK) { | |
| 302 // Phantom weak pointer case. | |
| 303 DCHECK(*location() == Smi::FromInt(kPhantomReferenceZap)); | |
| 304 // Make data with a null handle. | |
| 305 v8::PhantomCallbackData<void> data( | |
| 306 reinterpret_cast<v8::Isolate*>(isolate), parameter()); | |
| 307 pending_callbacks->Add(PendingPhantomCallback(data, callback)); | |
| 308 } else { | |
| 309 DCHECK(weakness_type() == Node::INTERNAL_FIELDS_WEAK); | |
| 310 // Phantom weak pointer case, passing internal fields instead of | |
| 311 // parameter. | |
| 312 Handle<Object> handle(object(), isolate); | |
| 313 Handle<JSObject> jsobject = Handle<JSObject>::cast(handle); | |
| 314 v8::PhantomCallbackData<void> data( | |
| 315 reinterpret_cast<v8::Isolate*>(isolate), | |
| 316 jsobject->GetInternalField(internal_field1()), | |
| 317 jsobject->GetInternalField(internal_field2())); | |
| 318 // In the future, we want to delay the callback. In that case we will | |
| 319 // zap when we queue up, to stop the C++ side accessing the dead V8 | |
| 320 // object, but we will call Release only after the callback (allowing | |
| 321 // the node to be reused). | |
| 322 pending_callbacks->Add(PendingPhantomCallback(data, callback)); | |
| 323 } | |
| 324 } | |
| 325 Release(); | |
| 326 } | |
| 327 | |
| 233 bool PostGarbageCollectionProcessing(Isolate* isolate) { | 328 bool PostGarbageCollectionProcessing(Isolate* isolate) { |
| 234 if (state() != Node::PENDING) return false; | 329 if (state() != Node::PENDING) return false; |
| 235 if (weak_callback_ == NULL) { | 330 if (weak_callback_ == NULL) { |
| 236 Release(); | 331 Release(); |
| 237 return false; | 332 return false; |
| 238 } | 333 } |
| 239 void* param = parameter(); | |
| 240 set_state(NEAR_DEATH); | 334 set_state(NEAR_DEATH); |
| 335 | |
| 336 // Check that we are not passing a finalized external string to | |
| 337 // the callback. | |
| 338 DCHECK(!object_->IsExternalOneByteString() || | |
| 339 ExternalOneByteString::cast(object_)->resource() != NULL); | |
| 340 DCHECK(!object_->IsExternalTwoByteString() || | |
| 341 ExternalTwoByteString::cast(object_)->resource() != NULL); | |
| 342 // Leaving V8. | |
| 343 VMState<EXTERNAL> vmstate(isolate); | |
| 344 HandleScope handle_scope(isolate); | |
| 345 DCHECK(weakness_type() == Node::NORMAL_WEAK); | |
| 346 Object** object = location(); | |
| 347 Handle<Object> handle(*object, isolate); | |
| 348 v8::WeakCallbackData<v8::Value, void> data( | |
| 349 reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle), | |
| 350 parameter()); | |
| 241 set_parameter(NULL); | 351 set_parameter(NULL); |
| 352 weak_callback_(data); | |
| 242 | 353 |
| 243 Object** object = location(); | |
| 244 { | |
| 245 // Check that we are not passing a finalized external string to | |
| 246 // the callback. | |
| 247 DCHECK(!object_->IsExternalOneByteString() || | |
| 248 ExternalOneByteString::cast(object_)->resource() != NULL); | |
| 249 DCHECK(!object_->IsExternalTwoByteString() || | |
| 250 ExternalTwoByteString::cast(object_)->resource() != NULL); | |
| 251 // Leaving V8. | |
| 252 VMState<EXTERNAL> vmstate(isolate); | |
| 253 HandleScope handle_scope(isolate); | |
| 254 if (is_zapped_during_weak_callback()) { | |
| 255 // Phantom weak pointer case. | |
| 256 DCHECK(*object == Smi::FromInt(kPhantomReferenceZap)); | |
| 257 // Make data with a null handle. | |
| 258 v8::WeakCallbackData<v8::Value, void> data( | |
| 259 reinterpret_cast<v8::Isolate*>(isolate), v8::Local<v8::Object>(), | |
| 260 param); | |
| 261 weak_callback_(data); | |
| 262 if (state() != FREE) { | |
| 263 // Callback does not have to clear the global handle if it is a | |
| 264 // phantom handle. | |
| 265 Release(); | |
| 266 } | |
| 267 } else { | |
| 268 Handle<Object> handle(*object, isolate); | |
| 269 v8::WeakCallbackData<v8::Value, void> data( | |
| 270 reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle), | |
| 271 param); | |
| 272 weak_callback_(data); | |
| 273 } | |
| 274 } | |
| 275 // Absence of explicit cleanup or revival of weak handle | 354 // Absence of explicit cleanup or revival of weak handle |
| 276 // in most of the cases would lead to memory leak. | 355 // in most of the cases would lead to memory leak. |
| 277 CHECK(state() != NEAR_DEATH); | 356 CHECK(state() != NEAR_DEATH); |
| 278 return true; | 357 return true; |
| 279 } | 358 } |
| 280 | 359 |
| 281 inline GlobalHandles* GetGlobalHandles(); | 360 inline GlobalHandles* GetGlobalHandles(); |
| 282 | 361 |
| 283 private: | 362 private: |
| 284 inline NodeBlock* FindBlock(); | 363 inline NodeBlock* FindBlock(); |
| 285 inline void IncreaseBlockUses(); | 364 inline void IncreaseBlockUses(); |
| 286 inline void DecreaseBlockUses(); | 365 inline void DecreaseBlockUses(); |
| 287 | 366 |
| 288 // Storage for object pointer. | 367 // Storage for object pointer. |
| 289 // Placed first to avoid offset computation. | 368 // Placed first to avoid offset computation. |
| 290 Object* object_; | 369 Object* object_; |
| 291 | 370 |
| 292 // Next word stores class_id, index, state, and independent. | 371 // Next word stores class_id, index, state, and independent. |
| 293 // Note: the most aligned fields should go first. | 372 // Note: the most aligned fields should go first. |
| 294 | 373 |
| 295 // Wrapper class ID. | 374 // Wrapper class ID. |
| 296 uint16_t class_id_; | 375 uint16_t class_id_; |
| 297 | 376 |
| 298 // Index in the containing handle block. | 377 // Index in the containing handle block. |
| 299 uint8_t index_; | 378 uint8_t index_; |
| 300 | 379 |
| 301 // This stores three flags (independent, partially_dependent and | 380 // This stores three flags (independent, partially_dependent and |
| 302 // in_new_space_list) and a State. | 381 // in_new_space_list) and a State. |
| 303 class NodeState : public BitField<State, 0, 4> {}; | 382 class NodeState : public BitField<State, 0, 3> {}; |
| 304 class IsIndependent : public BitField<bool, 4, 1> {}; | 383 class IsIndependent : public BitField<bool, 3, 1> {}; |
| 305 class IsPartiallyDependent : public BitField<bool, 5, 1> {}; | 384 class IsPartiallyDependent : public BitField<bool, 4, 1> {}; |
| 306 class IsInNewSpaceList : public BitField<bool, 6, 1> {}; | 385 class IsInNewSpaceList : public BitField<bool, 5, 1> {}; |
| 307 class IsZappedDuringWeakCallback : public BitField<bool, 7, 1> {}; | 386 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; |
| 308 | 387 |
| 309 uint8_t flags_; | 388 uint8_t flags_; |
| 310 | 389 |
| 311 // Handle specific callback - might be a weak reference in disguise. | 390 // Handle specific callback - might be a weak reference in disguise. |
| 312 WeakCallback weak_callback_; | 391 WeakCallback weak_callback_; |
| 313 | 392 |
| 314 // Provided data for callback. In FREE state, this is used for | 393 // Provided data for callback. In FREE state, this is used for |
| 315 // the free list link. | 394 // the free list link. |
| 316 union { | 395 union { |
| 317 void* parameter; | 396 void* parameter; |
| 397 struct { | |
| 398 int16_t internal_field1; | |
| 399 int16_t internal_field2; | |
| 400 } internal_field_indeces; | |
| 318 Node* next_free; | 401 Node* next_free; |
| 319 } parameter_or_next_free_; | 402 } parameter_or_next_free_; |
| 320 | 403 |
| 321 DISALLOW_COPY_AND_ASSIGN(Node); | 404 DISALLOW_COPY_AND_ASSIGN(Node); |
| 322 }; | 405 }; |
| 323 | 406 |
| 324 | 407 |
| 325 class GlobalHandles::NodeBlock { | 408 class GlobalHandles::NodeBlock { |
| 326 public: | 409 public: |
| 327 static const int kSize = 256; | 410 static const int kSize = 256; |
| (...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 493 return Node::FromLocation(location)->GetGlobalHandles()->Create(*location); | 576 return Node::FromLocation(location)->GetGlobalHandles()->Create(*location); |
| 494 } | 577 } |
| 495 | 578 |
| 496 | 579 |
| 497 void GlobalHandles::Destroy(Object** location) { | 580 void GlobalHandles::Destroy(Object** location) { |
| 498 if (location != NULL) Node::FromLocation(location)->Release(); | 581 if (location != NULL) Node::FromLocation(location)->Release(); |
| 499 } | 582 } |
| 500 | 583 |
| 501 | 584 |
| 502 void GlobalHandles::MakeWeak(Object** location, void* parameter, | 585 void GlobalHandles::MakeWeak(Object** location, void* parameter, |
| 503 WeakCallback weak_callback, PhantomState phantom) { | 586 WeakCallback weak_callback) { |
| 504 Node::FromLocation(location) | 587 Node::FromLocation(location)->MakeWeak(parameter, weak_callback); |
| 505 ->MakeWeak(parameter, weak_callback, phantom == Phantom); | |
| 506 } | 588 } |
| 507 | 589 |
| 508 | 590 |
| 591 void GlobalHandles::MakePhantom(Object** location, void* parameter, | |
| 592 PhantomCallback phantom_callback, | |
| 593 int16_t internal_field_index1, | |
| 594 int16_t internal_field_index2) { | |
| 595 Node::FromLocation(location) | |
| 596 ->MakePhantom(parameter, phantom_callback, internal_field_index1, | |
| 597 internal_field_index2); | |
| 598 } | |
| 599 | |
| 600 | |
| 601 void GlobalHandles::CollectPhantomCallbackData() { | |
| 602 for (NodeIterator it(this); !it.done(); it.Advance()) { | |
| 603 Node* node = it.node(); | |
| 604 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_); | |
| 605 } | |
| 606 } | |
| 607 | |
| 608 | |
| 509 void* GlobalHandles::ClearWeakness(Object** location) { | 609 void* GlobalHandles::ClearWeakness(Object** location) { |
| 510 return Node::FromLocation(location)->ClearWeakness(); | 610 return Node::FromLocation(location)->ClearWeakness(); |
| 511 } | 611 } |
| 512 | 612 |
| 513 | 613 |
| 514 void GlobalHandles::MarkIndependent(Object** location) { | 614 void GlobalHandles::MarkIndependent(Object** location) { |
| 515 Node::FromLocation(location)->MarkIndependent(); | 615 Node::FromLocation(location)->MarkIndependent(); |
| 516 } | 616 } |
| 517 | 617 |
| 518 | 618 |
| (...skipping 14 matching lines...) Expand all Loading... | |
| 533 | 633 |
| 534 bool GlobalHandles::IsWeak(Object** location) { | 634 bool GlobalHandles::IsWeak(Object** location) { |
| 535 return Node::FromLocation(location)->IsWeak(); | 635 return Node::FromLocation(location)->IsWeak(); |
| 536 } | 636 } |
| 537 | 637 |
| 538 | 638 |
| 539 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { | 639 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { |
| 540 for (NodeIterator it(this); !it.done(); it.Advance()) { | 640 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 541 Node* node = it.node(); | 641 Node* node = it.node(); |
| 542 if (node->IsWeakRetainer()) { | 642 if (node->IsWeakRetainer()) { |
| 543 if (node->state() == Node::PENDING && | 643 // Weakness type can be normal, phantom or internal fields. |
| 544 node->is_zapped_during_weak_callback()) { | 644 // For normal weakness we mark through the handle so that |
| 545 *(node->location()) = Smi::FromInt(kPhantomReferenceZap); | 645 // the object and things reachable from it are available |
| 646 // to the callback. | |
| 647 // In the case of phantom we can zap the object handle now | |
| 648 // and we won't need it, so we don't need to mark through it. | |
| 649 // In the internal fields case we will need the internal | |
| 650 // fields, so we can't zap the handle, but we don't need to | |
| 651 // mark through it, because it will die in this GC round. | |
| 652 if (node->state() == Node::PENDING) { | |
| 653 if (node->weakness_type() == Node::PHANTOM_WEAK) { | |
| 654 *(node->location()) = Smi::FromInt(kPhantomReferenceZap); | |
| 655 } else if (node->weakness_type() == Node::NORMAL_WEAK) { | |
| 656 v->VisitPointer(node->location()); | |
| 657 } else { | |
| 658 DCHECK(node->weakness_type() == Node::INTERNAL_FIELDS_WEAK); | |
| 659 } | |
| 546 } else { | 660 } else { |
| 661 // Node is not pending, so that means the object survived. | |
| 662 // We still need to visit the pointer in case the object moved, |
|
rmcilroy
2014/12/03 11:56:35
/s/stil/still
Erik Corry
2014/12/15 15:12:42
Done.
| |
| 663 // e.g. because of compaction. |
| 547 v->VisitPointer(node->location()); | 664 v->VisitPointer(node->location()); |
| 548 } | 665 } |
| 549 } | 666 } |
| 550 } | 667 } |
| 551 } | 668 } |
| 552 | 669 |
| 553 | 670 |
| 554 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) { | 671 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) { |
| 555 for (NodeIterator it(this); !it.done(); it.Advance()) { | 672 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 556 if (it.node()->IsWeak() && f(it.node()->location())) { | 673 if (it.node()->IsWeak() && f(it.node()->location())) { |
| (...skipping 27 matching lines...) Expand all Loading... | |
| 584 } | 701 } |
| 585 } | 702 } |
| 586 | 703 |
| 587 | 704 |
| 588 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { | 705 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { |
| 589 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 706 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 590 Node* node = new_space_nodes_[i]; | 707 Node* node = new_space_nodes_[i]; |
| 591 DCHECK(node->is_in_new_space_list()); | 708 DCHECK(node->is_in_new_space_list()); |
| 592 if ((node->is_independent() || node->is_partially_dependent()) && | 709 if ((node->is_independent() || node->is_partially_dependent()) && |
| 593 node->IsWeakRetainer()) { | 710 node->IsWeakRetainer()) { |
| 594 if (node->is_zapped_during_weak_callback()) { | 711 if (node->weakness_type() == Node::PHANTOM_WEAK) { |
| 595 *(node->location()) = Smi::FromInt(kPhantomReferenceZap); | 712 *(node->location()) = Smi::FromInt(kPhantomReferenceZap); |
| 713 } else if (node->weakness_type() == Node::NORMAL_WEAK) { | |
| 714 v->VisitPointer(node->location()); | |
| 596 } else { | 715 } else { |
| 597 v->VisitPointer(node->location()); | 716 DCHECK(node->weakness_type() == Node::INTERNAL_FIELDS_WEAK); |
| 717 // For this case we only need to trace if it's alive: The tracing of | |
| 718 // something that is already alive is just to get the pointer updated | |
| 719 // to the new location of the object). | |
| 720 if (!node->IsNearDeath()) { | |
| 721 v->VisitPointer(node->location()); | |
| 722 } | |
| 598 } | 723 } |
| 599 } | 724 } |
| 600 } | 725 } |
| 601 } | 726 } |
| 602 | 727 |
| 603 | 728 |
| 604 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, | 729 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, |
| 605 WeakSlotCallbackWithHeap can_skip) { | 730 WeakSlotCallbackWithHeap can_skip) { |
| 606 ComputeObjectGroupsAndImplicitReferences(); | 731 ComputeObjectGroupsAndImplicitReferences(); |
| 607 int last = 0; | 732 int last = 0; |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 640 // Once the entire group has been iterated over, set the object | 765 // Once the entire group has been iterated over, set the object |
| 641 // group to NULL so it won't be processed again. | 766 // group to NULL so it won't be processed again. |
| 642 delete entry; | 767 delete entry; |
| 643 object_groups_.at(i) = NULL; | 768 object_groups_.at(i) = NULL; |
| 644 } | 769 } |
| 645 object_groups_.Rewind(last); | 770 object_groups_.Rewind(last); |
| 646 return any_group_was_visited; | 771 return any_group_was_visited; |
| 647 } | 772 } |
| 648 | 773 |
| 649 | 774 |
| 650 int GlobalHandles::PostGarbageCollectionProcessing( | 775 int GlobalHandles::PostScavengeProcessing( |
| 651 GarbageCollector collector) { | 776 const int initial_post_gc_processing_count) { |
| 652 // Process weak global handle callbacks. This must be done after the | |
| 653 // GC is completely done, because the callbacks may invoke arbitrary | |
| 654 // API functions. | |
| 655 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); | |
| 656 const int initial_post_gc_processing_count = ++post_gc_processing_count_; | |
| 657 int freed_nodes = 0; | 777 int freed_nodes = 0; |
| 658 if (collector == SCAVENGER) { | 778 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 659 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 779 Node* node = new_space_nodes_[i]; |
| 660 Node* node = new_space_nodes_[i]; | 780 DCHECK(node->is_in_new_space_list()); |
| 661 DCHECK(node->is_in_new_space_list()); | 781 if (!node->IsRetainer()) { |
| 662 if (!node->IsRetainer()) { | 782 // Free nodes do not have weak callbacks. Do not use them to compute |
| 663 // Free nodes do not have weak callbacks. Do not use them to compute | 783 // the freed_nodes. |
| 664 // the freed_nodes. | 784 continue; |
| 665 continue; | 785 } |
| 666 } | 786 // Skip dependent handles. Their weak callbacks might expect to be |
| 667 // Skip dependent handles. Their weak callbacks might expect to be | 787 // called between two global garbage collection callbacks which |
| 668 // called between two global garbage collection callbacks which | 788 // are not called for minor collections. |
| 669 // are not called for minor collections. | 789 if (!node->is_independent() && !node->is_partially_dependent()) { |
| 670 if (!node->is_independent() && !node->is_partially_dependent()) { | 790 continue; |
| 671 continue; | 791 } |
| 672 } | 792 node->clear_partially_dependent(); |
| 673 node->clear_partially_dependent(); | 793 if (node->PostGarbageCollectionProcessing(isolate_)) { |
| 674 if (node->PostGarbageCollectionProcessing(isolate_)) { | 794 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 675 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 795 // Weak callback triggered another GC and another round of |
| 676 // Weak callback triggered another GC and another round of | 796 // PostGarbageCollection processing. The current node might |
| 677 // PostGarbageCollection processing. The current node might | 797 // have been deleted in that round, so we need to bail out (or |
| 678 // have been deleted in that round, so we need to bail out (or | 798 // restart the processing). |
| 679 // restart the processing). | 799 return freed_nodes; |
| 680 return freed_nodes; | |
| 681 } | |
| 682 } | |
| 683 if (!node->IsRetainer()) { | |
| 684 freed_nodes++; | |
| 685 } | 800 } |
| 686 } | 801 } |
| 687 } else { | 802 if (!node->IsRetainer()) { |
| 688 for (NodeIterator it(this); !it.done(); it.Advance()) { | 803 freed_nodes++; |
| 689 if (!it.node()->IsRetainer()) { | 804 } |
| 690 // Free nodes do not have weak callbacks. Do not use them to compute | 805 } |
| 691 // the freed_nodes. | 806 return freed_nodes; |
| 692 continue; | 807 } |
| 693 } | 808 |
| 694 it.node()->clear_partially_dependent(); | 809 |
| 695 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { | 810 int GlobalHandles::PostMarkSweepProcessing( |
| 696 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 811 const int initial_post_gc_processing_count) { |
| 697 // See the comment above. | 812 int freed_nodes = 0; |
| 698 return freed_nodes; | 813 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 699 } | 814 if (!it.node()->IsRetainer()) { |
| 700 } | 815 // Free nodes do not have weak callbacks. Do not use them to compute |
| 701 if (!it.node()->IsRetainer()) { | 816 // the freed_nodes. |
| 702 freed_nodes++; | 817 continue; |
| 818 } | |
| 819 it.node()->clear_partially_dependent(); | |
| 820 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { | |
| 821 if (initial_post_gc_processing_count != post_gc_processing_count_) { | |
| 822 // See the comment above. | |
| 823 return freed_nodes; | |
| 703 } | 824 } |
| 704 } | 825 } |
| 826 if (!it.node()->IsRetainer()) { | |
| 827 freed_nodes++; | |
| 828 } | |
| 705 } | 829 } |
| 706 // Update the list of new space nodes. | 830 return freed_nodes; |
| 831 } | |
| 832 | |
| 833 | |
| 834 void GlobalHandles::UpdateListOfNewSpaceNodes() { | |
| 707 int last = 0; | 835 int last = 0; |
| 708 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 836 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
| 709 Node* node = new_space_nodes_[i]; | 837 Node* node = new_space_nodes_[i]; |
| 710 DCHECK(node->is_in_new_space_list()); | 838 DCHECK(node->is_in_new_space_list()); |
| 711 if (node->IsRetainer()) { | 839 if (node->IsRetainer()) { |
| 712 if (isolate_->heap()->InNewSpace(node->object())) { | 840 if (isolate_->heap()->InNewSpace(node->object())) { |
| 713 new_space_nodes_[last++] = node; | 841 new_space_nodes_[last++] = node; |
| 714 isolate_->heap()->IncrementNodesCopiedInNewSpace(); | 842 isolate_->heap()->IncrementNodesCopiedInNewSpace(); |
| 715 } else { | 843 } else { |
| 716 node->set_in_new_space_list(false); | 844 node->set_in_new_space_list(false); |
| 717 isolate_->heap()->IncrementNodesPromoted(); | 845 isolate_->heap()->IncrementNodesPromoted(); |
| 718 } | 846 } |
| 719 } else { | 847 } else { |
| 720 node->set_in_new_space_list(false); | 848 node->set_in_new_space_list(false); |
| 721 isolate_->heap()->IncrementNodesDiedInNewSpace(); | 849 isolate_->heap()->IncrementNodesDiedInNewSpace(); |
| 722 } | 850 } |
| 723 } | 851 } |
| 724 new_space_nodes_.Rewind(last); | 852 new_space_nodes_.Rewind(last); |
| 853 } | |
| 854 | |
| 855 | |
| 856 int GlobalHandles::DispatchPendingPhantomCallbacks() { | |
| 857 int freed_nodes = 0; | |
| 858 while (pending_phantom_callbacks_.length() != 0) { | |
| 859 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast(); | |
| 860 callback.invoke(); | |
| 861 freed_nodes++; | |
| 862 } | |
| 725 return freed_nodes; | 863 return freed_nodes; |
| 726 } | 864 } |
| 727 | 865 |
| 866 | |
| 867 int GlobalHandles::PostGarbageCollectionProcessing( | |
| 868 GarbageCollector collector) { | |
| 869 // Process weak global handle callbacks. This must be done after the | |
| 870 // GC is completely done, because the callbacks may invoke arbitrary | |
| 871 // API functions. | |
| 872 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); | |
| 873 const int initial_post_gc_processing_count = ++post_gc_processing_count_; | |
| 874 int freed_nodes = 0; | |
| 875 if (collector == SCAVENGER) { | |
| 876 freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count); | |
| 877 } else { | |
| 878 freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count); | |
| 879 } | |
| 880 if (initial_post_gc_processing_count != post_gc_processing_count_) { | |
| 881 // If the callbacks caused a nested GC, then return. See comment in | |
| 882 // PostScavengeProcessing. | |
| 883 return freed_nodes; | |
| 884 } | |
| 885 freed_nodes += DispatchPendingPhantomCallbacks(); | |
| 886 if (initial_post_gc_processing_count == post_gc_processing_count_) { | |
| 887 UpdateListOfNewSpaceNodes(); | |
| 888 } | |
| 889 return freed_nodes; | |
| 890 } | |
| 891 | |
| 728 | 892 |
| 729 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) { | 893 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) { |
| 730 for (NodeIterator it(this); !it.done(); it.Advance()) { | 894 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 731 if (it.node()->IsStrongRetainer()) { | 895 if (it.node()->IsStrongRetainer()) { |
| 732 v->VisitPointer(it.node()->location()); | 896 v->VisitPointer(it.node()->location()); |
| 733 } | 897 } |
| 734 } | 898 } |
| 735 } | 899 } |
| 736 | 900 |
| 737 | 901 |
| (...skipping 345 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1083 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); | 1247 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); |
| 1084 blocks_[block][offset] = object; | 1248 blocks_[block][offset] = object; |
| 1085 if (isolate->heap()->InNewSpace(object)) { | 1249 if (isolate->heap()->InNewSpace(object)) { |
| 1086 new_space_indices_.Add(size_); | 1250 new_space_indices_.Add(size_); |
| 1087 } | 1251 } |
| 1088 *index = size_++; | 1252 *index = size_++; |
| 1089 } | 1253 } |
| 1090 | 1254 |
| 1091 | 1255 |
| 1092 } } // namespace v8::internal | 1256 } } // namespace v8::internal |
| OLD | NEW |