OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/api.h" | 7 #include "src/api.h" |
8 #include "src/global-handles.h" | 8 #include "src/global-handles.h" |
9 | 9 |
10 #include "src/vm-state-inl.h" | 10 #include "src/vm-state-inl.h" |
(...skipping 12 matching lines...) |
23 delete[] children; | 23 delete[] children; |
24 } | 24 } |
25 | 25 |
26 | 26 |
27 class GlobalHandles::Node { | 27 class GlobalHandles::Node { |
28 public: | 28 public: |
29 // State transition diagram: | 29 // State transition diagram: |
30 // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE } | 30 // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE } |
31 enum State { | 31 enum State { |
32 FREE = 0, | 32 FREE = 0, |
33 NORMAL, // Normal global handle. | 33 NORMAL, // Normal global handle. |
34 WEAK, // Flagged as weak but not yet finalized. | 34 WEAK, // Flagged as weak but not yet finalized. |
35 PENDING, // Has been recognized as only reachable by weak handles. | 35 PENDING, // Has been recognized as only reachable by weak handles. |
36 NEAR_DEATH // Callback has informed the handle is near death. | 36 NEAR_DEATH, // Callback has informed the handle is near death. |
| 37 NUMBER_OF_NODE_STATES |
37 }; | 38 }; |
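[Reviewer note] The transition diagram above is easy to mis-read, so here is a minimal standalone C++ sketch that encodes exactly the moves it permits, plus the Release()-to-FREE edge implied by the rest of this file. All names are illustrative; this is not V8 code.

    // Encodes the state diagram for GlobalHandles::Node. Illustrative only.
    #include <cassert>

    enum State { FREE, NORMAL, WEAK, PENDING, NEAR_DEATH, NUMBER_OF_NODE_STATES };

    // True when the diagram (plus Release(), which frees any in-use node)
    // allows the transition.
    bool IsAllowedTransition(State from, State to) {
      if (from != FREE && to == FREE) return true;  // Release()
      switch (from) {
        case FREE:       return to == NORMAL;                   // Acquire()
        case NORMAL:     return to == WEAK;                     // MakeWeak()
        case WEAK:       return to == NORMAL || to == PENDING;  // ClearWeakness() / MarkPending()
        case PENDING:    return to == NEAR_DEATH;               // post-GC processing
        case NEAR_DEATH: return to == NORMAL || to == WEAK;     // callback revived the object
        default:         return false;
      }
    }

    int main() {
      // A weak handle whose callback revives the object:
      assert(IsAllowedTransition(WEAK, PENDING));
      assert(IsAllowedTransition(PENDING, NEAR_DEATH));
      assert(IsAllowedTransition(NEAR_DEATH, NORMAL));
      // A free node can only be re-acquired:
      assert(!IsAllowedTransition(FREE, WEAK));
    }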
38 | 39 |
39 // Maps handle location (slot) to the containing node. | 40 // Maps handle location (slot) to the containing node. |
40 static Node* FromLocation(Object** location) { | 41 static Node* FromLocation(Object** location) { |
41 DCHECK(OFFSET_OF(Node, object_) == 0); | 42 DCHECK(OFFSET_OF(Node, object_) == 0); |
42 return reinterpret_cast<Node*>(location); | 43 return reinterpret_cast<Node*>(location); |
43 } | 44 } |
44 | 45 |
45 Node() { | 46 Node() { |
46 DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset); | 47 DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset); |
(...skipping 38 matching lines...) |
85 object_ = object; | 86 object_ = object; |
86 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 87 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
87 set_independent(false); | 88 set_independent(false); |
88 set_partially_dependent(false); | 89 set_partially_dependent(false); |
89 set_state(NORMAL); | 90 set_state(NORMAL); |
90 parameter_or_next_free_.parameter = NULL; | 91 parameter_or_next_free_.parameter = NULL; |
91 weak_callback_ = NULL; | 92 weak_callback_ = NULL; |
92 IncreaseBlockUses(); | 93 IncreaseBlockUses(); |
93 } | 94 } |
94 | 95 |
| 96 void Zap() { |
| 97 DCHECK(IsInUse()); |
| 98 // Zap the values for eager trapping. |
| 99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
| 100 } |
| 101 |
95 void Release() { | 102 void Release() { |
96 DCHECK(state() != FREE); | 103 DCHECK(IsInUse()); |
97 set_state(FREE); | 104 set_state(FREE); |
98 // Zap the values for eager trapping. | 105 // Zap the values for eager trapping. |
99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); | 106 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue); |
100 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; | 107 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId; |
101 set_independent(false); | 108 set_independent(false); |
102 set_partially_dependent(false); | 109 set_partially_dependent(false); |
103 weak_callback_ = NULL; | 110 weak_callback_ = NULL; |
104 DecreaseBlockUses(); | 111 DecreaseBlockUses(); |
105 } | 112 } |
106 | 113 |
(...skipping 32 matching lines...) |
139 flags_ = IsPartiallyDependent::update(flags_, v); | 146 flags_ = IsPartiallyDependent::update(flags_, v); |
140 } | 147 } |
141 | 148 |
142 bool is_in_new_space_list() { | 149 bool is_in_new_space_list() { |
143 return IsInNewSpaceList::decode(flags_); | 150 return IsInNewSpaceList::decode(flags_); |
144 } | 151 } |
145 void set_in_new_space_list(bool v) { | 152 void set_in_new_space_list(bool v) { |
146 flags_ = IsInNewSpaceList::update(flags_, v); | 153 flags_ = IsInNewSpaceList::update(flags_, v); |
147 } | 154 } |
148 | 155 |
149 bool is_zapped_during_weak_callback() { | 156 WeaknessType weakness_type() const { |
150 return IsZappedDuringWeakCallback::decode(flags_); | 157 return NodeWeaknessType::decode(flags_); |
151 } | 158 } |
152 void set_is_zapped_during_weak_callback(bool v) { | 159 void set_weakness_type(WeaknessType weakness_type) { |
153 flags_ = IsZappedDuringWeakCallback::update(flags_, v); | 160 flags_ = NodeWeaknessType::update(flags_, weakness_type); |
154 } | 161 } |
155 | 162 |
156 bool IsNearDeath() const { | 163 bool IsNearDeath() const { |
157 // Check for PENDING to ensure correct answer when processing callbacks. | 164 // Check for PENDING to ensure correct answer when processing callbacks. |
158 return state() == PENDING || state() == NEAR_DEATH; | 165 return state() == PENDING || state() == NEAR_DEATH; |
159 } | 166 } |
160 | 167 |
161 bool IsWeak() const { return state() == WEAK; } | 168 bool IsWeak() const { return state() == WEAK; } |
162 | 169 |
| 170 bool IsInUse() const { return state() != FREE; } |
| 171 |
163 bool IsRetainer() const { return state() != FREE; } | 172 bool IsRetainer() const { return state() != FREE; } |
164 | 173 |
165 bool IsStrongRetainer() const { return state() == NORMAL; } | 174 bool IsStrongRetainer() const { return state() == NORMAL; } |
166 | 175 |
167 bool IsWeakRetainer() const { | 176 bool IsWeakRetainer() const { |
168 return state() == WEAK || state() == PENDING || state() == NEAR_DEATH; | 177 return state() == WEAK || state() == PENDING || state() == NEAR_DEATH; |
169 } | 178 } |
170 | 179 |
171 void MarkPending() { | 180 void MarkPending() { |
172 DCHECK(state() == WEAK); | 181 DCHECK(state() == WEAK); |
173 set_state(PENDING); | 182 set_state(PENDING); |
174 } | 183 } |
175 | 184 |
176 // Independent flag accessors. | 185 // Independent flag accessors. |
177 void MarkIndependent() { | 186 void MarkIndependent() { |
178 DCHECK(state() != FREE); | 187 DCHECK(IsInUse()); |
179 set_independent(true); | 188 set_independent(true); |
180 } | 189 } |
181 | 190 |
182 void MarkPartiallyDependent() { | 191 void MarkPartiallyDependent() { |
183 DCHECK(state() != FREE); | 192 DCHECK(IsInUse()); |
184 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) { | 193 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) { |
185 set_partially_dependent(true); | 194 set_partially_dependent(true); |
186 } | 195 } |
187 } | 196 } |
188 void clear_partially_dependent() { set_partially_dependent(false); } | 197 void clear_partially_dependent() { set_partially_dependent(false); } |
189 | 198 |
190 // Callback accessor. | 199 // Callback accessor. |
191 // TODO(svenpanne) Re-enable or nuke later. | 200 // TODO(svenpanne) Re-enable or nuke later. |
192 // WeakReferenceCallback callback() { return callback_; } | 201 // WeakReferenceCallback callback() { return callback_; } |
193 | 202 |
194 // Callback parameter accessors. | 203 // Callback parameter accessors. |
195 void set_parameter(void* parameter) { | 204 void set_parameter(void* parameter) { |
196 DCHECK(state() != FREE); | 205 DCHECK(IsInUse()); |
| 206 DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK); |
197 parameter_or_next_free_.parameter = parameter; | 207 parameter_or_next_free_.parameter = parameter; |
198 } | 208 } |
199 void* parameter() const { | 209 void* parameter() const { |
200 DCHECK(state() != FREE); | 210 DCHECK(IsInUse()); |
201 return parameter_or_next_free_.parameter; | 211 return parameter_or_next_free_.parameter; |
202 } | 212 } |
203 | 213 |
| 214 void set_internal_fields(int internal_field_index1, |
| 215 int internal_field_index2) { |
| 216 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| 217 // These are stored in an int16_t, so check they fit in its range. |
| 218 DCHECK(internal_field_index1 < (1 << 15)); |
| 219 DCHECK(internal_field_index1 >= -(1 << 15)); |
| 220 DCHECK(internal_field_index2 < (1 << 15)); |
| 221 DCHECK(internal_field_index2 >= -(1 << 15)); |
| 222 parameter_or_next_free_.internal_field_indeces.internal_field1 = |
| 223 static_cast<int16_t>(internal_field_index1); |
| 224 parameter_or_next_free_.internal_field_indeces.internal_field2 = |
| 225 static_cast<int16_t>(internal_field_index2); |
| 226 } |
| 227 |
| 228 int internal_field1() const { |
| 229 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| 230 return parameter_or_next_free_.internal_field_indeces.internal_field1; |
| 231 } |
| 232 |
| 233 int internal_field2() const { |
| 234 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| 235 return parameter_or_next_free_.internal_field_indeces.internal_field2; |
| 236 } |
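[Reviewer note] On the range checks above: the indices are narrowed to int16_t, whose range is [-(1 << 15), (1 << 15) - 1], which is why the DCHECKs bound against (1 << 15). A tiny self-contained demonstration (hypothetical values; the wraparound shown is how narrowing behaves on common two's-complement platforms) of why a (1 << 16) bound would be too loose:

    #include <cassert>
    #include <cstdint>

    int main() {
      int index = 40000;  // would slip past a (1 << 16) bound
      // Narrowing silently wraps on common platforms: 40000 - 65536 = -25536.
      int16_t stored = static_cast<int16_t>(index);
      assert(stored != index);       // the wrong field index would be read back later
      assert(index < (1 << 16));     // the loose bound fails to catch this
      assert(!(index < (1 << 15)));  // the int16_t-accurate bound rejects it
    }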
| 237 |
204 // Accessors for next free node in the free list. | 238 // Accessors for next free node in the free list. |
205 Node* next_free() { | 239 Node* next_free() { |
206 DCHECK(state() == FREE); | 240 DCHECK(state() == FREE); |
207 return parameter_or_next_free_.next_free; | 241 return parameter_or_next_free_.next_free; |
208 } | 242 } |
209 void set_next_free(Node* value) { | 243 void set_next_free(Node* value) { |
210 DCHECK(state() == FREE); | 244 DCHECK(state() == FREE); |
211 parameter_or_next_free_.next_free = value; | 245 parameter_or_next_free_.next_free = value; |
212 } | 246 } |
213 | 247 |
214 void MakeWeak(void* parameter, WeakCallback weak_callback, | 248 void MakeWeak(void* parameter, WeakCallback weak_callback) { |
215 bool is_zapped_during_weak_callback = false) { | |
216 DCHECK(weak_callback != NULL); | 249 DCHECK(weak_callback != NULL); |
217 DCHECK(state() != FREE); | 250 DCHECK(IsInUse()); |
218 CHECK(object_ != NULL); | 251 CHECK(object_ != NULL); |
219 set_state(WEAK); | 252 set_state(WEAK); |
| 253 set_weakness_type(NORMAL_WEAK); |
220 set_parameter(parameter); | 254 set_parameter(parameter); |
221 set_is_zapped_during_weak_callback(is_zapped_during_weak_callback); | |
222 weak_callback_ = weak_callback; | 255 weak_callback_ = weak_callback; |
223 } | 256 } |
224 | 257 |
| 258 void MakePhantom(void* parameter, |
| 259 PhantomCallbackData<void>::Callback phantom_callback, |
| 260 int16_t internal_field_index1, |
| 261 int16_t internal_field_index2) { |
| 262 DCHECK(phantom_callback != NULL); |
| 263 DCHECK(IsInUse()); |
| 264 CHECK(object_ != NULL); |
| 265 set_state(WEAK); |
| 266 if (parameter == NULL) { |
| 267 set_weakness_type(INTERNAL_FIELDS_WEAK); |
| 268 set_internal_fields(internal_field_index1, internal_field_index2); |
| 269 } else { |
| 270 DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex); |
| 271 DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex); |
| 272 set_weakness_type(PHANTOM_WEAK); |
| 273 set_parameter(parameter); |
| 274 } |
| 275 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback); |
| 276 } |
| 277 |
225 void* ClearWeakness() { | 278 void* ClearWeakness() { |
226 DCHECK(state() != FREE); | 279 DCHECK(IsInUse()); |
227 void* p = parameter(); | 280 void* p = parameter(); |
228 set_state(NORMAL); | 281 set_state(NORMAL); |
229 set_parameter(NULL); | 282 set_parameter(NULL); |
230 return p; | 283 return p; |
231 } | 284 } |
232 | 285 |
| 286 void CollectPhantomCallbackData( |
| 287 Isolate* isolate, List<PendingPhantomCallback>* pending_phantom_callbacks, |
| 288 List<PendingInternalFieldsCallback>* pending_internal_fields_callbacks) { |
| 289 if (state() != Node::PENDING) return; |
| 290 bool do_release = true; |
| 291 if (weak_callback_ != NULL) { |
| 292 if (weakness_type() == NORMAL_WEAK) return; |
| 293 |
| 294 v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate); |
| 295 |
| 296 if (weakness_type() == PHANTOM_WEAK) { |
| 297 // Phantom weak pointer case; the slot was already zapped with a harmless value. |
| 298 DCHECK(*location() == Smi::FromInt(0)); |
| 299 typedef PhantomCallbackData<void> Data; |
| 300 |
| 301 Data data(api_isolate, parameter()); |
| 302 Data::Callback callback = |
| 303 reinterpret_cast<Data::Callback>(weak_callback_); |
| 304 |
| 305 pending_phantom_callbacks->Add( |
| 306 PendingPhantomCallback(this, data, callback)); |
| 307 |
| 308 // Postpone the release of the handle. The embedder can't use the |
| 309 // handle (it's zapped), but it may be using the location, and we |
| 310 // don't want to confuse things by reusing that. |
| 311 do_release = false; |
| 312 } else { |
| 313 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK); |
| 314 typedef InternalFieldsCallbackData<void, void> Data; |
| 315 |
| 316 // Phantom weak pointer case, passing internal fields instead of |
| 317 // parameter. Don't use a handle here during GC, because it will |
| 318 // create a handle pointing to a dying object, which can confuse |
| 319 // the next GC. |
| 320 JSObject* jsobject = reinterpret_cast<JSObject*>(object()); |
| 321 DCHECK(jsobject->IsJSObject()); |
| 322 Data data(api_isolate, jsobject->GetInternalField(internal_field1()), |
| 323 jsobject->GetInternalField(internal_field2())); |
| 324 Data::Callback callback = |
| 325 reinterpret_cast<Data::Callback>(weak_callback_); |
| 326 |
| 327 // In the future, we want to delay the callback. In that case we will |
| 328 // zap when we queue up, to stop the C++ side accessing the dead V8 |
| 329 // object, but we will call Release only after the callback (allowing |
| 330 // the node to be reused). |
| 331 pending_internal_fields_callbacks->Add( |
| 332 PendingInternalFieldsCallback(data, callback)); |
| 333 } |
| 334 } |
| 335 // TODO(erikcorry): At the moment the callbacks are not postponed much, |
| 336 // but if we really postpone them until after the mutator has run, we |
| 337 // need to divide things up, so that an early callback clears the handle, |
| 338 // while a later one destroys the objects involved, possibly triggering |
| 339 // some work when decremented ref counts hit zero. |
| 340 if (do_release) Release(); |
| 341 } |
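[Reviewer note] The collect-then-dispatch split that CollectPhantomCallbackData sets up (record the node/data/callback triples while the GC holds the heap in a special state, run the callbacks only from PostGarbageCollectionProcessing) can be summarized with a small self-contained queue. Names are illustrative, not V8's:

    #include <cstdio>
    #include <functional>
    #include <vector>

    // Minimal sketch of deferring embedder callbacks out of the GC proper.
    class CallbackQueue {
     public:
      // Called during GC: no embedder code may run here, so only enqueue.
      void Collect(std::function<void()> callback) {
        pending_.push_back(std::move(callback));
      }

      // Called after the GC, when the heap is consistent again. Dispatches in
      // LIFO order, mirroring List::RemoveLast() in the real code.
      int Dispatch() {
        int freed = 0;
        while (!pending_.empty()) {
          std::function<void()> callback = std::move(pending_.back());
          pending_.pop_back();
          callback();  // may allocate, call API functions, even trigger a GC
          ++freed;
        }
        return freed;
      }

     private:
      std::vector<std::function<void()>> pending_;
    };

    int main() {
      CallbackQueue queue;
      queue.Collect([] { std::printf("phantom callback runs after GC\n"); });
      std::printf("freed %d nodes\n", queue.Dispatch());
    }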
| 342 |
233 bool PostGarbageCollectionProcessing(Isolate* isolate) { | 343 bool PostGarbageCollectionProcessing(Isolate* isolate) { |
234 if (state() != Node::PENDING) return false; | 344 if (state() != Node::PENDING) return false; |
235 if (weak_callback_ == NULL) { | 345 if (weak_callback_ == NULL) { |
236 Release(); | 346 Release(); |
237 return false; | 347 return false; |
238 } | 348 } |
239 void* param = parameter(); | |
240 set_state(NEAR_DEATH); | 349 set_state(NEAR_DEATH); |
| 350 |
| 351 // Check that we are not passing a finalized external string to |
| 352 // the callback. |
| 353 DCHECK(!object_->IsExternalOneByteString() || |
| 354 ExternalOneByteString::cast(object_)->resource() != NULL); |
| 355 DCHECK(!object_->IsExternalTwoByteString() || |
| 356 ExternalTwoByteString::cast(object_)->resource() != NULL); |
| 357 // Leaving V8. |
| 358 VMState<EXTERNAL> vmstate(isolate); |
| 359 HandleScope handle_scope(isolate); |
| 360 if (weakness_type() == PHANTOM_WEAK) return false; |
| 361 DCHECK(weakness_type() == NORMAL_WEAK); |
| 362 Object** object = location(); |
| 363 Handle<Object> handle(*object, isolate); |
| 364 v8::WeakCallbackData<v8::Value, void> data( |
| 365 reinterpret_cast<v8::Isolate*>(isolate), parameter(), |
| 366 v8::Utils::ToLocal(handle)); |
241 set_parameter(NULL); | 367 set_parameter(NULL); |
| 368 weak_callback_(data); |
242 | 369 |
243 Object** object = location(); | |
244 { | |
245 // Check that we are not passing a finalized external string to | |
246 // the callback. | |
247 DCHECK(!object_->IsExternalOneByteString() || | |
248 ExternalOneByteString::cast(object_)->resource() != NULL); | |
249 DCHECK(!object_->IsExternalTwoByteString() || | |
250 ExternalTwoByteString::cast(object_)->resource() != NULL); | |
251 // Leaving V8. | |
252 VMState<EXTERNAL> vmstate(isolate); | |
253 HandleScope handle_scope(isolate); | |
254 if (is_zapped_during_weak_callback()) { | |
255 // Phantom weak pointer case. | |
256 DCHECK(*object == Smi::FromInt(kPhantomReferenceZap)); | |
257 // Make data with a null handle. | |
258 v8::WeakCallbackData<v8::Value, void> data( | |
259 reinterpret_cast<v8::Isolate*>(isolate), v8::Local<v8::Object>(), | |
260 param); | |
261 weak_callback_(data); | |
262 if (state() != FREE) { | |
263 // Callback does not have to clear the global handle if it is a | |
264 // phantom handle. | |
265 Release(); | |
266 } | |
267 } else { | |
268 Handle<Object> handle(*object, isolate); | |
269 v8::WeakCallbackData<v8::Value, void> data( | |
270 reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle), | |
271 param); | |
272 weak_callback_(data); | |
273 } | |
274 } | |
275 // Absence of explicit cleanup or revival of a weak handle | 370 // Absence of explicit cleanup or revival of a weak handle |
276 // in most cases would lead to a memory leak. | 371 // in most cases would lead to a memory leak. |
277 CHECK(state() != NEAR_DEATH); | 372 CHECK(state() != NEAR_DEATH); |
278 return true; | 373 return true; |
279 } | 374 } |
280 | 375 |
281 inline GlobalHandles* GetGlobalHandles(); | 376 inline GlobalHandles* GetGlobalHandles(); |
282 | 377 |
283 private: | 378 private: |
284 inline NodeBlock* FindBlock(); | 379 inline NodeBlock* FindBlock(); |
285 inline void IncreaseBlockUses(); | 380 inline void IncreaseBlockUses(); |
286 inline void DecreaseBlockUses(); | 381 inline void DecreaseBlockUses(); |
287 | 382 |
288 // Storage for object pointer. | 383 // Storage for object pointer. |
289 // Placed first to avoid offset computation. | 384 // Placed first to avoid offset computation. |
290 Object* object_; | 385 Object* object_; |
291 | 386 |
292 // Next word stores class_id, index, state, and independent. | 387 // Next word stores class_id, index, state, and independent. |
293 // Note: the most aligned fields should go first. | 388 // Note: the most aligned fields should go first. |
294 | 389 |
295 // Wrapper class ID. | 390 // Wrapper class ID. |
296 uint16_t class_id_; | 391 uint16_t class_id_; |
297 | 392 |
298 // Index in the containing handle block. | 393 // Index in the containing handle block. |
299 uint8_t index_; | 394 uint8_t index_; |
300 | 395 |
301 // This stores three flags (independent, partially_dependent and | 396 // This stores three flags (independent, partially_dependent and |
302 // in_new_space_list) and a State. | 397 // in_new_space_list) and a State. |
303 class NodeState : public BitField<State, 0, 4> {}; | 398 class NodeState : public BitField<State, 0, 3> {}; |
304 class IsIndependent : public BitField<bool, 4, 1> {}; | 399 class IsIndependent : public BitField<bool, 3, 1> {}; |
305 class IsPartiallyDependent : public BitField<bool, 5, 1> {}; | 400 class IsPartiallyDependent : public BitField<bool, 4, 1> {}; |
306 class IsInNewSpaceList : public BitField<bool, 6, 1> {}; | 401 class IsInNewSpaceList : public BitField<bool, 5, 1> {}; |
307 class IsZappedDuringWeakCallback : public BitField<bool, 7, 1> {}; | 402 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {}; |
308 | 403 |
309 uint8_t flags_; | 404 uint8_t flags_; |
310 | 405 |
311 // Handle specific callback - might be a weak reference in disguise. | 406 // Handle specific callback - might be a weak reference in disguise. |
312 WeakCallback weak_callback_; | 407 WeakCallback weak_callback_; |
313 | 408 |
314 // Provided data for callback. In FREE state, this is used for | 409 // Provided data for callback. In FREE state, this is used for |
315 // the free list link. | 410 // the free list link. |
316 union { | 411 union { |
317 void* parameter; | 412 void* parameter; |
| 413 struct { |
| 414 int16_t internal_field1; |
| 415 int16_t internal_field2; |
| 416 } internal_field_indeces; |
318 Node* next_free; | 417 Node* next_free; |
319 } parameter_or_next_free_; | 418 } parameter_or_next_free_; |
320 | 419 |
321 DISALLOW_COPY_AND_ASSIGN(Node); | 420 DISALLOW_COPY_AND_ASSIGN(Node); |
322 }; | 421 }; |
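[Reviewer note] The flag layout above packs a 3-bit State, three booleans, and a 2-bit WeaknessType into the single uint8_t flags_ (3 + 1 + 1 + 1 + 2 = 8 bits). A compilable sketch with a simplified stand-in for V8's BitField template (the real one lives elsewhere in the tree; this mirrors only its encode/decode/update shape):

    #include <cassert>
    #include <cstdint>

    // Simplified stand-in for V8's BitField<T, shift, size>.
    template <class T, int shift, int size>
    struct BitField {
      static uint32_t mask() { return ((1u << size) - 1) << shift; }
      static uint32_t encode(T value) {
        return static_cast<uint32_t>(value) << shift;
      }
      static T decode(uint32_t storage) {
        return static_cast<T>((storage & mask()) >> shift);
      }
      static uint32_t update(uint32_t storage, T value) {
        return (storage & ~mask()) | encode(value);
      }
    };

    enum State { FREE, NORMAL, WEAK, PENDING, NEAR_DEATH };  // 5 values: 3 bits
    enum WeaknessType { NORMAL_WEAK, PHANTOM_WEAK, INTERNAL_FIELDS_WEAK };  // 2 bits

    typedef BitField<State, 0, 3> NodeState;
    typedef BitField<bool, 3, 1> IsIndependent;
    typedef BitField<bool, 4, 1> IsPartiallyDependent;
    typedef BitField<bool, 5, 1> IsInNewSpaceList;
    typedef BitField<WeaknessType, 6, 2> NodeWeaknessType;  // bits 6..7: last two

    int main() {
      uint8_t flags = 0;
      flags = static_cast<uint8_t>(NodeState::update(flags, WEAK));
      flags = static_cast<uint8_t>(NodeWeaknessType::update(flags, PHANTOM_WEAK));
      flags = static_cast<uint8_t>(IsInNewSpaceList::update(flags, true));
      assert(NodeState::decode(flags) == WEAK);
      assert(NodeWeaknessType::decode(flags) == PHANTOM_WEAK);
      assert(IsInNewSpaceList::decode(flags));
      assert(!IsIndependent::decode(flags));
    }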
323 | 422 |
324 | 423 |
325 class GlobalHandles::NodeBlock { | 424 class GlobalHandles::NodeBlock { |
326 public: | 425 public: |
327 static const int kSize = 256; | 426 static const int kSize = 256; |
(...skipping 165 matching lines...) |
493 return Node::FromLocation(location)->GetGlobalHandles()->Create(*location); | 592 return Node::FromLocation(location)->GetGlobalHandles()->Create(*location); |
494 } | 593 } |
495 | 594 |
496 | 595 |
497 void GlobalHandles::Destroy(Object** location) { | 596 void GlobalHandles::Destroy(Object** location) { |
498 if (location != NULL) Node::FromLocation(location)->Release(); | 597 if (location != NULL) Node::FromLocation(location)->Release(); |
499 } | 598 } |
500 | 599 |
501 | 600 |
502 void GlobalHandles::MakeWeak(Object** location, void* parameter, | 601 void GlobalHandles::MakeWeak(Object** location, void* parameter, |
503 WeakCallback weak_callback, PhantomState phantom) { | 602 WeakCallback weak_callback) { |
504 Node::FromLocation(location) | 603 Node::FromLocation(location)->MakeWeak(parameter, weak_callback); |
505 ->MakeWeak(parameter, weak_callback, phantom == Phantom); | |
506 } | 604 } |
507 | 605 |
508 | 606 |
| 607 typedef PhantomCallbackData<void>::Callback GenericCallback; |
| 608 |
| 609 |
| 610 void GlobalHandles::MakePhantom( |
| 611 Object** location, |
| 612 v8::InternalFieldsCallbackData<void, void>::Callback phantom_callback, |
| 613 int16_t internal_field_index1, int16_t internal_field_index2) { |
| 614 Node::FromLocation(location) |
| 615 ->MakePhantom(NULL, reinterpret_cast<GenericCallback>(phantom_callback), |
| 616 internal_field_index1, internal_field_index2); |
| 617 } |
| 618 |
| 619 |
| 620 void GlobalHandles::MakePhantom(Object** location, void* parameter, |
| 621 GenericCallback phantom_callback) { |
| 622 Node::FromLocation(location)->MakePhantom(parameter, phantom_callback, |
| 623 v8::Object::kNoInternalFieldIndex, |
| 624 v8::Object::kNoInternalFieldIndex); |
| 625 } |
| 626 |
| 627 |
| 628 void GlobalHandles::CollectPhantomCallbackData() { |
| 629 for (NodeIterator it(this); !it.done(); it.Advance()) { |
| 630 Node* node = it.node(); |
| 631 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_, |
| 632 &pending_internal_fields_callbacks_); |
| 633 } |
| 634 } |
| 635 |
| 636 |
509 void* GlobalHandles::ClearWeakness(Object** location) { | 637 void* GlobalHandles::ClearWeakness(Object** location) { |
510 return Node::FromLocation(location)->ClearWeakness(); | 638 return Node::FromLocation(location)->ClearWeakness(); |
511 } | 639 } |
512 | 640 |
513 | 641 |
514 void GlobalHandles::MarkIndependent(Object** location) { | 642 void GlobalHandles::MarkIndependent(Object** location) { |
515 Node::FromLocation(location)->MarkIndependent(); | 643 Node::FromLocation(location)->MarkIndependent(); |
516 } | 644 } |
517 | 645 |
518 | 646 |
(...skipping 14 matching lines...) |
533 | 661 |
534 bool GlobalHandles::IsWeak(Object** location) { | 662 bool GlobalHandles::IsWeak(Object** location) { |
535 return Node::FromLocation(location)->IsWeak(); | 663 return Node::FromLocation(location)->IsWeak(); |
536 } | 664 } |
537 | 665 |
538 | 666 |
539 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { | 667 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { |
540 for (NodeIterator it(this); !it.done(); it.Advance()) { | 668 for (NodeIterator it(this); !it.done(); it.Advance()) { |
541 Node* node = it.node(); | 669 Node* node = it.node(); |
542 if (node->IsWeakRetainer()) { | 670 if (node->IsWeakRetainer()) { |
543 if (node->state() == Node::PENDING && | 671 // Weakness type can be normal, phantom or internal fields. |
544 node->is_zapped_during_weak_callback()) { | 672 // For normal weakness we mark through the handle so that |
545 *(node->location()) = Smi::FromInt(kPhantomReferenceZap); | 673 // the object and things reachable from it are available |
| 674 // to the callback. |
| 675 // In the phantom case we can zap the object handle now; the |
| 676 // callback never needs it, so we don't need to mark through it. |
| 677 // In the internal fields case we will need the internal |
| 678 // fields, so we can't zap the handle, but we don't need to |
| 679 // mark through it, because it will die in this GC round. |
| 680 if (node->state() == Node::PENDING) { |
| 681 if (node->weakness_type() == PHANTOM_WEAK) { |
| 682 *(node->location()) = Smi::FromInt(0); |
| 683 } else if (node->weakness_type() == NORMAL_WEAK) { |
| 684 v->VisitPointer(node->location()); |
| 685 } else { |
| 686 DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK); |
| 687 } |
546 } else { | 688 } else { |
| 689 // Node is not pending, so that means the object survived. We still |
| 690 // need to visit the pointer in case the object moved, e.g. because of |
| 691 // compaction. |
547 v->VisitPointer(node->location()); | 692 v->VisitPointer(node->location()); |
548 } | 693 } |
549 } | 694 } |
550 } | 695 } |
551 } | 696 } |
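[Reviewer note] The comment block in IterateWeakRoots describes a three-way policy for pending nodes. A standalone restatement of just that decision (illustrative names; the real logic is the if/else chain above):

    // For a PENDING node: must the visitor mark through the handle, and may
    // the slot be zapped before the callback runs?
    enum WeaknessType { NORMAL_WEAK, PHANTOM_WEAK, INTERNAL_FIELDS_WEAK };

    struct Decision {
      bool visit;  // keep the object (and everything reachable from it) alive
      bool zap;    // overwrite the slot with a harmless Smi right away
    };

    Decision DecideForPendingNode(WeaknessType type) {
      switch (type) {
        case NORMAL_WEAK:
          return {true, false};   // callback gets a handle; object must survive
        case PHANTOM_WEAK:
          return {false, true};   // callback never sees the object; zap now
        case INTERNAL_FIELDS_WEAK:
          return {false, false};  // internal fields are read later this GC
                                  // round, so no zap, but no marking either
      }
      return {false, false};      // unreachable
    }

    int main() {
      Decision d = DecideForPendingNode(INTERNAL_FIELDS_WEAK);
      return (d.visit || d.zap) ? 1 : 0;  // neither: slot kept, object unmarked
    }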
552 | 697 |
553 | 698 |
554 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) { | 699 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) { |
555 for (NodeIterator it(this); !it.done(); it.Advance()) { | 700 for (NodeIterator it(this); !it.done(); it.Advance()) { |
556 if (it.node()->IsWeak() && f(it.node()->location())) { | 701 if (it.node()->IsWeak() && f(it.node()->location())) { |
(...skipping 27 matching lines...) |
584 } | 729 } |
585 } | 730 } |
586 | 731 |
587 | 732 |
588 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { | 733 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { |
589 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 734 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
590 Node* node = new_space_nodes_[i]; | 735 Node* node = new_space_nodes_[i]; |
591 DCHECK(node->is_in_new_space_list()); | 736 DCHECK(node->is_in_new_space_list()); |
592 if ((node->is_independent() || node->is_partially_dependent()) && | 737 if ((node->is_independent() || node->is_partially_dependent()) && |
593 node->IsWeakRetainer()) { | 738 node->IsWeakRetainer()) { |
594 if (node->is_zapped_during_weak_callback()) { | 739 if (node->weakness_type() == PHANTOM_WEAK) { |
595 *(node->location()) = Smi::FromInt(kPhantomReferenceZap); | 740 *(node->location()) = Smi::FromInt(0); |
| 741 } else if (node->weakness_type() == NORMAL_WEAK) { |
| 742 v->VisitPointer(node->location()); |
596 } else { | 743 } else { |
597 v->VisitPointer(node->location()); | 744 DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK); |
| 745 // For this case we only need to trace if it's alive: tracing |
| 746 // something that is already alive just gets the pointer updated |
| 747 // to the new location of the object. |
| 748 DCHECK(node->state() != Node::NEAR_DEATH); |
| 749 if (node->state() != Node::PENDING) { |
| 750 v->VisitPointer(node->location()); |
| 751 } |
598 } | 752 } |
599 } | 753 } |
600 } | 754 } |
601 } | 755 } |
602 | 756 |
603 | 757 |
604 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, | 758 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v, |
605 WeakSlotCallbackWithHeap can_skip) { | 759 WeakSlotCallbackWithHeap can_skip) { |
606 ComputeObjectGroupsAndImplicitReferences(); | 760 ComputeObjectGroupsAndImplicitReferences(); |
607 int last = 0; | 761 int last = 0; |
(...skipping 32 matching lines...) |
640 // Once the entire group has been iterated over, set the object | 794 // Once the entire group has been iterated over, set the object |
641 // group to NULL so it won't be processed again. | 795 // group to NULL so it won't be processed again. |
642 delete entry; | 796 delete entry; |
643 object_groups_.at(i) = NULL; | 797 object_groups_.at(i) = NULL; |
644 } | 798 } |
645 object_groups_.Rewind(last); | 799 object_groups_.Rewind(last); |
646 return any_group_was_visited; | 800 return any_group_was_visited; |
647 } | 801 } |
648 | 802 |
649 | 803 |
650 int GlobalHandles::PostGarbageCollectionProcessing( | 804 int GlobalHandles::PostScavengeProcessing( |
651 GarbageCollector collector) { | 805 const int initial_post_gc_processing_count) { |
652 // Process weak global handle callbacks. This must be done after the | |
653 // GC is completely done, because the callbacks may invoke arbitrary | |
654 // API functions. | |
655 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); | |
656 const int initial_post_gc_processing_count = ++post_gc_processing_count_; | |
657 int freed_nodes = 0; | 806 int freed_nodes = 0; |
658 if (collector == SCAVENGER) { | 807 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
659 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 808 Node* node = new_space_nodes_[i]; |
660 Node* node = new_space_nodes_[i]; | 809 DCHECK(node->is_in_new_space_list()); |
661 DCHECK(node->is_in_new_space_list()); | 810 if (!node->IsRetainer()) { |
662 if (!node->IsRetainer()) { | 811 // Free nodes do not have weak callbacks. Do not use them to compute |
663 // Free nodes do not have weak callbacks. Do not use them to compute | 812 // the freed_nodes. |
664 // the freed_nodes. | 813 continue; |
665 continue; | 814 } |
666 } | 815 // Skip dependent handles. Their weak callbacks might expect to be |
667 // Skip dependent handles. Their weak callbacks might expect to be | 816 // called between two global garbage collection callbacks which |
668 // called between two global garbage collection callbacks which | 817 // are not called for minor collections. |
669 // are not called for minor collections. | 818 if (!node->is_independent() && !node->is_partially_dependent()) { |
670 if (!node->is_independent() && !node->is_partially_dependent()) { | 819 continue; |
671 continue; | 820 } |
672 } | 821 node->clear_partially_dependent(); |
673 node->clear_partially_dependent(); | 822 if (node->PostGarbageCollectionProcessing(isolate_)) { |
674 if (node->PostGarbageCollectionProcessing(isolate_)) { | 823 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
675 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 824 // Weak callback triggered another GC and another round of |
676 // Weak callback triggered another GC and another round of | 825 // PostGarbageCollection processing. The current node might |
677 // PostGarbageCollection processing. The current node might | 826 // have been deleted in that round, so we need to bail out (or |
678 // have been deleted in that round, so we need to bail out (or | 827 // restart the processing). |
679 // restart the processing). | 828 return freed_nodes; |
680 return freed_nodes; | |
681 } | |
682 } | |
683 if (!node->IsRetainer()) { | |
684 freed_nodes++; | |
685 } | 829 } |
686 } | 830 } |
687 } else { | 831 if (!node->IsRetainer()) { |
688 for (NodeIterator it(this); !it.done(); it.Advance()) { | 832 freed_nodes++; |
689 if (!it.node()->IsRetainer()) { | 833 } |
690 // Free nodes do not have weak callbacks. Do not use them to compute | 834 } |
691 // the freed_nodes. | 835 return freed_nodes; |
692 continue; | 836 } |
693 } | 837 |
694 it.node()->clear_partially_dependent(); | 838 |
695 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { | 839 int GlobalHandles::PostMarkSweepProcessing( |
696 if (initial_post_gc_processing_count != post_gc_processing_count_) { | 840 const int initial_post_gc_processing_count) { |
697 // See the comment above. | 841 int freed_nodes = 0; |
698 return freed_nodes; | 842 for (NodeIterator it(this); !it.done(); it.Advance()) { |
699 } | 843 if (!it.node()->IsRetainer()) { |
700 } | 844 // Free nodes do not have weak callbacks. Do not use them to compute |
701 if (!it.node()->IsRetainer()) { | 845 // the freed_nodes. |
702 freed_nodes++; | 846 continue; |
| 847 } |
| 848 it.node()->clear_partially_dependent(); |
| 849 if (it.node()->PostGarbageCollectionProcessing(isolate_)) { |
| 850 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 851 // See the comment above. |
| 852 return freed_nodes; |
703 } | 853 } |
704 } | 854 } |
| 855 if (!it.node()->IsRetainer()) { |
| 856 freed_nodes++; |
| 857 } |
705 } | 858 } |
706 // Update the list of new space nodes. | 859 return freed_nodes; |
| 860 } |
| 861 |
| 862 |
| 863 void GlobalHandles::UpdateListOfNewSpaceNodes() { |
707 int last = 0; | 864 int last = 0; |
708 for (int i = 0; i < new_space_nodes_.length(); ++i) { | 865 for (int i = 0; i < new_space_nodes_.length(); ++i) { |
709 Node* node = new_space_nodes_[i]; | 866 Node* node = new_space_nodes_[i]; |
710 DCHECK(node->is_in_new_space_list()); | 867 DCHECK(node->is_in_new_space_list()); |
711 if (node->IsRetainer()) { | 868 if (node->IsRetainer()) { |
712 if (isolate_->heap()->InNewSpace(node->object())) { | 869 if (isolate_->heap()->InNewSpace(node->object())) { |
713 new_space_nodes_[last++] = node; | 870 new_space_nodes_[last++] = node; |
714 isolate_->heap()->IncrementNodesCopiedInNewSpace(); | 871 isolate_->heap()->IncrementNodesCopiedInNewSpace(); |
715 } else { | 872 } else { |
716 node->set_in_new_space_list(false); | 873 node->set_in_new_space_list(false); |
717 isolate_->heap()->IncrementNodesPromoted(); | 874 isolate_->heap()->IncrementNodesPromoted(); |
718 } | 875 } |
719 } else { | 876 } else { |
720 node->set_in_new_space_list(false); | 877 node->set_in_new_space_list(false); |
721 isolate_->heap()->IncrementNodesDiedInNewSpace(); | 878 isolate_->heap()->IncrementNodesDiedInNewSpace(); |
722 } | 879 } |
723 } | 880 } |
724 new_space_nodes_.Rewind(last); | 881 new_space_nodes_.Rewind(last); |
| 882 } |
| 883 |
| 884 |
| 885 int GlobalHandles::DispatchPendingPhantomCallbacks() { |
| 886 int freed_nodes = 0; |
| 887 while (pending_phantom_callbacks_.length() != 0) { |
| 888 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast(); |
| 889 callback.invoke(); |
| 890 freed_nodes++; |
| 891 } |
| 892 while (pending_internal_fields_callbacks_.length() != 0) { |
| 893 PendingInternalFieldsCallback callback = |
| 894 pending_internal_fields_callbacks_.RemoveLast(); |
| 895 callback.invoke(); |
| 896 freed_nodes++; |
| 897 } |
725 return freed_nodes; | 898 return freed_nodes; |
726 } | 899 } |
727 | 900 |
728 | 901 |
| 902 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) { |
| 903 // Process weak global handle callbacks. This must be done after the |
| 904 // GC is completely done, because the callbacks may invoke arbitrary |
| 905 // API functions. |
| 906 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC); |
| 907 const int initial_post_gc_processing_count = ++post_gc_processing_count_; |
| 908 int freed_nodes = 0; |
| 909 if (collector == SCAVENGER) { |
| 910 freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count); |
| 911 } else { |
| 912 freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count); |
| 913 } |
| 914 if (initial_post_gc_processing_count != post_gc_processing_count_) { |
| 915 // If the callbacks caused a nested GC, then return. See comment in |
| 916 // PostScavengeProcessing. |
| 917 return freed_nodes; |
| 918 } |
| 919 freed_nodes += DispatchPendingPhantomCallbacks(); |
| 920 if (initial_post_gc_processing_count == post_gc_processing_count_) { |
| 921 UpdateListOfNewSpaceNodes(); |
| 922 } |
| 923 return freed_nodes; |
| 924 } |
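[Reviewer note] The initial_post_gc_processing_count bookkeeping above is a reentrancy guard: every processing round bumps the counter, and a mismatch with the snapshot means a weak callback triggered a nested GC, so the outer round must bail out before touching its (possibly rewritten) node lists. Distilled into a runnable sketch with illustrative names:

    #include <cstdio>

    static int post_gc_processing_count = 0;

    void RunWeakCallbacks(bool trigger_nested_gc);

    void PostGarbageCollectionProcessing(bool trigger_nested_gc) {
      const int initial_count = ++post_gc_processing_count;
      RunWeakCallbacks(trigger_nested_gc);
      if (initial_count != post_gc_processing_count) {
        // A callback re-entered processing; our node lists may be stale.
        std::printf("round %d: nested GC detected, bailing out\n", initial_count);
        return;
      }
      std::printf("round %d: completed normally\n", initial_count);
    }

    void RunWeakCallbacks(bool trigger_nested_gc) {
      if (trigger_nested_gc) {
        // E.g. a callback allocated enough to force another collection.
        PostGarbageCollectionProcessing(false);
      }
    }

    int main() {
      PostGarbageCollectionProcessing(false);  // round 1: completed normally
      PostGarbageCollectionProcessing(true);   // round 3 nests in round 2; 2 bails
    }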
| 925 |
| 926 |
| 927 void GlobalHandles::PendingPhantomCallback::invoke() { |
| 928 if (node_->state() == Node::FREE) return; |
| 929 DCHECK(node_->state() == Node::NEAR_DEATH); |
| 930 callback_(data_); |
| 931 if (node_->state() != Node::FREE) node_->Release(); |
| 932 } |
| 933 |
| 934 |
729 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) { | 935 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) { |
730 for (NodeIterator it(this); !it.done(); it.Advance()) { | 936 for (NodeIterator it(this); !it.done(); it.Advance()) { |
731 if (it.node()->IsStrongRetainer()) { | 937 if (it.node()->IsStrongRetainer()) { |
732 v->VisitPointer(it.node()->location()); | 938 v->VisitPointer(it.node()->location()); |
733 } | 939 } |
734 } | 940 } |
735 } | 941 } |
736 | 942 |
737 | 943 |
738 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) { | 944 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) { |
(...skipping 344 matching lines...) |
1083 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); | 1289 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); |
1084 blocks_[block][offset] = object; | 1290 blocks_[block][offset] = object; |
1085 if (isolate->heap()->InNewSpace(object)) { | 1291 if (isolate->heap()->InNewSpace(object)) { |
1086 new_space_indices_.Add(size_); | 1292 new_space_indices_.Add(size_); |
1087 } | 1293 } |
1088 *index = size_++; | 1294 *index = size_++; |
1089 } | 1295 } |
1090 | 1296 |
1091 | 1297 |
1092 } } // namespace v8::internal | 1298 } } // namespace v8::internal |