Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(465)

Side by Side Diff: src/global-handles.cc

Issue 842153004: Unify phantom and internal fields weak handle callbacks (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/global-handles.h ('k') | test/cctest/test-api.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/api.h" 7 #include "src/api.h"
8 #include "src/global-handles.h" 8 #include "src/global-handles.h"
9 9
10 #include "src/vm-state-inl.h" 10 #include "src/vm-state-inl.h"
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after
196 } 196 }
197 void clear_partially_dependent() { set_partially_dependent(false); } 197 void clear_partially_dependent() { set_partially_dependent(false); }
198 198
199 // Callback accessor. 199 // Callback accessor.
200 // TODO(svenpanne) Re-enable or nuke later. 200 // TODO(svenpanne) Re-enable or nuke later.
201 // WeakReferenceCallback callback() { return callback_; } 201 // WeakReferenceCallback callback() { return callback_; }
202 202
203 // Callback parameter accessors. 203 // Callback parameter accessors.
204 void set_parameter(void* parameter) { 204 void set_parameter(void* parameter) {
205 DCHECK(IsInUse()); 205 DCHECK(IsInUse());
206 DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK);
207 parameter_or_next_free_.parameter = parameter; 206 parameter_or_next_free_.parameter = parameter;
208 } 207 }
209 void* parameter() const { 208 void* parameter() const {
210 DCHECK(IsInUse()); 209 DCHECK(IsInUse());
211 return parameter_or_next_free_.parameter; 210 return parameter_or_next_free_.parameter;
212 } 211 }
213 212
214 void set_internal_fields(int internal_field_index1,
215 int internal_field_index2) {
216 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
217 // These are stored in an int16_t.
218 DCHECK(internal_field_index1 < 1 << 16);
219 DCHECK(internal_field_index1 >= -(1 << 16));
220 DCHECK(internal_field_index2 < 1 << 16);
221 DCHECK(internal_field_index2 >= -(1 << 16));
222 parameter_or_next_free_.internal_field_indeces.internal_field1 =
223 static_cast<int16_t>(internal_field_index1);
224 parameter_or_next_free_.internal_field_indeces.internal_field2 =
225 static_cast<int16_t>(internal_field_index2);
226 }
227
228 int internal_field1() const {
229 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
230 return parameter_or_next_free_.internal_field_indeces.internal_field1;
231 }
232
233 int internal_field2() const {
234 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
235 return parameter_or_next_free_.internal_field_indeces.internal_field2;
236 }
237
238 // Accessors for next free node in the free list. 213 // Accessors for next free node in the free list.
239 Node* next_free() { 214 Node* next_free() {
240 DCHECK(state() == FREE); 215 DCHECK(state() == FREE);
241 return parameter_or_next_free_.next_free; 216 return parameter_or_next_free_.next_free;
242 } 217 }
243 void set_next_free(Node* value) { 218 void set_next_free(Node* value) {
244 DCHECK(state() == FREE); 219 DCHECK(state() == FREE);
245 parameter_or_next_free_.next_free = value; 220 parameter_or_next_free_.next_free = value;
246 } 221 }
247 222
248 void MakeWeak(void* parameter, WeakCallback weak_callback) { 223 void MakeWeak(void* parameter, WeakCallback weak_callback) {
249 DCHECK(weak_callback != NULL); 224 DCHECK(weak_callback != NULL);
250 DCHECK(IsInUse()); 225 DCHECK(IsInUse());
251 CHECK(object_ != NULL); 226 CHECK(object_ != NULL);
252 set_state(WEAK); 227 set_state(WEAK);
253 set_weakness_type(NORMAL_WEAK); 228 set_weakness_type(NORMAL_WEAK);
254 set_parameter(parameter); 229 set_parameter(parameter);
255 weak_callback_ = weak_callback; 230 weak_callback_ = weak_callback;
256 } 231 }
257 232
258 void MakePhantom(void* parameter, 233 void MakePhantom(void* parameter, int number_of_internal_fields,
259 PhantomCallbackData<void>::Callback phantom_callback, 234 PhantomCallbackData<void>::Callback phantom_callback) {
260 int16_t internal_field_index1, 235 DCHECK(number_of_internal_fields >= 0);
261 int16_t internal_field_index2) { 236 DCHECK(number_of_internal_fields <= 2);
262 DCHECK(phantom_callback != NULL); 237 DCHECK(phantom_callback != NULL);
263 DCHECK(IsInUse()); 238 DCHECK(IsInUse());
264 CHECK(object_ != NULL); 239 CHECK(object_ != NULL);
265 set_state(WEAK); 240 set_state(WEAK);
266 if (parameter == NULL) { 241 if (number_of_internal_fields == 0) {
267 set_weakness_type(INTERNAL_FIELDS_WEAK); 242 set_weakness_type(PHANTOM_WEAK_0_INTERNAL_FIELDS);
268 set_internal_fields(internal_field_index1, internal_field_index2); 243 } else if (number_of_internal_fields == 1) {
244 set_weakness_type(PHANTOM_WEAK_1_INTERNAL_FIELDS);
269 } else { 245 } else {
270 DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex); 246 set_weakness_type(PHANTOM_WEAK_2_INTERNAL_FIELDS);
271 DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex);
272 set_weakness_type(PHANTOM_WEAK);
273 set_parameter(parameter);
274 } 247 }
248 set_parameter(parameter);
275 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback); 249 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback);
276 } 250 }
277 251
278 void* ClearWeakness() { 252 void* ClearWeakness() {
279 DCHECK(IsInUse()); 253 DCHECK(IsInUse());
280 void* p = parameter(); 254 void* p = parameter();
281 set_state(NORMAL); 255 set_state(NORMAL);
282 set_parameter(NULL); 256 set_parameter(NULL);
283 return p; 257 return p;
284 } 258 }
285 259
286 void CollectPhantomCallbackData( 260 void CollectPhantomCallbackData(
287 Isolate* isolate, List<PendingPhantomCallback>* pending_phantom_callbacks, 261 Isolate* isolate,
288 List<PendingInternalFieldsCallback>* pending_internal_fields_callbacks) { 262 List<PendingPhantomCallback>* pending_phantom_callbacks) {
289 if (state() != Node::PENDING) return; 263 if (state() != PENDING) return;
290 bool do_release = true;
291 if (weak_callback_ != NULL) { 264 if (weak_callback_ != NULL) {
292 if (weakness_type() == NORMAL_WEAK) return; 265 if (weakness_type() == NORMAL_WEAK) return;
293 266
294 v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate); 267 v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate);
295 268
296 if (weakness_type() == PHANTOM_WEAK) { 269 DCHECK(weakness_type() == PHANTOM_WEAK_0_INTERNAL_FIELDS ||
297 // Phantom weak pointer case. Zap with harmless value. 270 weakness_type() == PHANTOM_WEAK_1_INTERNAL_FIELDS ||
298 DCHECK(*location() == Smi::FromInt(0)); 271 weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
299 typedef PhantomCallbackData<void> Data;
300 272
301 Data data(api_isolate, parameter()); 273 void* internal_field0 = 0;
dcarney 2015/01/09 12:44:38 Object* internal_field0 = nullptr; Object* i
302 Data::Callback callback = 274 void* internal_field1 = 0;
303 reinterpret_cast<Data::Callback>(weak_callback_); 275 if (weakness_type() != PHANTOM_WEAK_0_INTERNAL_FIELDS) {
304
305 pending_phantom_callbacks->Add(
306 PendingPhantomCallback(this, data, callback));
307
308 // Postpone the release of the handle. The embedder can't use the
309 // handle (it's zapped), but it may be using the location, and we
310 // don't want to confuse things by reusing that.
311 do_release = false;
312 } else {
313 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
314 typedef InternalFieldsCallbackData<void, void> Data;
315
316 // Phantom weak pointer case, passing internal fields instead of
317 // parameter. Don't use a handle here during GC, because it will
318 // create a handle pointing to a dying object, which can confuse
319 // the next GC.
320 JSObject* jsobject = reinterpret_cast<JSObject*>(object()); 276 JSObject* jsobject = reinterpret_cast<JSObject*>(object());
321 DCHECK(jsobject->IsJSObject()); 277 DCHECK(jsobject->IsJSObject());
322 Data data(api_isolate, jsobject->GetInternalField(internal_field1()), 278 DCHECK(jsobject->GetInternalFieldCount() >= 1);
323 jsobject->GetInternalField(internal_field2())); 279 internal_field0 = jsobject->GetInternalField(0);
324 Data::Callback callback = 280 if (weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS) {
325 reinterpret_cast<Data::Callback>(weak_callback_); 281 DCHECK(jsobject->GetInternalFieldCount() >= 2);
282 internal_field1 = jsobject->GetInternalField(1);
283 }
284 }
326 285
327 // In the future, we want to delay the callback. In that case we will 286 // Zap with harmless value.
328 // zap when we queue up, to stop the C++ side accessing the dead V8 287 *location() = Smi::FromInt(0);
329 // object, but we will call Release only after the callback (allowing 288 typedef PhantomCallbackData<void, void, void> Data;
330 // the node to be reused). 289
331 pending_internal_fields_callbacks->Add( 290 intptr_t field_as_int = reinterpret_cast<intptr_t>(internal_field0);
332 PendingInternalFieldsCallback(data, callback)); 291 if (!PlatformSmiTagging::IsValidSmi(field_as_int) ||
292 field_as_int != OBJECT_POINTER_ALIGN(field_as_int)) {
293 internal_field0 = 0;
333 } 294 }
295 field_as_int = reinterpret_cast<intptr_t>(internal_field1);
296 if (!PlatformSmiTagging::IsValidSmi(field_as_int) ||
297 field_as_int != OBJECT_POINTER_ALIGN(field_as_int)) {
298 internal_field1 = 0;
299 }
dcarney 2015/01/09 12:44:37 if (!internal_field0->IsSmi()) { internal_
300
301 Data data(api_isolate, parameter(), internal_field0, internal_field1);
302 Data::Callback callback =
303 reinterpret_cast<Data::Callback>(weak_callback_);
304
305 pending_phantom_callbacks->Add(
306 PendingPhantomCallback(this, data, callback));
307 DCHECK(IsInUse());
308 set_state(NEAR_DEATH);
334 } 309 }
335 // TODO(erikcorry): At the moment the callbacks are not postponed much,
336 // but if we really postpone them until after the mutator has run, we
337 // need to divide things up, so that an early callback clears the handle,
 339 // while a later one destroys the objects involved, possibly triggering
339 // some work when decremented ref counts hit zero.
340 if (do_release) Release();
341 } 310 }
342 311
343 bool PostGarbageCollectionProcessing(Isolate* isolate) { 312 bool PostGarbageCollectionProcessing(Isolate* isolate) {
313 // Handles only weak handles (not phantom) that are dying.
344 if (state() != Node::PENDING) return false; 314 if (state() != Node::PENDING) return false;
345 if (weak_callback_ == NULL) { 315 if (weak_callback_ == NULL) {
346 Release(); 316 Release();
347 return false; 317 return false;
348 } 318 }
349 set_state(NEAR_DEATH); 319 set_state(NEAR_DEATH);
350 320
351 // Check that we are not passing a finalized external string to 321 // Check that we are not passing a finalized external string to
352 // the callback. 322 // the callback.
353 DCHECK(!object_->IsExternalOneByteString() || 323 DCHECK(!object_->IsExternalOneByteString() ||
354 ExternalOneByteString::cast(object_)->resource() != NULL); 324 ExternalOneByteString::cast(object_)->resource() != NULL);
355 DCHECK(!object_->IsExternalTwoByteString() || 325 DCHECK(!object_->IsExternalTwoByteString() ||
356 ExternalTwoByteString::cast(object_)->resource() != NULL); 326 ExternalTwoByteString::cast(object_)->resource() != NULL);
327 if (weakness_type() != NORMAL_WEAK) return false;
328
357 // Leaving V8. 329 // Leaving V8.
358 VMState<EXTERNAL> vmstate(isolate); 330 VMState<EXTERNAL> vmstate(isolate);
359 HandleScope handle_scope(isolate); 331 HandleScope handle_scope(isolate);
360 if (weakness_type() == PHANTOM_WEAK) return false;
361 DCHECK(weakness_type() == NORMAL_WEAK);
362 Object** object = location(); 332 Object** object = location();
363 Handle<Object> handle(*object, isolate); 333 Handle<Object> handle(*object, isolate);
364 v8::WeakCallbackData<v8::Value, void> data( 334 v8::WeakCallbackData<v8::Value, void> data(
365 reinterpret_cast<v8::Isolate*>(isolate), parameter(), 335 reinterpret_cast<v8::Isolate*>(isolate), parameter(),
366 v8::Utils::ToLocal(handle)); 336 v8::Utils::ToLocal(handle));
367 set_parameter(NULL); 337 set_parameter(NULL);
368 weak_callback_(data); 338 weak_callback_(data);
369 339
370 // Absence of explicit cleanup or revival of weak handle 340 // Absence of explicit cleanup or revival of weak handle
371 // in most of the cases would lead to memory leak. 341 // in most of the cases would lead to memory leak.
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
403 373
404 uint8_t flags_; 374 uint8_t flags_;
405 375
406 // Handle specific callback - might be a weak reference in disguise. 376 // Handle specific callback - might be a weak reference in disguise.
407 WeakCallback weak_callback_; 377 WeakCallback weak_callback_;
408 378
409 // Provided data for callback. In FREE state, this is used for 379 // Provided data for callback. In FREE state, this is used for
410 // the free list link. 380 // the free list link.
411 union { 381 union {
412 void* parameter; 382 void* parameter;
413 struct {
414 int16_t internal_field1;
415 int16_t internal_field2;
416 } internal_field_indeces;
417 Node* next_free; 383 Node* next_free;
418 } parameter_or_next_free_; 384 } parameter_or_next_free_;
419 385
420 DISALLOW_COPY_AND_ASSIGN(Node); 386 DISALLOW_COPY_AND_ASSIGN(Node);
421 }; 387 };
422 388
423 389
424 class GlobalHandles::NodeBlock { 390 class GlobalHandles::NodeBlock {
425 public: 391 public:
426 static const int kSize = 256; 392 static const int kSize = 256;
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
597 if (location != NULL) Node::FromLocation(location)->Release(); 563 if (location != NULL) Node::FromLocation(location)->Release();
598 } 564 }
599 565
600 566
601 void GlobalHandles::MakeWeak(Object** location, void* parameter, 567 void GlobalHandles::MakeWeak(Object** location, void* parameter,
602 WeakCallback weak_callback) { 568 WeakCallback weak_callback) {
603 Node::FromLocation(location)->MakeWeak(parameter, weak_callback); 569 Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
604 } 570 }
605 571
606 572
607 typedef PhantomCallbackData<void>::Callback GenericCallback; 573 typedef PhantomCallbackData<void, void, void>::Callback GenericCallback;
608
609
610 void GlobalHandles::MakePhantom(
611 Object** location,
612 v8::InternalFieldsCallbackData<void, void>::Callback phantom_callback,
613 int16_t internal_field_index1, int16_t internal_field_index2) {
614 Node::FromLocation(location)
615 ->MakePhantom(NULL, reinterpret_cast<GenericCallback>(phantom_callback),
616 internal_field_index1, internal_field_index2);
617 }
618 574
619 575
620 void GlobalHandles::MakePhantom(Object** location, void* parameter, 576 void GlobalHandles::MakePhantom(Object** location, void* parameter,
577 int number_of_internal_fields,
621 GenericCallback phantom_callback) { 578 GenericCallback phantom_callback) {
622 Node::FromLocation(location)->MakePhantom(parameter, phantom_callback, 579 Node::FromLocation(location)
623 v8::Object::kNoInternalFieldIndex, 580 ->MakePhantom(parameter, number_of_internal_fields, phantom_callback);
624 v8::Object::kNoInternalFieldIndex);
625 } 581 }
626 582
627 583
628 void GlobalHandles::CollectPhantomCallbackData() { 584 void GlobalHandles::CollectPhantomCallbackData() {
629 for (NodeIterator it(this); !it.done(); it.Advance()) { 585 for (NodeIterator it(this); !it.done(); it.Advance()) {
630 Node* node = it.node(); 586 Node* node = it.node();
631 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_, 587 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_);
632 &pending_internal_fields_callbacks_);
633 } 588 }
634 } 589 }
635 590
636 591
637 void* GlobalHandles::ClearWeakness(Object** location) { 592 void* GlobalHandles::ClearWeakness(Object** location) {
638 return Node::FromLocation(location)->ClearWeakness(); 593 return Node::FromLocation(location)->ClearWeakness();
639 } 594 }
640 595
641 596
642 void GlobalHandles::MarkIndependent(Object** location) { 597 void GlobalHandles::MarkIndependent(Object** location) {
(...skipping 18 matching lines...) Expand all
661 616
662 bool GlobalHandles::IsWeak(Object** location) { 617 bool GlobalHandles::IsWeak(Object** location) {
663 return Node::FromLocation(location)->IsWeak(); 618 return Node::FromLocation(location)->IsWeak();
664 } 619 }
665 620
666 621
667 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { 622 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
668 for (NodeIterator it(this); !it.done(); it.Advance()) { 623 for (NodeIterator it(this); !it.done(); it.Advance()) {
669 Node* node = it.node(); 624 Node* node = it.node();
670 if (node->IsWeakRetainer()) { 625 if (node->IsWeakRetainer()) {
671 // Weakness type can be normal, phantom or internal fields. 626 // Weakness type can be normal or phantom, with or without internal
 673 // the object and things reachable from it are available 627 // fields. For normal weakness we mark through the handle so that the
673 // the object and things reachable from it are available 628 // object and things reachable from it are available to the callback.
674 // to the callback. 629 //
675 // In the case of phantom we can zap the object handle now 630 // In the case of phantom with no internal fields, we can zap the object
676 // and we won't need it, so we don't need to mark through it. 631 // handle now and we won't need it, so we don't need to mark through it.
677 // In the internal fields case we will need the internal 632 // In the internal fields case we will need the internal
678 // fields, so we can't zap the handle, but we don't need to 633 // fields, so we can't zap the handle.
679 // mark through it, because it will die in this GC round.
680 if (node->state() == Node::PENDING) { 634 if (node->state() == Node::PENDING) {
681 if (node->weakness_type() == PHANTOM_WEAK) { 635 if (node->weakness_type() == PHANTOM_WEAK_0_INTERNAL_FIELDS) {
682 *(node->location()) = Smi::FromInt(0); 636 *(node->location()) = Smi::FromInt(0);
683 } else if (node->weakness_type() == NORMAL_WEAK) { 637 } else if (node->weakness_type() == NORMAL_WEAK) {
684 v->VisitPointer(node->location()); 638 v->VisitPointer(node->location());
685 } else { 639 } else {
686 DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK); 640 DCHECK(node->weakness_type() == PHANTOM_WEAK_1_INTERNAL_FIELDS ||
641 node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
687 } 642 }
688 } else { 643 } else {
689 // Node is not pending, so that means the object survived. We still 644 // Node is not pending, so that means the object survived. We still
690 // need to visit the pointer in case the object moved, eg. because of 645 // need to visit the pointer in case the object moved, eg. because of
691 // compaction. 646 // compaction.
692 v->VisitPointer(node->location()); 647 v->VisitPointer(node->location());
693 } 648 }
694 } 649 }
695 } 650 }
696 } 651 }
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
729 } 684 }
730 } 685 }
731 686
732 687
733 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { 688 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
734 for (int i = 0; i < new_space_nodes_.length(); ++i) { 689 for (int i = 0; i < new_space_nodes_.length(); ++i) {
735 Node* node = new_space_nodes_[i]; 690 Node* node = new_space_nodes_[i];
736 DCHECK(node->is_in_new_space_list()); 691 DCHECK(node->is_in_new_space_list());
737 if ((node->is_independent() || node->is_partially_dependent()) && 692 if ((node->is_independent() || node->is_partially_dependent()) &&
738 node->IsWeakRetainer()) { 693 node->IsWeakRetainer()) {
739 if (node->weakness_type() == PHANTOM_WEAK) { 694 if (node->weakness_type() == PHANTOM_WEAK_0_INTERNAL_FIELDS) {
740 *(node->location()) = Smi::FromInt(0); 695 *(node->location()) = Smi::FromInt(0);
741 } else if (node->weakness_type() == NORMAL_WEAK) { 696 } else if (node->weakness_type() == NORMAL_WEAK) {
742 v->VisitPointer(node->location()); 697 v->VisitPointer(node->location());
743 } else { 698 } else {
744 DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK); 699 DCHECK(node->weakness_type() == PHANTOM_WEAK_1_INTERNAL_FIELDS ||
700 node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
745 // For this case we only need to trace if it's alive: The tracing of 701 // For this case we only need to trace if it's alive: The tracing of
746 // something that is already alive is just to get the pointer updated 702 // something that is already alive is just to get the pointer updated
 748 // to the new location of the object. 704 // to the new location of the object.
748 DCHECK(node->state() != Node::NEAR_DEATH); 704 DCHECK(node->state() != Node::NEAR_DEATH);
749 if (node->state() != Node::PENDING) { 705 if (node->state() != Node::PENDING) {
750 v->VisitPointer(node->location()); 706 v->VisitPointer(node->location());
751 } 707 }
752 } 708 }
753 } 709 }
754 } 710 }
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after
879 } 835 }
880 } 836 }
881 new_space_nodes_.Rewind(last); 837 new_space_nodes_.Rewind(last);
882 } 838 }
883 839
884 840
885 int GlobalHandles::DispatchPendingPhantomCallbacks() { 841 int GlobalHandles::DispatchPendingPhantomCallbacks() {
886 int freed_nodes = 0; 842 int freed_nodes = 0;
887 while (pending_phantom_callbacks_.length() != 0) { 843 while (pending_phantom_callbacks_.length() != 0) {
888 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast(); 844 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast();
845 DCHECK(callback.node()->IsInUse());
889 callback.invoke(); 846 callback.invoke();
890 freed_nodes++; 847 if (callback.node()->IsInUse()) callback.node()->Release();
891 }
892 while (pending_internal_fields_callbacks_.length() != 0) {
893 PendingInternalFieldsCallback callback =
894 pending_internal_fields_callbacks_.RemoveLast();
895 callback.invoke();
896 freed_nodes++; 848 freed_nodes++;
897 } 849 }
898 return freed_nodes; 850 return freed_nodes;
899 } 851 }
900 852
901 853
902 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) { 854 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) {
903 // Process weak global handle callbacks. This must be done after the 855 // Process weak global handle callbacks. This must be done after the
904 // GC is completely done, because the callbacks may invoke arbitrary 856 // GC is completely done, because the callbacks may invoke arbitrary
905 // API functions. 857 // API functions.
(...skipping 383 matching lines...) Expand 10 before | Expand all | Expand 10 after
1289 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); 1241 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1290 blocks_[block][offset] = object; 1242 blocks_[block][offset] = object;
1291 if (isolate->heap()->InNewSpace(object)) { 1243 if (isolate->heap()->InNewSpace(object)) {
1292 new_space_indices_.Add(size_); 1244 new_space_indices_.Add(size_);
1293 } 1245 }
1294 *index = size_++; 1246 *index = size_++;
1295 } 1247 }
1296 1248
1297 1249
1298 } } // namespace v8::internal 1250 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/global-handles.h ('k') | test/cctest/test-api.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698