Chromium Code Reviews

Side by Side Diff: src/global-handles.cc

Issue 842153004: Unify phantom and internal fields weak handle callbacks (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Require callback to reset handle. Created 5 years, 11 months ago
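This patch removes the separate InternalFieldsCallbackData path and routes everything through one phantom callback: the callback data now carries the embedder's parameter plus up to two internal-field values, and, per the patch set title, the callback itself is expected to reset the handle. As a rough orientation before the diff, here is a self-contained model of that shape; FakeIsolate, PhantomData, and EmbedderCallback are made-up stand-ins for illustration, not V8's actual API.

#include <cassert>
#include <cstdio>

// Stand-in for v8::Isolate; illustration only.
struct FakeIsolate {};

// Rough analogue of PhantomCallbackData<void, void, void> after this CL: a
// single struct carries the parameter and up to two internal-field values,
// replacing the separate InternalFieldsCallbackData callback that is deleted.
struct PhantomData {
  FakeIsolate* isolate;
  void* parameter;        // from set_parameter(); may be null
  void* internal_field0;  // only filled for PHANTOM_WEAK_1/2_INTERNAL_FIELDS
  void* internal_field1;  // only filled for PHANTOM_WEAK_2_INTERNAL_FIELDS
};

// The patch set requires the callback to reset the handle, so a callback in
// this model clears the handle slot it is handed.
void EmbedderCallback(const PhantomData& data, void** handle_location) {
  std::printf("finalizing: parameter=%p field0=%p field1=%p\n",
              data.parameter, data.internal_field0, data.internal_field1);
  *handle_location = nullptr;  // "reset the handle"
}

int main() {
  FakeIsolate isolate;
  int payload = 42;
  void* handle = &payload;  // stand-in for the global handle slot
  PhantomData data{&isolate, &payload, nullptr, nullptr};
  EmbedderCallback(data, &handle);
  assert(handle == nullptr);  // the contract the new DCHECKs below enforce
  return 0;
}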
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/api.h" 7 #include "src/api.h"
8 #include "src/global-handles.h" 8 #include "src/global-handles.h"
9 9
10 #include "src/vm-state-inl.h" 10 #include "src/vm-state-inl.h"
(...skipping 185 matching lines...)
196 } 196 }
197 void clear_partially_dependent() { set_partially_dependent(false); } 197 void clear_partially_dependent() { set_partially_dependent(false); }
198 198
199 // Callback accessor. 199 // Callback accessor.
200 // TODO(svenpanne) Re-enable or nuke later. 200 // TODO(svenpanne) Re-enable or nuke later.
201 // WeakReferenceCallback callback() { return callback_; } 201 // WeakReferenceCallback callback() { return callback_; }
202 202
203 // Callback parameter accessors. 203 // Callback parameter accessors.
204 void set_parameter(void* parameter) { 204 void set_parameter(void* parameter) {
205 DCHECK(IsInUse()); 205 DCHECK(IsInUse());
206 DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK);
207 parameter_or_next_free_.parameter = parameter; 206 parameter_or_next_free_.parameter = parameter;
208 } 207 }
209 void* parameter() const { 208 void* parameter() const {
210 DCHECK(IsInUse()); 209 DCHECK(IsInUse());
211 return parameter_or_next_free_.parameter; 210 return parameter_or_next_free_.parameter;
212 } 211 }
213 212
214 void set_internal_fields(int internal_field_index1,
215 int internal_field_index2) {
216 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
217 // These are stored in an int16_t.
218 DCHECK(internal_field_index1 < 1 << 16);
219 DCHECK(internal_field_index1 >= -(1 << 16));
220 DCHECK(internal_field_index2 < 1 << 16);
221 DCHECK(internal_field_index2 >= -(1 << 16));
222 parameter_or_next_free_.internal_field_indeces.internal_field1 =
223 static_cast<int16_t>(internal_field_index1);
224 parameter_or_next_free_.internal_field_indeces.internal_field2 =
225 static_cast<int16_t>(internal_field_index2);
226 }
227
228 int internal_field1() const {
229 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
230 return parameter_or_next_free_.internal_field_indeces.internal_field1;
231 }
232
233 int internal_field2() const {
234 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
235 return parameter_or_next_free_.internal_field_indeces.internal_field2;
236 }
237
238 // Accessors for next free node in the free list. 213 // Accessors for next free node in the free list.
239 Node* next_free() { 214 Node* next_free() {
240 DCHECK(state() == FREE); 215 DCHECK(state() == FREE);
241 return parameter_or_next_free_.next_free; 216 return parameter_or_next_free_.next_free;
242 } 217 }
243 void set_next_free(Node* value) { 218 void set_next_free(Node* value) {
244 DCHECK(state() == FREE); 219 DCHECK(state() == FREE);
245 parameter_or_next_free_.next_free = value; 220 parameter_or_next_free_.next_free = value;
246 } 221 }
247 222
248 void MakeWeak(void* parameter, WeakCallback weak_callback) { 223 void MakeWeak(void* parameter, WeakCallback weak_callback) {
249 DCHECK(weak_callback != NULL); 224 DCHECK(weak_callback != NULL);
250 DCHECK(IsInUse()); 225 DCHECK(IsInUse());
251 CHECK(object_ != NULL); 226 CHECK(object_ != NULL);
252 set_state(WEAK); 227 set_state(WEAK);
253 set_weakness_type(NORMAL_WEAK); 228 set_weakness_type(NORMAL_WEAK);
254 set_parameter(parameter); 229 set_parameter(parameter);
255 weak_callback_ = weak_callback; 230 weak_callback_ = weak_callback;
256 } 231 }
257 232
258 void MakePhantom(void* parameter, 233 void MakePhantom(void* parameter, int number_of_internal_fields,
259 PhantomCallbackData<void>::Callback phantom_callback, 234 PhantomCallbackData<void>::Callback phantom_callback) {
260 int16_t internal_field_index1, 235 DCHECK(number_of_internal_fields >= 0);
261 int16_t internal_field_index2) { 236 DCHECK(number_of_internal_fields <= 2);
262 DCHECK(phantom_callback != NULL); 237 DCHECK(phantom_callback != NULL);
263 DCHECK(IsInUse()); 238 DCHECK(IsInUse());
264 CHECK(object_ != NULL); 239 CHECK(object_ != NULL);
265 set_state(WEAK); 240 set_state(WEAK);
266 if (parameter == NULL) { 241 if (number_of_internal_fields == 0) {
267 set_weakness_type(INTERNAL_FIELDS_WEAK); 242 set_weakness_type(PHANTOM_WEAK_0_INTERNAL_FIELDS);
dcarney 2015/01/09 14:22:52 i think verifying here that the jsobject has enoug…
Erik Corry Chromium.org 2015/01/12 11:37:53 I'm not very happy with this suggestion as it stan…
268 set_internal_fields(internal_field_index1, internal_field_index2); 243 } else if (number_of_internal_fields == 1) {
244 set_weakness_type(PHANTOM_WEAK_1_INTERNAL_FIELDS);
269 } else { 245 } else {
270 DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex); 246 set_weakness_type(PHANTOM_WEAK_2_INTERNAL_FIELDS);
271 DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex);
272 set_weakness_type(PHANTOM_WEAK);
273 set_parameter(parameter);
274 } 247 }
248 set_parameter(parameter);
275 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback); 249 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback);
276 } 250 }
277 251
278 void* ClearWeakness() { 252 void* ClearWeakness() {
279 DCHECK(IsInUse()); 253 DCHECK(IsInUse());
280 void* p = parameter(); 254 void* p = parameter();
281 set_state(NORMAL); 255 set_state(NORMAL);
282 set_parameter(NULL); 256 set_parameter(NULL);
283 return p; 257 return p;
284 } 258 }
285 259
286 void CollectPhantomCallbackData( 260 void CollectPhantomCallbackData(
287 Isolate* isolate, List<PendingPhantomCallback>* pending_phantom_callbacks, 261 Isolate* isolate,
288 List<PendingInternalFieldsCallback>* pending_internal_fields_callbacks) { 262 List<PendingPhantomCallback>* pending_phantom_callbacks) {
289 if (state() != Node::PENDING) return; 263 if (state() != PENDING) return;
290 bool do_release = true;
291 if (weak_callback_ != NULL) { 264 if (weak_callback_ != NULL) {
292 if (weakness_type() == NORMAL_WEAK) return; 265 if (weakness_type() == NORMAL_WEAK) return;
293 266
294 v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate); 267 v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate);
295 268
296 if (weakness_type() == PHANTOM_WEAK) { 269 DCHECK(weakness_type() == PHANTOM_WEAK_0_INTERNAL_FIELDS ||
297 // Phantom weak pointer case. Zap with harmless value. 270 weakness_type() == PHANTOM_WEAK_1_INTERNAL_FIELDS ||
298 DCHECK(*location() == Smi::FromInt(0)); 271 weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
299 typedef PhantomCallbackData<void> Data;
300 272
301 Data data(api_isolate, parameter()); 273 Object* internal_field0 = nullptr;
302 Data::Callback callback = 274 Object* internal_field1 = nullptr;
303 reinterpret_cast<Data::Callback>(weak_callback_); 275 if (weakness_type() != PHANTOM_WEAK_0_INTERNAL_FIELDS) {
304
305 pending_phantom_callbacks->Add(
306 PendingPhantomCallback(this, data, callback));
307
308 // Postpone the release of the handle. The embedder can't use the
309 // handle (it's zapped), but it may be using the location, and we
310 // don't want to confuse things by reusing that.
311 do_release = false;
312 } else {
313 DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
314 typedef InternalFieldsCallbackData<void, void> Data;
315
316 // Phantom weak pointer case, passing internal fields instead of
317 // parameter. Don't use a handle here during GC, because it will
318 // create a handle pointing to a dying object, which can confuse
319 // the next GC.
320 JSObject* jsobject = reinterpret_cast<JSObject*>(object()); 276 JSObject* jsobject = reinterpret_cast<JSObject*>(object());
321 DCHECK(jsobject->IsJSObject()); 277 DCHECK(jsobject->IsJSObject());
322 Data data(api_isolate, jsobject->GetInternalField(internal_field1()), 278 DCHECK(jsobject->GetInternalFieldCount() >= 1);
323 jsobject->GetInternalField(internal_field2())); 279 internal_field0 = jsobject->GetInternalField(0);
324 Data::Callback callback = 280 if (weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS) {
325 reinterpret_cast<Data::Callback>(weak_callback_); 281 DCHECK(jsobject->GetInternalFieldCount() >= 2);
282 internal_field1 = jsobject->GetInternalField(1);
283 }
284 }
326 285
327 // In the future, we want to delay the callback. In that case we will 286 // Zap with harmless value.
328 // zap when we queue up, to stop the C++ side accessing the dead V8 287 *location() = Smi::FromInt(0);
329 // object, but we will call Release only after the callback (allowing 288 typedef PhantomCallbackData<void, void, void> Data;
330 // the node to be reused). 289
331 pending_internal_fields_callbacks->Add( 290 if (!internal_field0->IsSmi()) internal_field0 = nullptr;
332 PendingInternalFieldsCallback(data, callback)); 291 if (!internal_field1->IsSmi()) internal_field1 = nullptr;
333 } 292
293 Data data(api_isolate, parameter(), internal_field0, internal_field1);
294 Data::Callback callback =
295 reinterpret_cast<Data::Callback>(weak_callback_);
296
297 pending_phantom_callbacks->Add(
298 PendingPhantomCallback(this, data, callback));
299 DCHECK(IsInUse());
300 set_state(NEAR_DEATH);
334 } 301 }
335 // TODO(erikcorry): At the moment the callbacks are not postponed much,
336 // but if we really postpone them until after the mutator has run, we
337 // need to divide things up, so that an early callback clears the handle,
338 // while a later one destroys the objects involved, possibly triggering
339 // some work when decremented ref counts hit zero.
340 if (do_release) Release();
341 } 302 }
342 303
343 bool PostGarbageCollectionProcessing(Isolate* isolate) { 304 bool PostGarbageCollectionProcessing(Isolate* isolate) {
305 // Handles only weak handles (not phantom) that are dying.
344 if (state() != Node::PENDING) return false; 306 if (state() != Node::PENDING) return false;
345 if (weak_callback_ == NULL) { 307 if (weak_callback_ == NULL) {
346 Release(); 308 Release();
347 return false; 309 return false;
348 } 310 }
349 set_state(NEAR_DEATH); 311 set_state(NEAR_DEATH);
350 312
351 // Check that we are not passing a finalized external string to 313 // Check that we are not passing a finalized external string to
352 // the callback. 314 // the callback.
353 DCHECK(!object_->IsExternalOneByteString() || 315 DCHECK(!object_->IsExternalOneByteString() ||
354 ExternalOneByteString::cast(object_)->resource() != NULL); 316 ExternalOneByteString::cast(object_)->resource() != NULL);
355 DCHECK(!object_->IsExternalTwoByteString() || 317 DCHECK(!object_->IsExternalTwoByteString() ||
356 ExternalTwoByteString::cast(object_)->resource() != NULL); 318 ExternalTwoByteString::cast(object_)->resource() != NULL);
319 if (weakness_type() != NORMAL_WEAK) return false;
320
357 // Leaving V8. 321 // Leaving V8.
358 VMState<EXTERNAL> vmstate(isolate); 322 VMState<EXTERNAL> vmstate(isolate);
359 HandleScope handle_scope(isolate); 323 HandleScope handle_scope(isolate);
360 if (weakness_type() == PHANTOM_WEAK) return false;
361 DCHECK(weakness_type() == NORMAL_WEAK);
362 Object** object = location(); 324 Object** object = location();
363 Handle<Object> handle(*object, isolate); 325 Handle<Object> handle(*object, isolate);
364 v8::WeakCallbackData<v8::Value, void> data( 326 v8::WeakCallbackData<v8::Value, void> data(
365 reinterpret_cast<v8::Isolate*>(isolate), parameter(), 327 reinterpret_cast<v8::Isolate*>(isolate), parameter(),
366 v8::Utils::ToLocal(handle)); 328 v8::Utils::ToLocal(handle));
367 set_parameter(NULL); 329 set_parameter(NULL);
368 weak_callback_(data); 330 weak_callback_(data);
369 331
370 // Absence of explicit cleanup or revival of weak handle 332 // Absence of explicit cleanup or revival of weak handle
371 // in most of the cases would lead to memory leak. 333 // in most of the cases would lead to memory leak.
(...skipping 31 matching lines...)
403 365
404 uint8_t flags_; 366 uint8_t flags_;
405 367
406 // Handle specific callback - might be a weak reference in disguise. 368 // Handle specific callback - might be a weak reference in disguise.
407 WeakCallback weak_callback_; 369 WeakCallback weak_callback_;
408 370
409 // Provided data for callback. In FREE state, this is used for 371 // Provided data for callback. In FREE state, this is used for
410 // the free list link. 372 // the free list link.
411 union { 373 union {
412 void* parameter; 374 void* parameter;
413 struct {
414 int16_t internal_field1;
415 int16_t internal_field2;
416 } internal_field_indeces;
417 Node* next_free; 375 Node* next_free;
418 } parameter_or_next_free_; 376 } parameter_or_next_free_;
419 377
420 DISALLOW_COPY_AND_ASSIGN(Node); 378 DISALLOW_COPY_AND_ASSIGN(Node);
421 }; 379 };
422 380
423 381
424 class GlobalHandles::NodeBlock { 382 class GlobalHandles::NodeBlock {
425 public: 383 public:
426 static const int kSize = 256; 384 static const int kSize = 256;
(...skipping 170 matching lines...)
597 if (location != NULL) Node::FromLocation(location)->Release(); 555 if (location != NULL) Node::FromLocation(location)->Release();
598 } 556 }
599 557
600 558
601 void GlobalHandles::MakeWeak(Object** location, void* parameter, 559 void GlobalHandles::MakeWeak(Object** location, void* parameter,
602 WeakCallback weak_callback) { 560 WeakCallback weak_callback) {
603 Node::FromLocation(location)->MakeWeak(parameter, weak_callback); 561 Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
604 } 562 }
605 563
606 564
607 typedef PhantomCallbackData<void>::Callback GenericCallback; 565 typedef PhantomCallbackData<void, void, void>::Callback GenericCallback;
608
609
610 void GlobalHandles::MakePhantom(
611 Object** location,
612 v8::InternalFieldsCallbackData<void, void>::Callback phantom_callback,
613 int16_t internal_field_index1, int16_t internal_field_index2) {
614 Node::FromLocation(location)
615 ->MakePhantom(NULL, reinterpret_cast<GenericCallback>(phantom_callback),
616 internal_field_index1, internal_field_index2);
617 }
618 566
619 567
620 void GlobalHandles::MakePhantom(Object** location, void* parameter, 568 void GlobalHandles::MakePhantom(Object** location, void* parameter,
569 int number_of_internal_fields,
621 GenericCallback phantom_callback) { 570 GenericCallback phantom_callback) {
622 Node::FromLocation(location)->MakePhantom(parameter, phantom_callback, 571 Node::FromLocation(location)
623 v8::Object::kNoInternalFieldIndex, 572 ->MakePhantom(parameter, number_of_internal_fields, phantom_callback);
624 v8::Object::kNoInternalFieldIndex);
625 } 573 }
626 574
627 575
628 void GlobalHandles::CollectPhantomCallbackData() { 576 void GlobalHandles::CollectPhantomCallbackData() {
629 for (NodeIterator it(this); !it.done(); it.Advance()) { 577 for (NodeIterator it(this); !it.done(); it.Advance()) {
630 Node* node = it.node(); 578 Node* node = it.node();
631 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_, 579 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_);
632 &pending_internal_fields_callbacks_);
633 } 580 }
634 } 581 }
635 582
636 583
637 void* GlobalHandles::ClearWeakness(Object** location) { 584 void* GlobalHandles::ClearWeakness(Object** location) {
638 return Node::FromLocation(location)->ClearWeakness(); 585 return Node::FromLocation(location)->ClearWeakness();
639 } 586 }
640 587
641 588
642 void GlobalHandles::MarkIndependent(Object** location) { 589 void GlobalHandles::MarkIndependent(Object** location) {
(...skipping 18 matching lines...)
661 608
662 bool GlobalHandles::IsWeak(Object** location) { 609 bool GlobalHandles::IsWeak(Object** location) {
663 return Node::FromLocation(location)->IsWeak(); 610 return Node::FromLocation(location)->IsWeak();
664 } 611 }
665 612
666 613
667 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) { 614 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
668 for (NodeIterator it(this); !it.done(); it.Advance()) { 615 for (NodeIterator it(this); !it.done(); it.Advance()) {
669 Node* node = it.node(); 616 Node* node = it.node();
670 if (node->IsWeakRetainer()) { 617 if (node->IsWeakRetainer()) {
671 // Weakness type can be normal, phantom or internal fields. 618 // Weakness type can be normal or phantom (with or without internal
672 // For normal weakness we mark through the handle so that 619 // fields). For normal weakness we mark through the handle so that the
673 // the object and things reachable from it are available 620 // object and things reachable from it are available to the callback.
674 // to the callback. 621 //
675 // In the case of phantom we can zap the object handle now 622 // In the case of phantom with no internal fields, we can zap the object
676 // and we won't need it, so we don't need to mark through it. 623 // handle now and we won't need it, so we don't need to mark through it.
677 // In the internal fields case we will need the internal 624 // In the internal fields case we will need the internal
678 // fields, so we can't zap the handle, but we don't need to 625 // fields, so we can't zap the handle.
679 // mark through it, because it will die in this GC round.
680 if (node->state() == Node::PENDING) { 626 if (node->state() == Node::PENDING) {
681 if (node->weakness_type() == PHANTOM_WEAK) { 627 if (node->weakness_type() == PHANTOM_WEAK_0_INTERNAL_FIELDS) {
682 *(node->location()) = Smi::FromInt(0); 628 *(node->location()) = Smi::FromInt(0);
683 } else if (node->weakness_type() == NORMAL_WEAK) { 629 } else if (node->weakness_type() == NORMAL_WEAK) {
684 v->VisitPointer(node->location()); 630 v->VisitPointer(node->location());
685 } else { 631 } else {
686 DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK); 632 DCHECK(node->weakness_type() == PHANTOM_WEAK_1_INTERNAL_FIELDS ||
633 node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
687 } 634 }
688 } else { 635 } else {
689 // Node is not pending, so that means the object survived. We still 636 // Node is not pending, so that means the object survived. We still
690 // need to visit the pointer in case the object moved, eg. because of 637 // need to visit the pointer in case the object moved, eg. because of
691 // compaction. 638 // compaction.
692 v->VisitPointer(node->location()); 639 v->VisitPointer(node->location());
693 } 640 }
694 } 641 }
695 } 642 }
696 } 643 }
(...skipping 32 matching lines...)
729 } 676 }
730 } 677 }
731 678
732 679
733 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) { 680 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
734 for (int i = 0; i < new_space_nodes_.length(); ++i) { 681 for (int i = 0; i < new_space_nodes_.length(); ++i) {
735 Node* node = new_space_nodes_[i]; 682 Node* node = new_space_nodes_[i];
736 DCHECK(node->is_in_new_space_list()); 683 DCHECK(node->is_in_new_space_list());
737 if ((node->is_independent() || node->is_partially_dependent()) && 684 if ((node->is_independent() || node->is_partially_dependent()) &&
738 node->IsWeakRetainer()) { 685 node->IsWeakRetainer()) {
739 if (node->weakness_type() == PHANTOM_WEAK) { 686 if (node->weakness_type() == PHANTOM_WEAK_0_INTERNAL_FIELDS) {
740 *(node->location()) = Smi::FromInt(0); 687 *(node->location()) = Smi::FromInt(0);
741 } else if (node->weakness_type() == NORMAL_WEAK) { 688 } else if (node->weakness_type() == NORMAL_WEAK) {
742 v->VisitPointer(node->location()); 689 v->VisitPointer(node->location());
743 } else { 690 } else {
744 DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK); 691 DCHECK(node->weakness_type() == PHANTOM_WEAK_1_INTERNAL_FIELDS ||
692 node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
745 // For this case we only need to trace if it's alive: The tracing of 693 // For this case we only need to trace if it's alive: The tracing of
746 // something that is already alive is just to get the pointer updated 694 // something that is already alive is just to get the pointer updated
747 // to the new location of the object). 695 // to the new location of the object).
748 DCHECK(node->state() != Node::NEAR_DEATH); 696 DCHECK(node->state() != Node::NEAR_DEATH);
749 if (node->state() != Node::PENDING) { 697 if (node->state() != Node::PENDING) {
750 v->VisitPointer(node->location()); 698 v->VisitPointer(node->location());
751 } 699 }
752 } 700 }
753 } 701 }
754 } 702 }
(...skipping 124 matching lines...)
879 } 827 }
880 } 828 }
881 new_space_nodes_.Rewind(last); 829 new_space_nodes_.Rewind(last);
882 } 830 }
883 831
884 832
885 int GlobalHandles::DispatchPendingPhantomCallbacks() { 833 int GlobalHandles::DispatchPendingPhantomCallbacks() {
886 int freed_nodes = 0; 834 int freed_nodes = 0;
887 while (pending_phantom_callbacks_.length() != 0) { 835 while (pending_phantom_callbacks_.length() != 0) {
888 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast(); 836 PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast();
837 DCHECK(callback.node()->IsInUse());
889 callback.invoke(); 838 callback.invoke();
890 freed_nodes++; 839 DCHECK(!callback.node()->IsInUse());
891 }
892 while (pending_internal_fields_callbacks_.length() != 0) {
893 PendingInternalFieldsCallback callback =
894 pending_internal_fields_callbacks_.RemoveLast();
895 callback.invoke();
896 freed_nodes++; 840 freed_nodes++;
897 } 841 }
898 return freed_nodes; 842 return freed_nodes;
899 } 843 }
900 844
901 845
902 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) { 846 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) {
903 // Process weak global handle callbacks. This must be done after the 847 // Process weak global handle callbacks. This must be done after the
904 // GC is completely done, because the callbacks may invoke arbitrary 848 // GC is completely done, because the callbacks may invoke arbitrary
905 // API functions. 849 // API functions.
(...skipping 383 matching lines...)
1289 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]); 1233 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1290 blocks_[block][offset] = object; 1234 blocks_[block][offset] = object;
1291 if (isolate->heap()->InNewSpace(object)) { 1235 if (isolate->heap()->InNewSpace(object)) {
1292 new_space_indices_.Add(size_); 1236 new_space_indices_.Add(size_);
1293 } 1237 }
1294 *index = size_++; 1238 *index = size_++;
1295 } 1239 }
1296 1240
1297 1241
1298 } } // namespace v8::internal 1242 } } // namespace v8::internal
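One closing note on the dispatch side: GlobalHandles::DispatchPendingPhantomCallbacks in the diff above now brackets each invoke() with DCHECKs that the node is in use before the callback runs and released afterwards, which is the "require callback to reset handle" contract from the patch set title. Below is a minimal sketch of that invariant, using made-up stand-in types (FakeNode, PendingCallback) rather than the real V8 classes.

#include <cassert>
#include <functional>
#include <vector>

// Stand-in for GlobalHandles::Node; only models the in-use flag.
struct FakeNode {
  bool in_use = true;
  void Release() { in_use = false; }
  bool IsInUse() const { return in_use; }
};

// Stand-in for PendingPhantomCallback: holds the node and a callback that is
// expected to end by releasing (resetting) it.
struct PendingCallback {
  FakeNode* node;
  std::function<void(FakeNode*)> callback;
  void invoke() { callback(node); }
};

// Mirrors the shape of DispatchPendingPhantomCallbacks in the diff above.
int DispatchPending(std::vector<PendingCallback>* pending) {
  int freed_nodes = 0;
  while (!pending->empty()) {
    PendingCallback cb = pending->back();
    pending->pop_back();
    assert(cb.node->IsInUse());   // mirrors DCHECK(callback.node()->IsInUse())
    cb.invoke();
    assert(!cb.node->IsInUse());  // the callback was required to reset it
    freed_nodes++;
  }
  return freed_nodes;
}

int main() {
  FakeNode node;
  std::vector<PendingCallback> pending;
  pending.push_back({&node, [](FakeNode* n) { n->Release(); }});
  int freed = DispatchPending(&pending);
  assert(freed == 1);
  (void)freed;
  return 0;
}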