Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 871253005: Use weak cells in dependent code. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Address comments (created 5 years, 10 months ago)
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/v8.h"
 
 #include "src/base/atomicops.h"
 #include "src/base/bits.h"
 #include "src/code-stubs.h"
 #include "src/compilation-cache.h"
(...skipping 283 matching lines...)
 
 
 void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
   DCHECK(state_ == PREPARE_GC);
 
   MarkLiveObjects();
   DCHECK(heap_->incremental_marking()->IsStopped());
 
+  // ClearNonLiveReferences can deoptimize code in dependent code arrays.
+  // Process weak cells first, so that weak cells in dependent code arrays
+  // are cleared or contain only live code objects.
+  ProcessAndClearWeakCells();
+
   if (FLAG_collect_maps) ClearNonLiveReferences();
 
-  ProcessAndClearWeakCells();
-
   ClearWeakCollections();
 
   heap_->set_encountered_weak_cells(Smi::FromInt(0));
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     VerifyMarking(heap_);
   }
 #endif
 
   SweepSpaces();
 
 #ifdef VERIFY_HEAP
-  if (heap()->weak_embedded_objects_verification_enabled()) {
-    VerifyWeakEmbeddedObjectsInCode();
-  }
+  VerifyWeakEmbeddedObjectsInCode();
   if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
     VerifyOmittedMapChecks();
   }
 #endif
 
   Finish();
 
   if (marking_parity_ == EVEN_MARKING_PARITY) {
     marking_parity_ = ODD_MARKING_PARITY;
   } else {
(...skipping 1617 matching lines...)
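Note on the reordering in CollectGarbage() above: ProcessAndClearWeakCells() is what nulls out the value of any WeakCell whose referent died during marking, so running it before ClearNonLiveReferences() guarantees that dependent code arrays hold only cleared cells or live code. A minimal sketch of that invariant, using simplified hypothetical types rather than V8's real WeakCell:

#include <cassert>

struct Code { bool live = false; };  // stand-in for a marked/unmarked Code object

struct WeakCell {
  Code* value = nullptr;
  bool cleared() const { return value == nullptr; }
  void clear() { value = nullptr; }
};

// After this pass, every cell is either cleared or points at live code,
// which is exactly what the later dependent-code walk relies on.
void ProcessCells(WeakCell* cells, int n) {
  for (int i = 0; i < n; i++) {
    if (!cells[i].cleared() && !cells[i].value->live) cells[i].clear();
  }
  for (int i = 0; i < n; i++) {
    assert(cells[i].cleared() || cells[i].value->live);
  }
}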
 
 
 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
   // Mark the heap roots including global variables, stack variables,
   // etc., and all objects reachable from them.
   heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
 
   // Handle the string table specially.
   MarkStringTable(visitor);
 
-  MarkWeakObjectToCodeTable();
-
   // There may be overflowed objects in the heap. Visit them now.
   while (marking_deque_.overflowed()) {
     RefillMarkingDeque();
     EmptyMarkingDeque();
   }
 }
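The while loop at the end of MarkRoots() is the standard overflow recovery for a bounded marking worklist: when the deque fills up, objects are flagged in the heap instead, and RefillMarkingDeque() rescans for flagged objects. A self-contained sketch of that control flow, with hypothetical simplified types:

#include <vector>

struct Obj { bool marked = false; bool overflowed = false; };

struct BoundedWorklist {
  std::vector<Obj*> buf;
  size_t capacity;
  bool overflow = false;
  explicit BoundedWorklist(size_t cap) : capacity(cap) {}
  bool IsEmpty() const { return buf.empty(); }
  void Push(Obj* o) {
    if (buf.size() >= capacity) {  // no room: remember in the object itself
      o->overflowed = true;
      overflow = true;
    } else {
      buf.push_back(o);
    }
  }
};

// Drain the worklist, rescanning the heap whenever it overflowed.
void DrainWithRefill(BoundedWorklist* deque, std::vector<Obj>& heap) {
  do {
    deque->overflow = false;
    for (Obj& o : heap) {          // cf. RefillMarkingDeque()
      if (o.overflowed) { o.overflowed = false; deque->Push(&o); }
    }
    while (!deque->IsEmpty()) {    // cf. EmptyMarkingDeque()
      Obj* o = deque->buf.back();
      deque->buf.pop_back();
      o->marked = true;
    }
  } while (deque->overflow);
}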
 
 
 void MarkCompactCollector::MarkImplicitRefGroups() {
   List<ImplicitRefGroup*>* ref_groups =
(...skipping 20 matching lines...)
     }
 
     // Once the entire group has been marked, dispose it because it's
     // not needed anymore.
     delete entry;
   }
   ref_groups->Rewind(last);
 }
 
 
-void MarkCompactCollector::MarkWeakObjectToCodeTable() {
-  HeapObject* weak_object_to_code_table =
-      HeapObject::cast(heap()->weak_object_to_code_table());
-  if (!IsMarked(weak_object_to_code_table)) {
-    MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
-    SetMark(weak_object_to_code_table, mark);
-  }
-}
-
-
 // Mark all objects reachable from the objects on the marking stack.
 // Before: the marking stack contains zero or more heap object pointers.
 // After: the marking stack is empty, and all objects reachable from the
 // marking stack have been marked, or are overflowed in the heap.
 void MarkCompactCollector::EmptyMarkingDeque() {
   Map* filler_map = heap_->one_pointer_filler_map();
   while (!marking_deque_.IsEmpty()) {
     HeapObject* object = marking_deque_.Pop();
     // Explicitly skip one word fillers. Incremental markbit patterns are
     // correct only for objects that occupy at least two words.
(...skipping 298 matching lines...)
   for (HeapObject* obj = map_iterator.Next(); obj != NULL;
        obj = map_iterator.Next()) {
     Map* map = Map::cast(obj);
 
     if (!map->CanTransition()) continue;
 
     MarkBit map_mark = Marking::MarkBitFrom(map);
     ClearNonLivePrototypeTransitions(map);
     ClearNonLiveMapTransitions(map, map_mark);
 
-    if (map_mark.Get()) {
-      ClearNonLiveDependentCode(map->dependent_code());
-    } else {
-      ClearDependentCode(map->dependent_code());
+    if (!map_mark.Get()) {
+      have_code_to_deoptimize_ |=
+          map->dependent_code()->MarkCodeForDeoptimization(
+              isolate(), DependentCode::kWeakCodeGroup);
       map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
     }
   }
 
-  // Iterate over property cell space, removing dependent code that is not
-  // otherwise kept alive by strong references.
-  HeapObjectIterator cell_iterator(heap_->property_cell_space());
-  for (HeapObject* cell = cell_iterator.Next(); cell != NULL;
-       cell = cell_iterator.Next()) {
-    if (IsMarked(cell)) {
-      ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
-    }
-  }
-
-  // Iterate over allocation sites, removing dependent code that is not
-  // otherwise kept alive by strong references.
-  Object* undefined = heap()->undefined_value();
-  for (Object* site = heap()->allocation_sites_list(); site != undefined;
-       site = AllocationSite::cast(site)->weak_next()) {
-    if (IsMarked(site)) {
-      ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code());
-    }
-  }
-
-  if (heap_->weak_object_to_code_table()->IsHashTable()) {
-    WeakHashTable* table =
-        WeakHashTable::cast(heap_->weak_object_to_code_table());
-    uint32_t capacity = table->Capacity();
-    for (uint32_t i = 0; i < capacity; i++) {
-      uint32_t key_index = table->EntryToIndex(i);
-      Object* key = table->get(key_index);
-      if (!table->IsKey(key)) continue;
-      uint32_t value_index = table->EntryToValueIndex(i);
-      Object* value = table->get(value_index);
-      if (key->IsCell() && !IsMarked(key)) {
-        Cell* cell = Cell::cast(key);
-        Object* object = cell->value();
-        if (IsMarked(object)) {
-          MarkBit mark = Marking::MarkBitFrom(cell);
-          SetMark(cell, mark);
-          Object** value_slot = HeapObject::RawField(cell, Cell::kValueOffset);
-          RecordSlot(value_slot, value_slot, *value_slot);
-        }
-      }
-      if (IsMarked(key)) {
-        if (!IsMarked(value)) {
-          HeapObject* obj = HeapObject::cast(value);
-          MarkBit mark = Marking::MarkBitFrom(obj);
-          SetMark(obj, mark);
-        }
-        ClearNonLiveDependentCode(DependentCode::cast(value));
-      } else {
-        ClearDependentCode(DependentCode::cast(value));
-        table->set(key_index, heap_->the_hole_value());
-        table->set(value_index, heap_->the_hole_value());
-        table->ElementRemoved();
-      }
-    }
-  }
+  WeakHashTable* table = heap_->weak_object_to_code_table();
+  uint32_t capacity = table->Capacity();
+  for (uint32_t i = 0; i < capacity; i++) {
+    uint32_t key_index = table->EntryToIndex(i);
+    Object* key = table->get(key_index);
+    if (!table->IsKey(key)) continue;
+    uint32_t value_index = table->EntryToValueIndex(i);
+    Object* value = table->get(value_index);
+    if (WeakCell::cast(key)->cleared()) {
+      have_code_to_deoptimize_ |=
+          DependentCode::cast(value)->MarkCodeForDeoptimization(
+              isolate(), DependentCode::kWeakCodeGroup);
+      table->set(key_index, heap_->the_hole_value());
+      table->set(value_index, heap_->the_hole_value());
+      table->ElementRemoved();
+    }
+  }
 }
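For context, the new weak_object_to_code_table loop above is a sweep over a weak-keyed table: an entry whose WeakCell key has been cleared gets its DependentCode value marked for deoptimization, then the entry itself is deleted. The same pattern in self-contained C++, with std::weak_ptr standing in for WeakCell (all names here are hypothetical, not part of the patch):

#include <memory>
#include <vector>

struct Code { bool marked_for_deoptimization = false; };

struct Entry {
  std::weak_ptr<void> object;    // weakly held key, cf. the WeakCell
  std::vector<Code*> dependent;  // cf. the DependentCode value
};

// Sweep the table: a dead key means its dependent code must be
// deoptimized and the entry removed (cf. table->ElementRemoved()).
bool SweepDependentCodeTable(std::vector<Entry>* table) {
  bool have_code_to_deoptimize = false;
  for (auto it = table->begin(); it != table->end();) {
    if (it->object.expired()) {  // cf. WeakCell::cleared()
      for (Code* code : it->dependent) {
        code->marked_for_deoptimization = true;
        have_code_to_deoptimize = true;
      }
      it = table->erase(it);
    } else {
      ++it;
    }
  }
  return have_code_to_deoptimize;
}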
 
 
 void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
   int number_of_transitions = map->NumberOfProtoTransitions();
   FixedArray* prototype_transitions = map->GetPrototypeTransitions();
 
   int new_number_of_transitions = 0;
(...skipping 151 matching lines...)
   if (to_trim <= 0) return;
   heap_->RightTrimFixedArray<Heap::FROM_GC>(descriptors->GetEnumCache(),
                                             to_trim);
 
   if (!descriptors->HasEnumIndicesCache()) return;
   FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
   heap_->RightTrimFixedArray<Heap::FROM_GC>(enum_indices_cache, to_trim);
 }
 
 
-void MarkCompactCollector::ClearDependentCode(DependentCode* entries) {
-  DisallowHeapAllocation no_allocation;
-  DependentCode::GroupStartIndexes starts(entries);
-  int number_of_entries = starts.number_of_entries();
-  if (number_of_entries == 0) return;
-  int g = DependentCode::kWeakCodeGroup;
-  for (int i = starts.at(g); i < starts.at(g + 1); i++) {
-    // If the entry is compilation info then the map must be alive,
-    // and ClearDependentCode shouldn't be called.
-    DCHECK(entries->is_code_at(i));
-    Code* code = entries->code_at(i);
-    if (IsMarked(code) && !code->marked_for_deoptimization()) {
-      DependentCode::SetMarkedForDeoptimization(
-          code, static_cast<DependentCode::DependencyGroup>(g));
-      code->InvalidateEmbeddedObjects();
-      have_code_to_deoptimize_ = true;
-    }
-  }
-  for (int i = 0; i < number_of_entries; i++) {
-    entries->clear_at(i);
-  }
-}
-
-
-int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
-    DependentCode* entries, int group, int start, int end, int new_start) {
-  int survived = 0;
-  for (int i = start; i < end; i++) {
-    Object* obj = entries->object_at(i);
-    DCHECK(obj->IsCode() || IsMarked(obj));
-    if (IsMarked(obj) &&
-        (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
-      if (new_start + survived != i) {
-        entries->set_object_at(new_start + survived, obj);
-      }
-      Object** slot = entries->slot_at(new_start + survived);
-      RecordSlot(slot, slot, obj);
-      survived++;
-    }
-  }
-  entries->set_number_of_entries(
-      static_cast<DependentCode::DependencyGroup>(group), survived);
-  return survived;
-}
-
-
-void MarkCompactCollector::ClearNonLiveDependentCode(DependentCode* entries) {
-  DisallowHeapAllocation no_allocation;
-  DependentCode::GroupStartIndexes starts(entries);
-  int number_of_entries = starts.number_of_entries();
-  if (number_of_entries == 0) return;
-  int new_number_of_entries = 0;
-  // Go through all groups, remove dead code and compact.
-  for (int g = 0; g < DependentCode::kGroupCount; g++) {
-    int survived = ClearNonLiveDependentCodeInGroup(
-        entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
-    new_number_of_entries += survived;
-  }
-  for (int i = new_number_of_entries; i < number_of_entries; i++) {
-    entries->clear_at(i);
-  }
-}
-
-
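The three functions deleted above implemented per-entry liveness filtering; with weak cells, that filtering collapses into DependentCode::MarkCodeForDeoptimization(), so the collector no longer compacts dependent-code arrays itself. The idiom ClearNonLiveDependentCodeInGroup() used is still worth noting: an in-place stable filter. A standalone sketch (hypothetical names, plain C++ containers in place of V8's DependentCode):

#include <vector>

// Keep only entries satisfying `alive`, compacting survivors to the front
// and returning their count; the caller clears the tail slots.
template <typename T, typename Pred>
int CompactSurvivors(std::vector<T>* entries, Pred alive) {
  int survived = 0;
  const int n = static_cast<int>(entries->size());
  for (int i = 0; i < n; i++) {
    if (alive((*entries)[i])) {
      if (survived != i) (*entries)[survived] = (*entries)[i];
      survived++;
    }
  }
  return survived;
}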
 void MarkCompactCollector::ProcessWeakCollections() {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::FromInt(0)) {
     JSWeakCollection* weak_collection =
         reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
     DCHECK(MarkCompactCollector::IsMarked(weak_collection));
     if (weak_collection->table()->IsHashTable()) {
       ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
(...skipping 55 matching lines...)
 
 void MarkCompactCollector::ProcessAndClearWeakCells() {
   HeapObject* undefined = heap()->undefined_value();
   Object* weak_cell_obj = heap()->encountered_weak_cells();
   while (weak_cell_obj != Smi::FromInt(0)) {
     WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
     // We do not insert cleared weak cells into the list, so the value
     // cannot be a Smi here.
     HeapObject* value = HeapObject::cast(weak_cell->value());
     if (!MarkCompactCollector::IsMarked(value)) {
-      weak_cell->clear();
+      // Cells for new-space objects embedded in optimized code are wrapped
+      // in WeakCells and put into Heap::weak_object_to_code_table.
+      // Such cells do not have any strong references, but we want to keep
+      // them alive as long as the cell value is alive.
+      // TODO(ulan): remove this once we remove Heap::weak_object_to_code_table.
+      if (value->IsCell()) {
+        Object* cell_value = Cell::cast(value)->value();
+        if (cell_value->IsHeapObject() &&
+            MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) {
+          // Resurrect the cell.
+          MarkBit mark = Marking::MarkBitFrom(value);
+          SetMark(value, mark);
+          Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
+          RecordSlot(slot, slot, *slot);
+          slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
+          RecordSlot(slot, slot, *slot);
+        } else {
+          weak_cell->clear();
+        }
+      } else {
+        weak_cell->clear();
+      }
     } else {
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-      heap()->mark_compact_collector()->RecordSlot(slot, slot, value);
+      RecordSlot(slot, slot, *slot);
     }
     weak_cell_obj = weak_cell->next();
     weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
   }
   heap()->set_encountered_weak_cells(Smi::FromInt(0));
 }
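ProcessAndClearWeakCells() consumes an intrusive list that marking threaded through the WeakCells themselves (heap()->encountered_weak_cells(), terminated by Smi zero), so no side allocation is needed during GC. A sketch of that traversal with simplified, hypothetical types:

struct Value { bool marked = false; };

struct WeakCellNode {
  Value* value = nullptr;
  WeakCellNode* next = nullptr;  // the intrusive "encountered" link
};

// Single pass: clear cells whose value died, and unlink every node so the
// next GC cycle can rebuild the list from scratch.
void ProcessEncounteredWeakCells(WeakCellNode** head) {
  WeakCellNode* cell = *head;
  while (cell != nullptr) {
    if (cell->value != nullptr && !cell->value->marked) {
      cell->value = nullptr;     // cf. weak_cell->clear()
    }
    WeakCellNode* next = cell->next;
    cell->next = nullptr;        // cf. weak_cell->set_next(undefined)
    cell = next;
  }
  *head = nullptr;               // cf. set_encountered_weak_cells(0)
}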
 
 
 void MarkCompactCollector::AbortWeakCells() {
   Object* undefined = heap()->undefined_value();
(...skipping 825 matching lines...)
   HeapObjectIterator js_global_property_cell_iterator(
       heap_->property_cell_space());
   for (HeapObject* cell = js_global_property_cell_iterator.Next(); cell != NULL;
        cell = js_global_property_cell_iterator.Next()) {
     if (cell->IsPropertyCell()) {
       PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor);
     }
   }
 
   heap_->string_table()->Iterate(&updating_visitor);
-  updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
-  if (heap_->weak_object_to_code_table()->IsHashTable()) {
-    WeakHashTable* table =
-        WeakHashTable::cast(heap_->weak_object_to_code_table());
-    table->Iterate(&updating_visitor);
-    table->Rehash(heap_->isolate()->factory()->undefined_value());
-  }
 
   // Update pointers from external string table.
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);
 
   EvacuationWeakObjectRetainer evacuation_object_retainer;
   heap()->ProcessAllWeakReferences(&evacuation_object_retainer);
 
   // Collects callback info for handles that are pending (about to be
   // collected) and either phantom or internal-fields. Releases the global
   // handles. See also PostGarbageCollectionProcessing.
   isolate()->global_handles()->CollectAllPhantomCallbackData();
 
   // Visit invalidated code (we ignored all slots on it) and clear mark-bits
   // under it.
   ProcessInvalidatedCode(&updating_visitor);
 
   heap_->isolate()->inner_pointer_to_code_cache()->Flush();
 
   slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
   DCHECK(migration_slots_buffer_ == NULL);
+
+  // The hashing of weak_object_to_code_table is no longer valid.
+  heap()->weak_object_to_code_table()->Rehash(
+      heap()->isolate()->factory()->undefined_value());
 }
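The Rehash() call added at the end of the function runs after evacuation because compaction moves the table's keys, and a hash table whose bucket positions were derived from object identity is stale once objects move. Illustrative only (plain C++, hypothetical names): rebuilding an address-keyed map from a relocation table restores the lookup invariant, which is what Rehash() does in spirit.

#include <unordered_map>

struct Obj { int payload; };

using AddressKeyedTable = std::unordered_map<const Obj*, int>;
using RelocationMap = std::unordered_map<const Obj*, const Obj*>;  // old -> new

// Re-insert every entry under its post-move key.
AddressKeyedTable RehashAfterMove(const AddressKeyedTable& old_table,
                                  const RelocationMap& relocated) {
  AddressKeyedTable new_table;
  for (const auto& entry : old_table) {
    auto it = relocated.find(entry.first);
    const Obj* key = (it != relocated.end()) ? it->second : entry.first;
    new_table.emplace(key, entry.second);
  }
  return new_table;
}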
 
 
 void MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList() {
   int npages = evacuation_candidates_.length();
   for (int i = 0; i < npages; i++) {
     Page* p = evacuation_candidates_[i];
     if (!p->IsEvacuationCandidate()) continue;
     p->Unlink();
     PagedSpace* space = static_cast<PagedSpace*>(p->owner());
(...skipping 834 matching lines...)
   SlotsBuffer* buffer = *buffer_address;
   while (buffer != NULL) {
     SlotsBuffer* next_buffer = buffer->next();
     DeallocateBuffer(buffer);
     buffer = next_buffer;
   }
   *buffer_address = NULL;
 }
 }
 }  // namespace v8::internal