OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 2854 matching lines...)
2865 void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) { | 2865 void MarkCompactCollector::UpdateSlotsRecordedIn(SlotsBuffer* buffer) { |
2866 while (buffer != NULL) { | 2866 while (buffer != NULL) { |
2867 UpdateSlots(buffer); | 2867 UpdateSlots(buffer); |
2868 buffer = buffer->next(); | 2868 buffer = buffer->next(); |
2869 } | 2869 } |
2870 } | 2870 } |
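
UpdateSlotsRecordedIn drains a chain of slot buffers: each SlotsBuffer is handed to UpdateSlots and the walk follows next() until the chain ends. A minimal sketch of that chained-buffer idiom, with a made-up Buffer type (only the next()-based chaining is taken from the code above):

  // Sketch only: Buffer and process are illustrative stand-ins for
  // SlotsBuffer and UpdateSlots; the loop mirrors the walk above.
  struct Buffer {
    Buffer* next;
  };

  template <typename ProcessFn>
  void ForEachBufferInChain(Buffer* buffer, ProcessFn process) {
    while (buffer != nullptr) {
      process(buffer);        // e.g. update every slot recorded in this buffer
      buffer = buffer->next;  // follow the chain
    }
  }
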
2871 | 2871 |
2872 | 2872 |
2873 static void UpdatePointer(HeapObject** address, HeapObject* object) { | 2873 static void UpdatePointer(HeapObject** address, HeapObject* object) { |
2874 MapWord map_word = object->map_word(); | 2874 MapWord map_word = object->map_word(); |
2875 // The store buffer can still contain stale pointers in dead large objects. | 2875 // Since we only filter invalid slots in old space, the store buffer can |
2876 // Ignore these pointers here. | 2876 // still contain stale pointers in large object and in map spaces. Ignore |
| 2877 // these pointers here. |
2877 DCHECK(map_word.IsForwardingAddress() || | 2878 DCHECK(map_word.IsForwardingAddress() || |
2878 object->GetHeap()->lo_space()->FindPage( | 2879 !object->GetHeap()->old_space()->Contains( |
2879 reinterpret_cast<Address>(address)) != NULL); | 2880 reinterpret_cast<Address>(address))); |
2880 if (map_word.IsForwardingAddress()) { | 2881 if (map_word.IsForwardingAddress()) { |
2881 // Update the corresponding slot. | 2882 // Update the corresponding slot. |
2882 *address = map_word.ToForwardingAddress(); | 2883 *address = map_word.ToForwardingAddress(); |
2883 } | 2884 } |
2884 } | 2885 } |
2885 | 2886 |
2886 | 2887 |
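
The comment above explains why UpdatePointer tolerates stale entries: only old-space slots are filtered, so recorded slots in large-object and map space may still reference dead objects, and those are skipped simply because their map word is not a forwarding address. A minimal sketch of that decision, using a hypothetical Forwardee type in place of V8's HeapObject/MapWord pair:

  // Sketch only: Forwardee stands in for a heap object whose header either
  // holds a forwarding pointer (it was evacuated) or does not (it is either
  // live in place or a stale, dead target that must be ignored).
  struct Forwardee {
    Forwardee* forwarding;  // non-null once the object has been evacuated
  };

  static void UpdateSlotIfForwarded(Forwardee** slot) {
    Forwardee* target = *slot;
    if (target->forwarding != nullptr) {
      *slot = target->forwarding;  // rewrite the slot to the new location
    }
    // Otherwise leave the slot alone; as in UpdatePointer, a stale entry for
    // an unfiltered space is harmless here and is deliberately not rewritten.
  }
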
2887 static String* UpdateReferenceInExternalStringTableEntry(Heap* heap, | 2888 static String* UpdateReferenceInExternalStringTableEntry(Heap* heap, |
2888 Object** p) { | 2889 Object** p) { |
2889 MapWord map_word = HeapObject::cast(*p)->map_word(); | 2890 MapWord map_word = HeapObject::cast(*p)->map_word(); |
(...skipping 437 matching lines...)
3327 // - Rescan the page as slot recording in the migration buffer only | 3328 // - Rescan the page as slot recording in the migration buffer only |
3328 // happens upon moving (which we potentially didn't do). | 3329 // happens upon moving (which we potentially didn't do). |
3329 // - Leave the page in the list of pages of a space since we could not | 3330 // - Leave the page in the list of pages of a space since we could not |
3330 // fully evacuate it. | 3331 // fully evacuate it. |
3331 // - Mark them for rescanning for store buffer entries as we otherwise | 3332 // - Mark them for rescanning for store buffer entries as we otherwise |
3332 // might have stale store buffer entries that become "valid" again | 3333 // might have stale store buffer entries that become "valid" again |
3333 // after reusing the memory. Note that all existing store buffer | 3334 // after reusing the memory. Note that all existing store buffer |
3334 // entries of such pages are filtered before rescanning. | 3335 // entries of such pages are filtered before rescanning. |
3335 DCHECK(p->IsEvacuationCandidate()); | 3336 DCHECK(p->IsEvacuationCandidate()); |
3336 p->SetFlag(Page::COMPACTION_WAS_ABORTED); | 3337 p->SetFlag(Page::COMPACTION_WAS_ABORTED); |
3337 p->set_scan_on_scavenge(true); | |
3338 abandoned_pages++; | 3338 abandoned_pages++; |
3339 break; | 3339 break; |
3340 case MemoryChunk::kCompactingFinalize: | 3340 case MemoryChunk::kCompactingFinalize: |
3341 DCHECK(p->IsEvacuationCandidate()); | 3341 DCHECK(p->IsEvacuationCandidate()); |
3342 DCHECK(p->SweepingDone()); | 3342 DCHECK(p->SweepingDone()); |
3343 p->Unlink(); | 3343 p->Unlink(); |
3344 break; | 3344 break; |
3345 case MemoryChunk::kCompactingDone: | 3345 case MemoryChunk::kCompactingDone: |
3346 DCHECK(p->IsFlagSet(Page::POPULAR_PAGE)); | 3346 DCHECK(p->IsFlagSet(Page::POPULAR_PAGE)); |
3347 DCHECK(p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3347 DCHECK(p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
(...skipping 218 matching lines...)
3566 | 3566 |
3567 LiveObjectIterator<kBlackObjects> it(page); | 3567 LiveObjectIterator<kBlackObjects> it(page); |
3568 HeapObject* object = nullptr; | 3568 HeapObject* object = nullptr; |
3569 while ((object = it.Next()) != nullptr) { | 3569 while ((object = it.Next()) != nullptr) { |
3570 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3570 DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
3571 if (!visitor->Visit(object)) { | 3571 if (!visitor->Visit(object)) { |
3572 if (mode == kClearMarkbits) { | 3572 if (mode == kClearMarkbits) { |
3573 page->markbits()->ClearRange( | 3573 page->markbits()->ClearRange( |
3574 page->AddressToMarkbitIndex(page->area_start()), | 3574 page->AddressToMarkbitIndex(page->area_start()), |
3575 page->AddressToMarkbitIndex(object->address())); | 3575 page->AddressToMarkbitIndex(object->address())); |
| 3576 if (page->old_to_new_slots() != nullptr) { |
| 3577 page->old_to_new_slots()->RemoveRange( |
| 3578 0, static_cast<int>(object->address() - page->address())); |
| 3579 } |
3576 RecomputeLiveBytes(page); | 3580 RecomputeLiveBytes(page); |
3577 } | 3581 } |
3578 return false; | 3582 return false; |
3579 } | 3583 } |
3580 } | 3584 } |
3581 if (mode == kClearMarkbits) { | 3585 if (mode == kClearMarkbits) { |
3582 Bitmap::Clear(page); | 3586 Bitmap::Clear(page); |
3583 } | 3587 } |
3584 return true; | 3588 return true; |
3585 } | 3589 } |
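
When the visitor bails out under kClearMarkbits, the code above rolls back the partially processed prefix of the page: mark bits are cleared from area_start() up to the failing object, and the matching old-to-new remembered-set entries are removed. The two clears use different coordinates: the mark-bit range is expressed as indices obtained from AddressToMarkbitIndex and begins at area_start() (the page header holds no objects), while RemoveRange takes byte offsets from the page base, which is why it runs from 0 to object->address() - page->address(). A small sketch of that offset arithmetic, with no real V8 calls involved:

  #include <cstdint>

  // Illustrative only: mirrors the arithmetic above rather than calling real
  // V8 APIs; the "failed object" is the first object the visitor rejected.
  int SlotRemoveRangeEnd(uintptr_t page_base, uintptr_t failed_object) {
    // old_to_new_slots()->RemoveRange(0, end) takes byte offsets from the
    // page base, so the end of the cleared range is simply the distance from
    // the page base to the object that was not processed.
    return static_cast<int>(failed_object - page_base);
  }
  // The mark bits, by contrast, are cleared between
  // AddressToMarkbitIndex(area_start()) and AddressToMarkbitIndex(object).
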
(...skipping 134 matching lines...)
3720 SemiSpaceIterator to_it(heap()->new_space()); | 3724 SemiSpaceIterator to_it(heap()->new_space()); |
3721 for (HeapObject* object = to_it.Next(); object != NULL; | 3725 for (HeapObject* object = to_it.Next(); object != NULL; |
3722 object = to_it.Next()) { | 3726 object = to_it.Next()) { |
3723 Map* map = object->map(); | 3727 Map* map = object->map(); |
3724 object->IterateBody(map->instance_type(), object->SizeFromMap(map), | 3728 object->IterateBody(map->instance_type(), object->SizeFromMap(map), |
3725 &updating_visitor); | 3729 &updating_visitor); |
3726 } | 3730 } |
3727 // Update roots. | 3731 // Update roots. |
3728 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); | 3732 heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE); |
3729 | 3733 |
3730 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), | |
3731 &Heap::ScavengeStoreBufferCallback); | |
3732 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); | 3734 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); |
3733 } | 3735 } |
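
The block above drives one visitor over three slot sources: object bodies in new space (via SemiSpaceIterator and IterateBody), the roots (via IterateRoots), and store-buffer entries (via IteratePointersToNewSpace with UpdatePointer). The class behind updating_visitor is not shown in this hunk; as a hedged sketch, a pointer-updating visitor built on the ObjectVisitor interface of this V8 generation can look roughly like the following (it depends on V8's internal headers, e.g. src/objects.h, and is not the actual visitor used here):

  // Sketch only: the general shape of an ObjectVisitor that rewrites slots
  // through forwarding addresses left behind by evacuation.
  class SlotUpdatingVisitor : public ObjectVisitor {
   public:
    void VisitPointers(Object** start, Object** end) override {
      for (Object** slot = start; slot < end; ++slot) {
        Object* value = *slot;
        if (!value->IsHeapObject()) continue;  // Smis never move
        MapWord map_word = HeapObject::cast(value)->map_word();
        if (map_word.IsForwardingAddress()) {
          *slot = map_word.ToForwardingAddress();  // follow the evacuated copy
        }
      }
    }
  };
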
3734 | 3736 |
3735 { | 3737 { |
3736 GCTracer::Scope gc_scope( | 3738 GCTracer::Scope gc_scope( |
3737 heap()->tracer(), | 3739 heap()->tracer(), |
3738 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED); | 3740 GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED); |
3739 for (Page* p : evacuation_candidates_) { | 3741 for (Page* p : evacuation_candidates_) { |
3740 DCHECK(p->IsEvacuationCandidate() || | 3742 DCHECK(p->IsEvacuationCandidate() || |
3741 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3743 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
(...skipping 66 matching lines...)
3808 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); | 3810 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); |
3809 } | 3811 } |
3810 } | 3812 } |
3811 | 3813 |
3812 | 3814 |
3813 void MarkCompactCollector::ReleaseEvacuationCandidates() { | 3815 void MarkCompactCollector::ReleaseEvacuationCandidates() { |
3814 for (Page* p : evacuation_candidates_) { | 3816 for (Page* p : evacuation_candidates_) { |
3815 if (!p->IsEvacuationCandidate()) continue; | 3817 if (!p->IsEvacuationCandidate()) continue; |
3816 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); | 3818 PagedSpace* space = static_cast<PagedSpace*>(p->owner()); |
3817 space->Free(p->area_start(), p->area_size()); | 3819 space->Free(p->area_start(), p->area_size()); |
3818 p->set_scan_on_scavenge(false); | |
3819 p->ResetLiveBytes(); | 3820 p->ResetLiveBytes(); |
3820 CHECK(p->SweepingDone()); | 3821 CHECK(p->SweepingDone()); |
3821 space->ReleasePage(p, true); | 3822 space->ReleasePage(p, true); |
3822 } | 3823 } |
3823 evacuation_candidates_.Rewind(0); | 3824 evacuation_candidates_.Rewind(0); |
3824 compacting_ = false; | 3825 compacting_ = false; |
3825 heap()->FilterStoreBufferEntriesOnAboutToBeFreedPages(); | |
3826 heap()->FreeQueuedChunks(); | 3826 heap()->FreeQueuedChunks(); |
3827 } | 3827 } |
3828 | 3828 |
3829 | 3829 |
3830 int MarkCompactCollector::SweepInParallel(PagedSpace* space, | 3830 int MarkCompactCollector::SweepInParallel(PagedSpace* space, |
3831 int required_freed_bytes, | 3831 int required_freed_bytes, |
3832 int max_pages) { | 3832 int max_pages) { |
3833 int max_freed = 0; | 3833 int max_freed = 0; |
3834 int max_freed_overall = 0; | 3834 int max_freed_overall = 0; |
3835 int page_count = 0; | 3835 int page_count = 0; |
(...skipping 227 matching lines...)
4063 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4063 MarkBit mark_bit = Marking::MarkBitFrom(host); |
4064 if (Marking::IsBlack(mark_bit)) { | 4064 if (Marking::IsBlack(mark_bit)) { |
4065 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); | 4065 RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host); |
4066 RecordRelocSlot(&rinfo, target); | 4066 RecordRelocSlot(&rinfo, target); |
4067 } | 4067 } |
4068 } | 4068 } |
4069 } | 4069 } |
4070 | 4070 |
4071 } // namespace internal | 4071 } // namespace internal |
4072 } // namespace v8 | 4072 } // namespace v8 |