Chromium Code Reviews
| Index: src/mark-compact.cc |
| diff --git a/src/mark-compact.cc b/src/mark-compact.cc |
| index f6b495a209d840efc735c55e6e47efd570ec3f3d..6416c3b43b46b7e32b1e537d04d7f96be262c190 100644 |
| --- a/src/mark-compact.cc |
| +++ b/src/mark-compact.cc |
| @@ -28,6 +28,7 @@ |
| #include "v8.h" |
| #include "compilation-cache.h" |
| +#include "deoptimizer.h" |
| #include "execution.h" |
| #include "gdb-jit.h" |
| #include "global-handles.h" |
| @@ -258,22 +259,6 @@ bool MarkCompactCollector::StartCompaction() { |
| } |
| -void MarkCompactCollector::AbortCompaction() { |
| - if (compacting_) { |
| - int npages = evacuation_candidates_.length(); |
| - for (int i = 0; i < npages; i++) { |
| - Page* p = evacuation_candidates_[i]; |
| - slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); |
| - p->ClearEvacuationCandidate(); |
| - p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
| - } |
| - compacting_ = false; |
| - evacuation_candidates_.Rewind(0); |
| - } |
| - ASSERT_EQ(0, evacuation_candidates_.length()); |
| -} |
| - |
| - |
| void MarkCompactCollector::CollectGarbage() { |
| // Make sure that Prepare() has been called. The individual steps below will |
| // update the state as they proceed. |
| @@ -463,6 +448,23 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { |
| } |
| +void MarkCompactCollector::AbortCompaction() { |
| + if (compacting_) { |
| + int npages = evacuation_candidates_.length(); |
| + for (int i = 0; i < npages; i++) { |
| + Page* p = evacuation_candidates_[i]; |
| + slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); |
| + p->ClearEvacuationCandidate(); |
| + p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
| + } |
| + compacting_ = false; |
| + evacuation_candidates_.Rewind(0); |
| + invalidated_code_.Rewind(0); |
| + } |
| + ASSERT_EQ(0, evacuation_candidates_.length()); |
| +} |
| + |
| + |
| void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| FLAG_flush_code = false; |
| @@ -2844,7 +2846,123 @@ static void SweepPrecisely(PagedSpace* space, |
| } |
| +static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { |
| + Page* p = Page::FromAddress(code->address()); |
| + |
| + if (p->IsEvacuationCandidate() || |
| + p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| + return false; |
| + } |
| + |
| + Address code_start = code->address(); |
| + Address code_end = code_start + code->Size(); |
| + |
| + uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start); |
| + uint32_t end_index = |
| + MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize); |
| + |
| + Bitmap* b = p->markbits(); |
| + |
| + MarkBit start_mark_bit = b->MarkBitFromIndex(start_index); |
| + MarkBit end_mark_bit = b->MarkBitFromIndex(end_index); |
| + |
| + MarkBit::CellType* start_cell = start_mark_bit.cell(); |
| + MarkBit::CellType* end_cell = end_mark_bit.cell(); |
| + |
| + if (value) { |
| + MarkBit::CellType start_mask = ~(start_mark_bit.mask() - 1); |
| + MarkBit::CellType end_mask = (end_mark_bit.mask() << 1) - 1; |
| + |
| + if (start_cell == end_cell) { |
| + *start_cell |= start_mask & end_mask; |
| + } else { |
| + *start_cell |= start_mask; |
| + for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) { |
| + *cell = ~0; |
| + } |
| + *end_cell |= end_mask; |
| + } |
| + } else { |
| + for (MarkBit::CellType* cell = start_cell; cell <= end_cell; cell++) { |
| + *cell = 0; |
| + } |
| + } |
| + |
| + return true; |
| +} |
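
The mask arithmetic above is compact; here is a minimal standalone sketch of how it selects a contiguous bit range, assuming 32-bit cells and illustrative bit positions (plain uint32_t stand-ins, not V8's MarkBit types):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Suppose the code object's first word maps to bit 3 and its
      // last word to bit 9 of the same cell.
      uint32_t start_bit = 1u << 3;
      uint32_t end_bit = 1u << 9;

      // ~(start - 1) keeps bit 3 and every higher bit: 0xFFFFFFF8.
      uint32_t start_mask = ~(start_bit - 1);
      // ((end << 1) - 1) keeps bit 9 and every lower bit: 0x000003FF.
      uint32_t end_mask = (end_bit << 1) - 1;

      // Single-cell case: the intersection is exactly bits 3..9.
      uint32_t cell = start_mask & end_mask;
      assert(cell == 0x3F8u);  // binary 11'1111'1000
      return 0;
    }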
| + |
| + |
| +static bool IsOnInvalidatedCodeObject(Address addr) { |
| + Page* p = Page::FromAddress(addr); |
Erik Corry 2011/09/22 09:10:47
This doesn't work for large objects. FromAnyPointe
| + |
| + // First check owners identity because old pointer and old data spaces |
Erik Corry 2011/09/22 09:10:47
owners -> owner's
| + // are swept lazily and might still have non-zero mark-bits on some |
| + // pages. |
| + if (p->owner()->identity() != CODE_SPACE) return false; |
| + |
| + // In code space, the only non-zero mark-bits are those on evacuation |
| + // candidates (though we don't record any slots on them) and those |
| + // under invalidated code objects. |
| + MarkBit mark_bit = |
| + p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); |
| + |
| + return mark_bit.Get(); |
| +} |
| + |
| + |
| +void MarkCompactCollector::InvalidateCode(Code* code) { |
| + if (heap_->incremental_marking()->IsCompacting() && |
| + !ShouldSkipEvacuationSlotRecording(code)) { |
| + ASSERT(compacting_); |
| + |
| + // If the object is white, then no slots were recorded on it yet. |
| + MarkBit mark_bit = Marking::MarkBitFrom(code); |
| + if (Marking::IsWhite(mark_bit)) return; |
| + |
| + invalidated_code_.Add(code); |
| + } |
| +} |
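
For orientation, a hypothetical call site: the deoptimizer.h include added at the top of this diff suggests code patching during deoptimization is what triggers invalidation. The entry point named below is an assumption for illustration, not code from this CL:

    // Hypothetical caller sketch -- not V8's actual deoptimizer code.
    // When a Code object is patched, slots recorded inside it during
    // incremental marking go stale, so the collector must be told to
    // filter them out.
    void NotifyCodePatched(Heap* heap, Code* code) {
      heap->mark_compact_collector()->InvalidateCode(code);
    }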
| + |
| + |
| +bool MarkCompactCollector::MarkInvalidatedCode() { |
| + bool code_marked = false; |
| + |
| + int length = invalidated_code_.length(); |
| + for (int i = 0; i < length; i++) { |
| + Code* code = invalidated_code_[i]; |
| + |
| + if (SetMarkBitsUnderInvalidatedCode(code, true)) { |
| + code_marked = true; |
| + } |
| + } |
| + |
| + return code_marked; |
| +} |
| + |
| + |
| +void MarkCompactCollector::RemoveDeadInvalidatedCode() { |
| + int length = invalidated_code_.length(); |
| + for (int i = 0; i < length; i++) { |
| + if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL; |
| + } |
| +} |
| + |
| + |
| +void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) { |
| + int length = invalidated_code_.length(); |
| + for (int i = 0; i < length; i++) { |
| + Code* code = invalidated_code_[i]; |
| + if (code != NULL) { |
| + code->Iterate(visitor); |
| + SetMarkBitsUnderInvalidatedCode(code, false); |
| + } |
| + } |
| + invalidated_code_.Rewind(0); |
| +} |
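
Pieced together with the hunks below, the new bookkeeping runs in this order over a collection cycle; a condensed sketch as commented C++ (not an actual driver function in the tree):

    // During incremental marking, as code objects are patched:
    //   collector->InvalidateCode(code);
    // After marking completes, inside SweepSpaces():
    //   RemoveDeadInvalidatedCode();           // drop entries proven dead
    //   SweepSpace(heap()->code_space(), PRECISE);
    // Inside EvacuateNewSpaceAndCandidates():
    //   bool filter = MarkInvalidatedCode();   // re-mark bits under survivors
    //   ...evacuation and slot updating...
    //   SlotsBuffer::UpdateSlotsRecordedIn(heap_, buffer, filter);
    //   ProcessInvalidatedCode(&updating_visitor);  // visit, clear bits, rewind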
| + |
| + |
| void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| + bool code_slots_filtering_required = MarkInvalidatedCode(); |
| + |
| EvacuateNewSpace(); |
| EvacuatePages(); |
| @@ -2874,7 +2992,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); |
| } |
| - SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_); |
| + SlotsBuffer::UpdateSlotsRecordedIn(heap_, |
| + migration_slots_buffer_, |
| + code_slots_filtering_required); |
| if (FLAG_trace_fragmentation) { |
| PrintF(" migration slots buffer: %d\n", |
| SlotsBuffer::SizeOfChain(migration_slots_buffer_)); |
| @@ -2887,7 +3007,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
| if (p->IsEvacuationCandidate()) { |
| - SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer()); |
| + SlotsBuffer::UpdateSlotsRecordedIn(heap_, |
| + p->slots_buffer(), |
| + code_slots_filtering_required); |
| if (FLAG_trace_fragmentation) { |
| PrintF(" page %p slots buffer: %d\n", |
| reinterpret_cast<void*>(p), |
| @@ -2959,6 +3081,10 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
| EvacuationWeakObjectRetainer evacuation_object_retainer; |
| heap()->ProcessWeakReferences(&evacuation_object_retainer); |
| + // Visit invalidated code (we ignored all slots on it) and clear mark-bits |
| + // under it. |
| + ProcessInvalidatedCode(&updating_visitor); |
| + |
| #ifdef DEBUG |
| if (FLAG_verify_heap) { |
| VerifyEvacuation(heap_); |
| @@ -3480,11 +3606,16 @@ void MarkCompactCollector::SweepSpaces() { |
| // non-live objects. |
| SweepSpace(heap()->old_pointer_space(), how_to_sweep); |
| SweepSpace(heap()->old_data_space(), how_to_sweep); |
| + |
| + RemoveDeadInvalidatedCode(); |
| SweepSpace(heap()->code_space(), PRECISE); |
| + |
| SweepSpace(heap()->cell_space(), PRECISE); |
| + |
| { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| EvacuateNewSpaceAndCandidates(); |
| } |
| + |
| // ClearNonLiveTransitions depends on precise sweeping of map space to |
| // detect whether unmarked map became dead in this collection or in one |
| // of the previous ones. |
| @@ -3626,6 +3757,29 @@ void SlotsBuffer::UpdateSlots(Heap* heap) { |
| } |
| +void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) { |
| + PointersUpdatingVisitor v(heap); |
| + |
| + for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { |
| + ObjectSlot slot = slots_[slot_idx]; |
| + if (!IsTypedSlot(slot)) { |
| + if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) { |
| + UpdateSlot(slot); |
| + } |
| + } else { |
| + ++slot_idx; |
| + ASSERT(slot_idx < idx_); |
| + Address pc = reinterpret_cast<Address>(slots_[slot_idx]); |
| + if (!IsOnInvalidatedCodeObject(pc)) { |
| + UpdateSlot(&v, |
| + DecodeSlotType(slot), |
| + reinterpret_cast<Address>(slots_[slot_idx])); |
| + } |
| + } |
| + } |
| +} |
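
The loop above relies on the slots buffer's pair encoding for typed slots: a type-marker entry followed immediately by the code address (pc). A self-contained sketch of that traversal, with illustrative stand-ins for the V8 types (the marker range and values are invented for the example):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Stand-in for V8's ObjectSlot buffer entries: small values double
    // as type markers, larger values are addresses.
    using Entry = uintptr_t;
    const Entry kTypedSlotLimit = 16;  // hypothetical marker range

    static bool IsTypedSlot(Entry e) { return e < kTypedSlotLimit; }

    int main() {
      // Buffer: one plain slot, then a typed pair {type=3, pc=0x2000}.
      std::vector<Entry> slots = {0x1000, 3, 0x2000};

      for (size_t i = 0; i < slots.size(); ++i) {
        if (!IsTypedSlot(slots[i])) {
          printf("plain slot at %#zx\n", (size_t)slots[i]);
        } else {
          ++i;  // the payload (pc) lives in the next entry
          printf("typed slot, type %zu, pc %#zx\n",
                 (size_t)slots[i - 1], (size_t)slots[i]);
        }
      }
      return 0;
    }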
| + |
| + |
| SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { |
| return new SlotsBuffer(next_buffer); |
| } |