OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 539 matching lines...)
550 // to only refill them for the old space. | 550 // to only refill them for the old space. |
551 return; | 551 return; |
552 } | 552 } |
553 | 553 |
554 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); | 554 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); |
555 space->AddToAccountingStats(freed_bytes); | 555 space->AddToAccountingStats(freed_bytes); |
556 space->DecrementUnsweptFreeBytes(freed_bytes); | 556 space->DecrementUnsweptFreeBytes(freed_bytes); |
557 } | 557 } |
558 | 558 |
559 | 559 |
| 560 void Marking::SetAllMarkBitsInRange(MarkBit start, MarkBit end) { |
| 561 MarkBit::CellType* start_cell = start.cell(); |
| 562 MarkBit::CellType* end_cell = end.cell(); |
| 563 MarkBit::CellType start_mask = ~(start.mask() - 1); |
| 564 MarkBit::CellType end_mask = (end.mask() << 1) - 1; |
| 565 |
| 566 if (start_cell == end_cell) { |
| 567 *start_cell |= start_mask & end_mask; |
| 568 } else { |
| 569 *start_cell |= start_mask; |
| 570 for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) { |
| 571 *cell = ~0; |
| 572 } |
| 573 *end_cell |= end_mask; |
| 574 } |
| 575 } |
| 576 |
| 577 |
| 578 void Marking::ClearAllMarkBitsOfCellsContainedInRange(MarkBit start, |
| 579 MarkBit end) { |
| 580 MarkBit::CellType* start_cell = start.cell(); |
| 581 MarkBit::CellType* end_cell = end.cell(); |
| 582 for (MarkBit::CellType* cell = start_cell; cell <= end_cell; cell++) { |
| 583 *cell = 0; |
| 584 } |
| 585 } |
| 586 |
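The mask arithmetic in SetAllMarkBitsInRange above is easy to misread: start.mask() and end.mask() are single-bit cell masks, so ~(start.mask() - 1) keeps the start bit and every higher bit, while (end.mask() << 1) - 1 keeps the end bit and every lower bit; their intersection is the inclusive in-cell range. A standalone sanity check of that arithmetic (hypothetical bit positions, not taken from this CL):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Single-bit masks, as MarkBit::mask() would produce them.
      uint32_t start_bit = uint32_t{1} << 3;  // range starts at bit 3
      uint32_t end_bit = uint32_t{1} << 6;    // range ends at bit 6, inclusive

      uint32_t start_mask = ~(start_bit - 1);  // bits 3..31 set
      uint32_t end_mask = (end_bit << 1) - 1;  // bits 0..6 set

      // The single-cell case of SetAllMarkBitsInRange sets bits 3..6.
      assert((start_mask & end_mask) == 0x78u);  // 0b01111000
      return 0;
    }

ClearAllMarkBitsOfCellsContainedInRange, by contrast, is deliberately coarser: it zeroes every cell the range touches, boundary bits included, as its name signals; that is presumably acceptable because its only caller below (SetMarkBitsUnderInvalidatedCode) operates on whole code objects.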
| 587 |
560 void Marking::TransferMark(Address old_start, Address new_start) { | 588 void Marking::TransferMark(Address old_start, Address new_start) { |
561 // This is only used when resizing an object. | 589 // This is only used when resizing an object. |
562 DCHECK(MemoryChunk::FromAddress(old_start) == | 590 DCHECK(MemoryChunk::FromAddress(old_start) == |
563 MemoryChunk::FromAddress(new_start)); | 591 MemoryChunk::FromAddress(new_start)); |
564 | 592 |
565 if (!heap_->incremental_marking()->IsMarking()) return; | 593 if (!heap_->incremental_marking()->IsMarking()) return; |
566 | 594 |
567 // If the mark doesn't move, we don't check the color of the object. | 595 // If the mark doesn't move, we don't check the color of the object. |
568 // It doesn't matter whether the object is black, since it hasn't changed | 596 // It doesn't matter whether the object is black, since it hasn't changed |
569 // size, so the adjustment to the live data count will be zero anyway. | 597 // size, so the adjustment to the live data count will be zero anyway. |
(...skipping 172 matching lines...)
742 if (compacting_) { | 770 if (compacting_) { |
743 int npages = evacuation_candidates_.length(); | 771 int npages = evacuation_candidates_.length(); |
744 for (int i = 0; i < npages; i++) { | 772 for (int i = 0; i < npages; i++) { |
745 Page* p = evacuation_candidates_[i]; | 773 Page* p = evacuation_candidates_[i]; |
746 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); | 774 slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address()); |
747 p->ClearEvacuationCandidate(); | 775 p->ClearEvacuationCandidate(); |
748 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); | 776 p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION); |
749 } | 777 } |
750 compacting_ = false; | 778 compacting_ = false; |
751 evacuation_candidates_.Rewind(0); | 779 evacuation_candidates_.Rewind(0); |
| 780 invalidated_code_.Rewind(0); |
752 } | 781 } |
753 DCHECK_EQ(0, evacuation_candidates_.length()); | 782 DCHECK_EQ(0, evacuation_candidates_.length()); |
754 } | 783 } |
755 | 784 |
756 | 785 |
757 void MarkCompactCollector::Prepare() { | 786 void MarkCompactCollector::Prepare() { |
758 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); | 787 was_marked_incrementally_ = heap()->incremental_marking()->IsMarking(); |
759 | 788 |
760 #ifdef DEBUG | 789 #ifdef DEBUG |
761 DCHECK(state_ == IDLE); | 790 DCHECK(state_ == IDLE); |
(...skipping 2462 matching lines...)
3224 // The target object has to be black. | 3253 // The target object has to be black. |
3225 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3254 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
3226 | 3255 |
3227 // The target object is black but we don't know if the source slot is black. | 3256 // The target object is black but we don't know if the source slot is black. |
3228 // The source object could have died and the slot could be part of a free | 3257 // The source object could have died and the slot could be part of a free |
3229 // space. Use the mark bit iterator to find out about liveness of the slot. | 3258 // space. Use the mark bit iterator to find out about liveness of the slot. |
3230 CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot)); | 3259 CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot)); |
3231 } | 3260 } |
3232 | 3261 |
3233 | 3262 |
3234 void MarkCompactCollector::RemoveObjectSlots(Address start_slot, | |
3235 Address end_slot) { | |
3236 // Remove entries by replacing them with an old-space slot containing a smi | |
3237 // that is located in an unmovable page. | |
3238 int npages = evacuation_candidates_.length(); | |
3239 for (int i = 0; i < npages; i++) { | |
3240 Page* p = evacuation_candidates_[i]; | |
3241 DCHECK(p->IsEvacuationCandidate() || | |
3242 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | |
3243 if (p->IsEvacuationCandidate()) { | |
3244 SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), start_slot, | |
3245 end_slot); | |
3246 } | |
3247 } | |
3248 } | |
3249 | |
3250 | |
3251 void MarkCompactCollector::EvacuateNewSpace() { | 3263 void MarkCompactCollector::EvacuateNewSpace() { |
3252 // There are soft limits in the allocation code, designed trigger a mark | 3264 // There are soft limits in the allocation code, designed trigger a mark |
3253 // sweep collection by failing allocations. But since we are already in | 3265 // sweep collection by failing allocations. But since we are already in |
3254 // a mark-sweep allocation, there is no sense in trying to trigger one. | 3266 // a mark-sweep allocation, there is no sense in trying to trigger one. |
3255 AlwaysAllocateScope scope(isolate()); | 3267 AlwaysAllocateScope scope(isolate()); |
3256 | 3268 |
3257 NewSpace* new_space = heap()->new_space(); | 3269 NewSpace* new_space = heap()->new_space(); |
3258 | 3270 |
3259 // Store allocation range before flipping semispaces. | 3271 // Store allocation range before flipping semispaces. |
3260 Address from_bottom = new_space->bottom(); | 3272 Address from_bottom = new_space->bottom(); |
(...skipping 291 matching lines...)
3552 // When concurrent sweeping is active, the page will be marked after | 3564 // When concurrent sweeping is active, the page will be marked after |
3553 // sweeping by the main thread. | 3565 // sweeping by the main thread. |
3554 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); | 3566 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); |
3555 } else { | 3567 } else { |
3556 p->SetWasSwept(); | 3568 p->SetWasSwept(); |
3557 } | 3569 } |
3558 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 3570 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
3559 } | 3571 } |
3560 | 3572 |
3561 | 3573 |
| 3574 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { |
| 3575 Page* p = Page::FromAddress(code->address()); |
| 3576 |
| 3577 if (p->IsEvacuationCandidate() || p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| 3578 return false; |
| 3579 } |
| 3580 |
| 3581 Address code_start = code->address(); |
| 3582 Address code_end = code_start + code->Size(); |
| 3583 |
| 3584 uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start); |
| 3585 uint32_t end_index = |
| 3586 MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize); |
| 3587 |
| 3588 // TODO(hpayer): Filter out invalidated code in |
| 3589 // ClearInvalidSlotsBufferEntries. |
| 3590 Bitmap* b = p->markbits(); |
| 3591 |
| 3592 MarkBit start_mark_bit = b->MarkBitFromIndex(start_index); |
| 3593 MarkBit end_mark_bit = b->MarkBitFromIndex(end_index); |
| 3594 |
| 3595 if (value) { |
| 3596 Marking::SetAllMarkBitsInRange(start_mark_bit, end_mark_bit); |
| 3597 } else { |
| 3598 Marking::ClearAllMarkBitsOfCellsContainedInRange(start_mark_bit, |
| 3599 end_mark_bit); |
| 3600 } |
| 3601 |
| 3602 return true; |
| 3603 } |
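The end index here is taken at code_end - kPointerSize, so [start_index, end_index] is an inclusive range over the object's words rather than one-past-the-end. A minimal model of that mapping (the constants and the body of FastAddressToMarkbitIndex are assumptions for illustration, not copied from V8):

    #include <cstdint>
    #include <cstdio>

    // Assumed layout: one mark bit per pointer-sized word, pages aligned
    // to 1 MB (hypothetical stand-ins for the real V8 constants).
    constexpr uintptr_t kPointerSize = 8;
    constexpr uintptr_t kPageAlignmentMask = (uintptr_t{1} << 20) - 1;

    uint32_t FastAddressToMarkbitIndex(uintptr_t addr) {
      return static_cast<uint32_t>((addr & kPageAlignmentMask) / kPointerSize);
    }

    int main() {
      uintptr_t code_start = 0x40000;        // hypothetical code address
      uintptr_t code_end = code_start + 64;  // a 64-byte code object

      uint32_t start_index = FastAddressToMarkbitIndex(code_start);
      // Stepping back one pointer lands on the object's last word, which
      // makes the index range inclusive.
      uint32_t end_index = FastAddressToMarkbitIndex(code_end - kPointerSize);
      std::printf("bits to set: %u\n", end_index - start_index + 1);  // 8
      return 0;
    }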
| 3604 |
| 3605 |
| 3606 static bool IsOnInvalidatedCodeObject(Address addr) { |
| 3607 // We did not record any slots in large objects, thus |
| 3608 // we can safely go to the page from the slot address. |
| 3609 Page* p = Page::FromAddress(addr); |
| 3610 |
| 3611 // First check owner's identity because old space is swept concurrently or |
| 3612 // lazily and might still have non-zero mark-bits on some pages. |
| 3613 if (p->owner()->identity() != CODE_SPACE) return false; |
| 3614 |
| 3615 // In code space, the only non-zero mark bits are on evacuation candidates |
| 3616 // (on which we record no slots) and under invalidated code objects. |
| 3617 MarkBit mark_bit = |
| 3618 p->markbits()->MarkBitFromIndex(Page::FastAddressToMarkbitIndex(addr)); |
| 3619 |
| 3620 return Marking::IsBlackOrGrey(mark_bit); |
| 3621 } |
| 3622 |
| 3623 |
| 3624 void MarkCompactCollector::InvalidateCode(Code* code) { |
| 3625 if (heap_->incremental_marking()->IsCompacting() && |
| 3626 !ShouldSkipEvacuationSlotRecording(code)) { |
| 3627 DCHECK(compacting_); |
| 3628 |
| 3629 // If the object is white, then no slots were recorded on it yet. |
| 3630 MarkBit mark_bit = Marking::MarkBitFrom(code); |
| 3631 if (Marking::IsWhite(mark_bit)) return; |
| 3632 |
| 3633 invalidated_code_.Add(code); |
| 3634 } |
| 3635 } |
| 3636 |
| 3637 |
3562 // Return true if the given code is deoptimized or will be deoptimized. | 3638 // Return true if the given code is deoptimized or will be deoptimized. |
3563 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3639 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3564 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3640 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3565 } | 3641 } |
3566 | 3642 |
3567 | 3643 |
| 3644 bool MarkCompactCollector::MarkInvalidatedCode() { |
| 3645 bool code_marked = false; |
| 3646 |
| 3647 int length = invalidated_code_.length(); |
| 3648 for (int i = 0; i < length; i++) { |
| 3649 Code* code = invalidated_code_[i]; |
| 3650 |
| 3651 if (SetMarkBitsUnderInvalidatedCode(code, true)) { |
| 3652 code_marked = true; |
| 3653 } |
| 3654 } |
| 3655 |
| 3656 return code_marked; |
| 3657 } |
| 3658 |
| 3659 |
| 3660 void MarkCompactCollector::RemoveDeadInvalidatedCode() { |
| 3661 int length = invalidated_code_.length(); |
| 3662 for (int i = 0; i < length; i++) { |
| 3663 if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL; |
| 3664 } |
| 3665 } |
| 3666 |
| 3667 |
| 3668 void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) { |
| 3669 int length = invalidated_code_.length(); |
| 3670 for (int i = 0; i < length; i++) { |
| 3671 Code* code = invalidated_code_[i]; |
| 3672 if (code != NULL) { |
| 3673 code->Iterate(visitor); |
| 3674 SetMarkBitsUnderInvalidatedCode(code, false); |
| 3675 } |
| 3676 } |
| 3677 invalidated_code_.Rewind(0); |
| 3678 } |
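For orientation, the pieces above are wired together across the collection cycle: InvalidateCode records deoptimized code while compacting incremental marking is active, MarkInvalidatedCode runs at the start of EvacuateNewSpaceAndCandidates and reports whether slot filtering is needed, RemoveDeadInvalidatedCode runs after old space is swept, and ProcessInvalidatedCode revisits the survivors once weak references have been processed. A toy model of just the list's lifecycle (a sketch; mark bits and the real GC plumbing are elided):

    #include <vector>

    struct Code { bool marked_alive = false; };

    struct Collector {
      std::vector<Code*> invalidated_code_;

      // Recorded during compacting incremental marking.
      void InvalidateCode(Code* code) { invalidated_code_.push_back(code); }

      // Before pointer updating: simplified here to "anything recorded".
      bool MarkInvalidatedCode() const { return !invalidated_code_.empty(); }

      // After sweeping: drop entries that did not survive marking.
      void RemoveDeadInvalidatedCode() {
        for (Code*& code : invalidated_code_)
          if (!code->marked_alive) code = nullptr;
      }

      // After evacuation: revisit survivors, then rewind the list.
      void ProcessInvalidatedCode() {
        // (visit each non-null entry and clear its mark bits)
        invalidated_code_.clear();
      }
    };

    int main() {
      Collector c;
      Code dead, live;
      live.marked_alive = true;
      c.InvalidateCode(&dead);
      c.InvalidateCode(&live);
      bool filter = c.MarkInvalidatedCode();  // true
      c.RemoveDeadInvalidatedCode();          // the dead entry becomes nullptr
      c.ProcessInvalidatedCode();             // list rewound
      (void)filter;
      return 0;
    }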
| 3679 |
| 3680 |
3568 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 3681 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
3569 Heap::RelocationLock relocation_lock(heap()); | 3682 Heap::RelocationLock relocation_lock(heap()); |
3570 | 3683 |
| 3684 bool code_slots_filtering_required; |
3571 { | 3685 { |
3572 GCTracer::Scope gc_scope(heap()->tracer(), | 3686 GCTracer::Scope gc_scope(heap()->tracer(), |
3573 GCTracer::Scope::MC_SWEEP_NEWSPACE); | 3687 GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| 3688 code_slots_filtering_required = MarkInvalidatedCode(); |
3574 EvacuationScope evacuation_scope(this); | 3689 EvacuationScope evacuation_scope(this); |
3575 EvacuateNewSpace(); | 3690 EvacuateNewSpace(); |
3576 } | 3691 } |
3577 | 3692 |
3578 { | 3693 { |
3579 GCTracer::Scope gc_scope(heap()->tracer(), | 3694 GCTracer::Scope gc_scope(heap()->tracer(), |
3580 GCTracer::Scope::MC_EVACUATE_PAGES); | 3695 GCTracer::Scope::MC_EVACUATE_PAGES); |
3581 EvacuationScope evacuation_scope(this); | 3696 EvacuationScope evacuation_scope(this); |
3582 EvacuatePages(); | 3697 EvacuatePages(); |
3583 } | 3698 } |
(...skipping 26 matching lines...)
3610 GCTracer::Scope gc_scope(heap()->tracer(), | 3725 GCTracer::Scope gc_scope(heap()->tracer(), |
3611 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); | 3726 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); |
3612 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), | 3727 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), |
3613 &Heap::ScavengeStoreBufferCallback); | 3728 &Heap::ScavengeStoreBufferCallback); |
3614 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); | 3729 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); |
3615 } | 3730 } |
3616 | 3731 |
3617 { | 3732 { |
3618 GCTracer::Scope gc_scope(heap()->tracer(), | 3733 GCTracer::Scope gc_scope(heap()->tracer(), |
3619 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); | 3734 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); |
3620 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_); | 3735 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_, |
| 3736 code_slots_filtering_required); |
3621 if (FLAG_trace_fragmentation_verbose) { | 3737 if (FLAG_trace_fragmentation_verbose) { |
3622 PrintF(" migration slots buffer: %d\n", | 3738 PrintF(" migration slots buffer: %d\n", |
3623 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); | 3739 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); |
3624 } | 3740 } |
3625 | 3741 |
3626 if (compacting_ && was_marked_incrementally_) { | 3742 if (compacting_ && was_marked_incrementally_) { |
3627 // It's difficult to filter out slots recorded for large objects. | 3743 // It's difficult to filter out slots recorded for large objects. |
3628 LargeObjectIterator it(heap_->lo_space()); | 3744 LargeObjectIterator it(heap_->lo_space()); |
3629 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 3745 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
3630 // LargeObjectSpace is not swept yet thus we have to skip | 3746 // LargeObjectSpace is not swept yet thus we have to skip |
(...skipping 13 matching lines...)
3644 { | 3760 { |
3645 GCTracer::Scope gc_scope( | 3761 GCTracer::Scope gc_scope( |
3646 heap()->tracer(), | 3762 heap()->tracer(), |
3647 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); | 3763 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); |
3648 for (int i = 0; i < npages; i++) { | 3764 for (int i = 0; i < npages; i++) { |
3649 Page* p = evacuation_candidates_[i]; | 3765 Page* p = evacuation_candidates_[i]; |
3650 DCHECK(p->IsEvacuationCandidate() || | 3766 DCHECK(p->IsEvacuationCandidate() || |
3651 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3767 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
3652 | 3768 |
3653 if (p->IsEvacuationCandidate()) { | 3769 if (p->IsEvacuationCandidate()) { |
3654 SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer()); | 3770 SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer(), |
| 3771 code_slots_filtering_required); |
3655 if (FLAG_trace_fragmentation_verbose) { | 3772 if (FLAG_trace_fragmentation_verbose) { |
3656 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), | 3773 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), |
3657 SlotsBuffer::SizeOfChain(p->slots_buffer())); | 3774 SlotsBuffer::SizeOfChain(p->slots_buffer())); |
3658 } | 3775 } |
3659 | 3776 |
3660 // Important: skip list should be cleared only after roots were updated | 3777 // Important: skip list should be cleared only after roots were updated |
3661 // because root iteration traverses the stack and might have to find | 3778 // because root iteration traverses the stack and might have to find |
3662 // code objects from a non-updated pc pointing into an evacuation candidate. | 3779 // code objects from a non-updated pc pointing into an evacuation candidate. |
3663 SkipList* list = p->skip_list(); | 3780 SkipList* list = p->skip_list(); |
3664 if (list != NULL) list->Clear(); | 3781 if (list != NULL) list->Clear(); |
(...skipping 35 matching lines...)
3700 | 3817 |
3701 heap_->string_table()->Iterate(&updating_visitor); | 3818 heap_->string_table()->Iterate(&updating_visitor); |
3702 | 3819 |
3703 // Update pointers from external string table. | 3820 // Update pointers from external string table. |
3704 heap_->UpdateReferencesInExternalStringTable( | 3821 heap_->UpdateReferencesInExternalStringTable( |
3705 &UpdateReferenceInExternalStringTableEntry); | 3822 &UpdateReferenceInExternalStringTableEntry); |
3706 | 3823 |
3707 EvacuationWeakObjectRetainer evacuation_object_retainer; | 3824 EvacuationWeakObjectRetainer evacuation_object_retainer; |
3708 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); | 3825 heap()->ProcessAllWeakReferences(&evacuation_object_retainer); |
3709 | 3826 |
| 3827 // Visit invalidated code (we ignored all slots on it) and clear mark-bits |
| 3828 // under it. |
| 3829 ProcessInvalidatedCode(&updating_visitor); |
| 3830 |
3710 heap_->isolate()->inner_pointer_to_code_cache()->Flush(); | 3831 heap_->isolate()->inner_pointer_to_code_cache()->Flush(); |
3711 | 3832 |
3712 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); | 3833 slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_); |
3713 DCHECK(migration_slots_buffer_ == NULL); | 3834 DCHECK(migration_slots_buffer_ == NULL); |
3714 | 3835 |
3715 // The hashing of weak_object_to_code_table is no longer valid. | 3836 // The hashing of weak_object_to_code_table is no longer valid. |
3716 heap()->weak_object_to_code_table()->Rehash( | 3837 heap()->weak_object_to_code_table()->Rehash( |
3717 heap()->isolate()->factory()->undefined_value()); | 3838 heap()->isolate()->factory()->undefined_value()); |
3718 } | 3839 } |
3719 | 3840 |
(...skipping 569 matching lines...)
4289 // non-live objects. | 4410 // non-live objects. |
4290 { | 4411 { |
4291 GCTracer::Scope sweep_scope(heap()->tracer(), | 4412 GCTracer::Scope sweep_scope(heap()->tracer(), |
4292 GCTracer::Scope::MC_SWEEP_OLDSPACE); | 4413 GCTracer::Scope::MC_SWEEP_OLDSPACE); |
4293 { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); } | 4414 { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); } |
4294 sweeping_in_progress_ = true; | 4415 sweeping_in_progress_ = true; |
4295 if (heap()->concurrent_sweeping_enabled()) { | 4416 if (heap()->concurrent_sweeping_enabled()) { |
4296 StartSweeperThreads(); | 4417 StartSweeperThreads(); |
4297 } | 4418 } |
4298 } | 4419 } |
| 4420 RemoveDeadInvalidatedCode(); |
| 4421 |
4299 { | 4422 { |
4300 GCTracer::Scope sweep_scope(heap()->tracer(), | 4423 GCTracer::Scope sweep_scope(heap()->tracer(), |
4301 GCTracer::Scope::MC_SWEEP_CODE); | 4424 GCTracer::Scope::MC_SWEEP_CODE); |
4302 SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING); | 4425 SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING); |
4303 } | 4426 } |
4304 | 4427 |
4305 EvacuateNewSpaceAndCandidates(); | 4428 EvacuateNewSpaceAndCandidates(); |
4306 | 4429 |
4307 heap()->FreeDeadArrayBuffers(false); | 4430 heap()->FreeDeadArrayBuffers(false); |
4308 | 4431 |
(...skipping 136 matching lines...)
4445 } else { | 4568 } else { |
4446 ++slot_idx; | 4569 ++slot_idx; |
4447 DCHECK(slot_idx < slots_count); | 4570 DCHECK(slot_idx < slots_count); |
4448 } | 4571 } |
4449 } | 4572 } |
4450 buffer = buffer->next(); | 4573 buffer = buffer->next(); |
4451 } | 4574 } |
4452 } | 4575 } |
4453 | 4576 |
4454 | 4577 |
4455 void SlotsBuffer::RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer, | |
4456 Address start_slot, Address end_slot) { | |
4457 // Remove entries by replacing them with an old-space slot containing a smi | |
4458 // that is located in an unmovable page. | |
4459 const ObjectSlot kRemovedEntry = HeapObject::RawField( | |
4460 heap->empty_fixed_array(), FixedArrayBase::kLengthOffset); | |
4461 DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry)) | |
4462 ->NeverEvacuate()); | |
4463 | |
4464 while (buffer != NULL) { | |
4465 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | |
4466 intptr_t slots_count = buffer->idx_; | |
4467 bool is_typed_slot = false; | |
4468 | |
4469 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | |
4470 ObjectSlot slot = slots[slot_idx]; | |
4471 if (!IsTypedSlot(slot)) { | |
4472 Address slot_address = reinterpret_cast<Address>(slot); | |
4473 if (slot_address >= start_slot && slot_address < end_slot) { | |
4474 slots[slot_idx] = kRemovedEntry; | |
4475 if (is_typed_slot) { | |
4476 slots[slot_idx - 1] = kRemovedEntry; | |
4477 } | |
4478 } | |
4479 is_typed_slot = false; | |
4480 } else { | |
4481 is_typed_slot = true; | |
4482 DCHECK(slot_idx < slots_count); | |
4483 } | |
4484 } | |
4485 buffer = buffer->next(); | |
4486 } | |
4487 } | |
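The deleted helper above relies on a sentinel trick that is easy to miss: instead of compacting the buffer, an entry is "removed" by overwriting it with the address of the empty fixed array's length field, which always holds a smi and sits on a NeverEvacuate() page, so the later update pass treats the slot as a harmless no-op. A schematic of why a smi-valued slot is inert (simplified tagging; names are illustrative):

    #include <cassert>
    #include <cstdint>

    using Word = uintptr_t;

    // V8-style tagging, simplified: smis have the low bit clear.
    bool IsSmi(Word w) { return (w & 1) == 0; }

    // Stand-in for the pointer updater: only heap-object slots are touched.
    void UpdateSlot(Word* slot) {
      if (IsSmi(*slot)) return;  // "removed" entries fall out here
      // ... otherwise relocate the heap object the slot points to ...
    }

    int main() {
      // The length field of the (unmovable) empty fixed array: always a smi.
      static Word unmovable_length_field = Word{42} << 1;

      Word* removed_entry = &unmovable_length_field;
      UpdateSlot(removed_entry);  // no-op, as intended
      assert(IsSmi(*removed_entry));
      return 0;
    }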
4488 | |
4489 | |
4490 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) { | 4578 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) { |
4491 while (buffer != NULL) { | 4579 while (buffer != NULL) { |
4492 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | 4580 SlotsBuffer::ObjectSlot* slots = buffer->slots_; |
4493 intptr_t slots_count = buffer->idx_; | 4581 intptr_t slots_count = buffer->idx_; |
4494 | 4582 |
4495 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | 4583 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { |
4496 ObjectSlot slot = slots[slot_idx]; | 4584 ObjectSlot slot = slots[slot_idx]; |
4497 if (!IsTypedSlot(slot)) { | 4585 if (!IsTypedSlot(slot)) { |
4498 Object* object = *slot; | 4586 Object* object = *slot; |
4499 if (object->IsHeapObject()) { | 4587 if (object->IsHeapObject()) { |
(...skipping 121 matching lines...)
4621 } else { | 4709 } else { |
4622 ++slot_idx; | 4710 ++slot_idx; |
4623 DCHECK(slot_idx < idx_); | 4711 DCHECK(slot_idx < idx_); |
4624 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot), | 4712 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot), |
4625 reinterpret_cast<Address>(slots_[slot_idx])); | 4713 reinterpret_cast<Address>(slots_[slot_idx])); |
4626 } | 4714 } |
4627 } | 4715 } |
4628 } | 4716 } |
4629 | 4717 |
4630 | 4718 |
| 4719 void SlotsBuffer::UpdateSlotsWithFilter(Heap* heap) { |
| 4720 PointersUpdatingVisitor v(heap); |
| 4721 |
| 4722 for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) { |
| 4723 ObjectSlot slot = slots_[slot_idx]; |
| 4724 if (!IsTypedSlot(slot)) { |
| 4725 if (!IsOnInvalidatedCodeObject(reinterpret_cast<Address>(slot))) { |
| 4726 PointersUpdatingVisitor::UpdateSlot(heap, slot); |
| 4727 } |
| 4728 } else { |
| 4729 ++slot_idx; |
| 4730 DCHECK(slot_idx < idx_); |
| 4731 Address pc = reinterpret_cast<Address>(slots_[slot_idx]); |
| 4732 if (!IsOnInvalidatedCodeObject(pc)) { |
| 4733 UpdateSlot(heap->isolate(), &v, DecodeSlotType(slot), |
| 4734 reinterpret_cast<Address>(slots_[slot_idx])); |
| 4735 } |
| 4736 } |
| 4737 } |
| 4738 } |
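UpdateSlotsWithFilter, like the unfiltered walk above it, depends on the buffer encoding in which a typed entry occupies two consecutive words, a type marker followed by a raw address; that is why the typed branch advances slot_idx before decoding. A simplified walk over that layout (the small-value tag check mirrors the spirit of IsTypedSlot but is illustrative, not V8's implementation):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    using Slot = uintptr_t;

    // Illustrative tag check: small values are type markers, anything
    // else is a raw slot address.
    bool IsTypedSlot(Slot s) { return s < 16; }

    void WalkSlots(const std::vector<Slot>& slots) {
      for (size_t i = 0; i < slots.size(); ++i) {
        if (!IsTypedSlot(slots[i])) {
          std::printf("plain slot @ %#lx\n",
                      static_cast<unsigned long>(slots[i]));
        } else {
          // Typed entry: marker now, payload address in the next word.
          Slot type = slots[i];
          Slot pc = slots[++i];  // mirrors ++slot_idx in the real code
          std::printf("typed slot %lu @ %#lx\n",
                      static_cast<unsigned long>(type),
                      static_cast<unsigned long>(pc));
        }
      }
    }

    int main() {
      // A plain slot, a (marker = 3, address) pair, then another plain slot.
      WalkSlots({0x100020, 3, 0x100040, 0x100060});
      return 0;
    }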
| 4739 |
| 4740 |
4631 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { | 4741 SlotsBuffer* SlotsBufferAllocator::AllocateBuffer(SlotsBuffer* next_buffer) { |
4632 return new SlotsBuffer(next_buffer); | 4742 return new SlotsBuffer(next_buffer); |
4633 } | 4743 } |
4634 | 4744 |
4635 | 4745 |
4636 void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) { | 4746 void SlotsBufferAllocator::DeallocateBuffer(SlotsBuffer* buffer) { |
4637 delete buffer; | 4747 delete buffer; |
4638 } | 4748 } |
4639 | 4749 |
4640 | 4750 |
4641 void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) { | 4751 void SlotsBufferAllocator::DeallocateChain(SlotsBuffer** buffer_address) { |
4642 SlotsBuffer* buffer = *buffer_address; | 4752 SlotsBuffer* buffer = *buffer_address; |
4643 while (buffer != NULL) { | 4753 while (buffer != NULL) { |
4644 SlotsBuffer* next_buffer = buffer->next(); | 4754 SlotsBuffer* next_buffer = buffer->next(); |
4645 DeallocateBuffer(buffer); | 4755 DeallocateBuffer(buffer); |
4646 buffer = next_buffer; | 4756 buffer = next_buffer; |
4647 } | 4757 } |
4648 *buffer_address = NULL; | 4758 *buffer_address = NULL; |
4649 } | 4759 } |
4650 } // namespace internal | 4760 } // namespace internal |
4651 } // namespace v8 | 4761 } // namespace v8 |