OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
(...skipping 274 matching lines...)
285 while (it.has_next()) { | 285 while (it.has_next()) { |
286 Page* p = it.next(); | 286 Page* p = it.next(); |
287 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); | 287 SlotsBuffer::RemoveInvalidSlots(heap_, p->slots_buffer()); |
288 } | 288 } |
289 } | 289 } |
290 | 290 |
291 | 291 |
292 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { | 292 void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() { |
293 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); | 293 heap_->store_buffer()->ClearInvalidStoreBufferEntries(); |
294 | 294 |
| 295 RemoveDeoptimizedCodeSlots(); |
| 296 |
295 ClearInvalidSlotsBufferEntries(heap_->old_space()); | 297 ClearInvalidSlotsBufferEntries(heap_->old_space()); |
296 ClearInvalidSlotsBufferEntries(heap_->code_space()); | 298 ClearInvalidSlotsBufferEntries(heap_->code_space()); |
297 ClearInvalidSlotsBufferEntries(heap_->map_space()); | 299 ClearInvalidSlotsBufferEntries(heap_->map_space()); |
298 | 300 |
299 LargeObjectIterator it(heap_->lo_space()); | 301 LargeObjectIterator it(heap_->lo_space()); |
300 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { | 302 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) { |
301 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); | 303 MemoryChunk* chunk = MemoryChunk::FromAddress(object->address()); |
302 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); | 304 SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer()); |
303 } | 305 } |
304 } | 306 } |
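
Note: the new RemoveDeoptimizedCodeSlots() call runs before the per-space passes so that slots recorded inside the bodies of deoptimized code objects are already gone by the time the buffers are scanned (see the sketches after RemoveDeoptimizedCodeSlots and SlotsBuffer::RemoveObjectSlots further down). The surrounding calls perform the usual invalid-entry sweep: roughly, a recorded slot survives only if it still lies inside a live object. A minimal standalone sketch of that sweep, with liveness as a callback instead of V8's mark bits and in-place compaction standing in for V8's sentinel overwrite (names here are illustrative, not V8 API):

  #include <cstdint>
  #include <functional>
  #include <vector>

  // Keep a recorded slot only if the liveness predicate still holds for it;
  // compact the survivors to the front and shrink the buffer.
  static void ClearInvalidEntries(std::vector<uintptr_t>* slots,
                                  const std::function<bool(uintptr_t)>& live) {
    size_t kept = 0;
    for (uintptr_t slot : *slots) {
      if (live(slot)) (*slots)[kept++] = slot;
    }
    slots->resize(kept);
  }

  int main() {
    std::vector<uintptr_t> slots = {0x1000, 0x2000, 0x3000};
    ClearInvalidEntries(&slots, [](uintptr_t a) { return a != 0x2000; });
    return slots.size() == 2 ? 0 : 1;  // the 0x2000 entry was dropped
  }
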
(...skipping 245 matching lines...)
550 // to only refill them for the old space. | 552 // to only refill them for the old space. |
551 return; | 553 return; |
552 } | 554 } |
553 | 555 |
554 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); | 556 intptr_t freed_bytes = space->free_list()->Concatenate(free_list); |
555 space->AddToAccountingStats(freed_bytes); | 557 space->AddToAccountingStats(freed_bytes); |
556 space->DecrementUnsweptFreeBytes(freed_bytes); | 558 space->DecrementUnsweptFreeBytes(freed_bytes); |
557 } | 559 } |
558 | 560 |
559 | 561 |
560 void Marking::SetAllMarkBitsInRange(MarkBit start, MarkBit end) { | |
561 MarkBit::CellType* start_cell = start.cell(); | |
562 MarkBit::CellType* end_cell = end.cell(); | |
563 MarkBit::CellType start_mask = ~(start.mask() - 1); | |
564 MarkBit::CellType end_mask = (end.mask() << 1) - 1; | |
565 | |
566 if (start_cell == end_cell) { | |
567 *start_cell |= start_mask & end_mask; | |
568 } else { | |
569 *start_cell |= start_mask; | |
570 for (MarkBit::CellType* cell = start_cell + 1; cell < end_cell; cell++) { | |
571 *cell = ~0; | |
572 } | |
573 *end_cell |= end_mask; | |
574 } | |
575 } | |
576 | |
577 | |
578 void Marking::ClearAllMarkBitsOfCellsContainedInRange(MarkBit start, | |
579 MarkBit end) { | |
580 MarkBit::CellType* start_cell = start.cell(); | |
581 MarkBit::CellType* end_cell = end.cell(); | |
582 for (MarkBit::CellType* cell = start_cell; cell <= end_cell; cell++) { | |
583 *cell = 0; | |
584 } | |
585 } | |
586 | |
587 | |
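
Note: the two helpers deleted above existed only for the mark-bit trick that this patch retires. SetAllMarkBitsInRange leaned on two mask identities: ~(mask - 1) keeps the start bit and everything above it within a cell, and (mask << 1) - 1 keeps the end bit and everything below it (wrapping to all-ones when the end bit is the cell's top bit). A minimal standalone sketch of the same cell/mask arithmetic, assuming 32-bit cells and plain integer bit indices in place of V8's MarkBit objects:

  #include <cassert>
  #include <cstdint>

  // Set every bit from index |start| to |end| (inclusive) in a bitmap of
  // 32-bit cells; mirrors the deleted SetAllMarkBitsInRange mask math.
  static void SetBitsInRange(uint32_t* cells, int start, int end) {
    uint32_t* start_cell = cells + (start >> 5);
    uint32_t* end_cell = cells + (end >> 5);
    uint32_t start_mask = ~((1u << (start & 31)) - 1);  // start bit and above
    uint32_t end_mask = ((1u << (end & 31)) << 1) - 1;  // end bit and below

    if (start_cell == end_cell) {
      *start_cell |= start_mask & end_mask;
    } else {
      *start_cell |= start_mask;
      for (uint32_t* cell = start_cell + 1; cell < end_cell; cell++) {
        *cell = ~0u;
      }
      *end_cell |= end_mask;
    }
  }

  // The deleted clearing counterpart zeroed whole cells, deliberately
  // clobbering any neighbouring bits in the first and last cell.
  static void ClearCellsInRange(uint32_t* cells, int start, int end) {
    for (uint32_t* c = cells + (start >> 5); c <= cells + (end >> 5); c++) {
      *c = 0;
    }
  }

  int main() {
    uint32_t cells[3] = {0, 0, 0};
    SetBitsInRange(cells, 3, 70);          // spans all three cells
    assert(cells[0] == 0xfffffff8u);       // bits 3..31
    assert(cells[1] == 0xffffffffu);       // bits 32..63
    assert(cells[2] == 0x0000007fu);       // bits 64..70
    ClearCellsInRange(cells, 3, 70);
    assert(cells[0] == 0 && cells[1] == 0 && cells[2] == 0);
  }
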
588 void Marking::TransferMark(Address old_start, Address new_start) { | 562 void Marking::TransferMark(Address old_start, Address new_start) { |
589 // This is only used when resizing an object. | 563 // This is only used when resizing an object. |
590 DCHECK(MemoryChunk::FromAddress(old_start) == | 564 DCHECK(MemoryChunk::FromAddress(old_start) == |
591 MemoryChunk::FromAddress(new_start)); | 565 MemoryChunk::FromAddress(new_start)); |
592 | 566 |
593 if (!heap_->incremental_marking()->IsMarking()) return; | 567 if (!heap_->incremental_marking()->IsMarking()) return; |
594 | 568 |
595 // If the mark doesn't move, we don't check the color of the object. | 569 // If the mark doesn't move, we don't check the color of the object. |
596 // It doesn't matter whether the object is black, since it hasn't changed | 570 // It doesn't matter whether the object is black, since it hasn't changed |
597 // size, so the adjustment to the live data count will be zero anyway. | 571 // size, so the adjustment to the live data count will be zero anyway. |
(...skipping 2966 matching lines...)
3564 // When concurrent sweeping is active, the page will be marked after | 3538 // When concurrent sweeping is active, the page will be marked after |
3565 // sweeping by the main thread. | 3539 // sweeping by the main thread. |
3566 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); | 3540 p->set_parallel_sweeping(MemoryChunk::SWEEPING_FINALIZE); |
3567 } else { | 3541 } else { |
3568 p->SetWasSwept(); | 3542 p->SetWasSwept(); |
3569 } | 3543 } |
3570 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); | 3544 return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes)); |
3571 } | 3545 } |
3572 | 3546 |
3573 | 3547 |
3574 static bool SetMarkBitsUnderInvalidatedCode(Code* code, bool value) { | |
3575 Page* p = Page::FromAddress(code->address()); | |
3576 | |
3577 if (p->IsEvacuationCandidate() || p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { | |
3578 return false; | |
3579 } | |
3580 | |
3581 Address code_start = code->address(); | |
3582 Address code_end = code_start + code->Size(); | |
3583 | |
3584 uint32_t start_index = MemoryChunk::FastAddressToMarkbitIndex(code_start); | |
3585 uint32_t end_index = | |
3586 MemoryChunk::FastAddressToMarkbitIndex(code_end - kPointerSize); | |
3587 | |
3588 // TODO(hpayer): Filter out invalidated code in | |
3589 // ClearInvalidSlotsBufferEntries. | |
3590 Bitmap* b = p->markbits(); | |
3591 | |
3592 MarkBit start_mark_bit = b->MarkBitFromIndex(start_index); | |
3593 MarkBit end_mark_bit = b->MarkBitFromIndex(end_index); | |
3594 | |
3595 if (value) { | |
3596 Marking::SetAllMarkBitsInRange(start_mark_bit, end_mark_bit); | |
3597 } else { | |
3598 Marking::ClearAllMarkBitsOfCellsContainedInRange(start_mark_bit, | |
3599 end_mark_bit); | |
3600 } | |
3601 | |
3602 return true; | |
3603 } | |
3604 | |
3605 | |
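
Note: this helper, also deleted by the patch, mapped a code object's byte range onto a mark-bit range: one bit per pointer-aligned word, indexed by the page-relative offset divided by the word size, with the inclusive end bit taken at code_end - kPointerSize. A rough standalone model of FastAddressToMarkbitIndex under those assumptions (the page size and pointer width below are illustrative stand-ins, not V8's configured values):

  #include <cassert>
  #include <cstdint>

  // Illustrative page geometry: 1 MB pages, 8-byte words.
  constexpr uintptr_t kPageSize = uintptr_t{1} << 20;
  constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;
  constexpr int kPointerSizeLog2 = 3;
  constexpr uintptr_t kPointerSize = uintptr_t{1} << kPointerSizeLog2;

  // One mark bit per pointer-aligned word: strip the page base, then
  // divide the in-page offset by the word size.
  static uint32_t FastAddressToMarkbitIndex(uintptr_t addr) {
    return static_cast<uint32_t>((addr & kPageAlignmentMask) >>
                                 kPointerSizeLog2);
  }

  int main() {
    uintptr_t page_base = uintptr_t{1} << 30;  // any page-aligned address
    uintptr_t code_start = page_base + 0x100;
    uintptr_t code_end = code_start + 64;      // one past the last byte
    uint32_t start_index = FastAddressToMarkbitIndex(code_start);
    // The last word of the object starts at code_end - kPointerSize.
    uint32_t end_index = FastAddressToMarkbitIndex(code_end - kPointerSize);
    assert(start_index == 0x100 >> kPointerSizeLog2);
    assert(end_index == start_index + 7);      // 64 bytes == 8 words
  }
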
3606 static bool IsOnInvalidatedCodeObject(Address addr) { | 3548 static bool IsOnInvalidatedCodeObject(Address addr) { |
3607 // We did not record any slots in large objects, thus | 3549 // We did not record any slots in large objects, thus |
3608 // we can safely go to the page from the slot address. | 3550 // we can safely go to the page from the slot address. |
3609 Page* p = Page::FromAddress(addr); | 3551 Page* p = Page::FromAddress(addr); |
3610 | 3552 |
3611 // First check owner's identity because old space is swept concurrently or | 3553 // First check owner's identity because old space is swept concurrently or |
3612 // lazily and might still have non-zero mark-bits on some pages. | 3554 // lazily and might still have non-zero mark-bits on some pages. |
3613 if (p->owner()->identity() != CODE_SPACE) return false; | 3555 if (p->owner()->identity() != CODE_SPACE) return false; |
3614 | 3556 |
3615 // In code space only bits on evacuation candidates (but we don't record | 3557 // In code space only bits on evacuation candidates (but we don't record |
(...skipping 18 matching lines...)
3634 } | 3576 } |
3635 } | 3577 } |
3636 | 3578 |
3637 | 3579 |
3638 // Return true if the given code is deoptimized or will be deoptimized. | 3580 // Return true if the given code is deoptimized or will be deoptimized. |
3639 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { | 3581 bool MarkCompactCollector::WillBeDeoptimized(Code* code) { |
3640 return code->is_optimized_code() && code->marked_for_deoptimization(); | 3582 return code->is_optimized_code() && code->marked_for_deoptimization(); |
3641 } | 3583 } |
3642 | 3584 |
3643 | 3585 |
3644 bool MarkCompactCollector::MarkInvalidatedCode() { | 3586 void MarkCompactCollector::RemoveDeoptimizedCodeSlots() { |
3645 bool code_marked = false; | |
3646 | |
3647 int length = invalidated_code_.length(); | 3587 int length = invalidated_code_.length(); |
3648 for (int i = 0; i < length; i++) { | 3588 for (int i = 0; i < length; i++) { |
3649 Code* code = invalidated_code_[i]; | 3589 Code* code = invalidated_code_[i]; |
3650 | 3590 Page* p = Page::FromAddress(code->address()); |
3651 if (SetMarkBitsUnderInvalidatedCode(code, true)) { | 3591 if (!p->IsEvacuationCandidate() && |
3652 code_marked = true; | 3592 !p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) { |
| 3593 // Ignore all slots that might have been recorded in the body of the |
| 3594 // deoptimized code object. |
| 3595 RemoveObjectSlots(code->instruction_start(), |
| 3596 code->address() + code->Size()); |
3653 } | 3597 } |
3654 } | 3598 } |
3655 | |
3656 return code_marked; | |
3657 } | 3599 } |
3658 | 3600 |
3659 | 3601 |
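
Note: RemoveDeoptimizedCodeSlots is the replacement for MarkInvalidatedCode. Instead of marking every word under invalidated code and filtering slots lazily during the update pass, slots recorded inside a deoptimized code body are dropped eagerly, and pages that are evacuation candidates (or flagged for rescan) are skipped just as before. A toy model of that control flow with made-up container types; a vector erase stands in for V8's sentinel overwrite, which is shown after SlotsBuffer::RemoveObjectSlots below:

  #include <algorithm>
  #include <cstdint>
  #include <vector>

  // Each invalidated code object contributes a dead address range; any
  // recorded slot whose address falls inside such a range is dropped
  // before slots are ever updated.
  struct CodeRange {
    uintptr_t body_start;          // instruction_start()
    uintptr_t object_end;          // address() + Size()
    bool on_evacuation_candidate;  // such pages are rescanned, not filtered
  };

  static void RemoveDeoptimizedCodeSlots(
      const std::vector<CodeRange>& invalidated,
      std::vector<uintptr_t>* recorded_slots) {
    for (const CodeRange& code : invalidated) {
      if (code.on_evacuation_candidate) continue;
      recorded_slots->erase(
          std::remove_if(recorded_slots->begin(), recorded_slots->end(),
                         [&](uintptr_t slot) {
                           return slot >= code.body_start &&
                                  slot < code.object_end;
                         }),
          recorded_slots->end());
    }
  }

  int main() {
    std::vector<uintptr_t> slots = {0x1000, 0x2008, 0x2010, 0x3000};
    std::vector<CodeRange> dead = {{0x2000, 0x2100, false}};
    RemoveDeoptimizedCodeSlots(dead, &slots);
    return slots.size() == 2 ? 0 : 1;  // both slots in the dead body are gone
  }
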
3660 void MarkCompactCollector::RemoveDeadInvalidatedCode() { | 3602 void MarkCompactCollector::RemoveDeadInvalidatedCode() { |
3661 int length = invalidated_code_.length(); | 3603 int length = invalidated_code_.length(); |
3662 for (int i = 0; i < length; i++) { | 3604 for (int i = 0; i < length; i++) { |
3663 if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL; | 3605 if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL; |
3664 } | 3606 } |
3665 } | 3607 } |
3666 | 3608 |
3667 | 3609 |
3668 void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) { | 3610 void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) { |
3669 int length = invalidated_code_.length(); | 3611 int length = invalidated_code_.length(); |
3670 for (int i = 0; i < length; i++) { | 3612 for (int i = 0; i < length; i++) { |
3671 Code* code = invalidated_code_[i]; | 3613 Code* code = invalidated_code_[i]; |
3672 if (code != NULL) { | 3614 if (code != NULL) { |
3673 code->Iterate(visitor); | 3615 code->Iterate(visitor); |
3674 SetMarkBitsUnderInvalidatedCode(code, false); | |
3675 } | 3616 } |
3676 } | 3617 } |
3677 invalidated_code_.Rewind(0); | 3618 invalidated_code_.Rewind(0); |
3678 } | 3619 } |
3679 | 3620 |
3680 | 3621 |
| 3622 void MarkCompactCollector::RemoveObjectSlots(Address start_slot, |
| 3623 Address end_slot) { |
| 3624 // Remove entries by replacing them with an old-space slot containing a smi |
| 3625 // that is located in an unmovable page. |
| 3626 int npages = evacuation_candidates_.length(); |
| 3627 for (int i = 0; i < npages; i++) { |
| 3628 Page* p = evacuation_candidates_[i]; |
| 3629 DCHECK(p->IsEvacuationCandidate() || |
| 3630 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
| 3631 if (p->IsEvacuationCandidate()) { |
| 3632 SlotsBuffer::RemoveObjectSlots(heap_, p->slots_buffer(), start_slot, |
| 3633 end_slot); |
| 3634 } |
| 3635 } |
| 3636 } |
| 3637 |
| 3638 |
3681 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { | 3639 void MarkCompactCollector::EvacuateNewSpaceAndCandidates() { |
3682 Heap::RelocationLock relocation_lock(heap()); | 3640 Heap::RelocationLock relocation_lock(heap()); |
3683 | 3641 |
3684 bool code_slots_filtering_required; | |
3685 { | 3642 { |
3686 GCTracer::Scope gc_scope(heap()->tracer(), | 3643 GCTracer::Scope gc_scope(heap()->tracer(), |
3687 GCTracer::Scope::MC_SWEEP_NEWSPACE); | 3644 GCTracer::Scope::MC_SWEEP_NEWSPACE); |
3688 code_slots_filtering_required = MarkInvalidatedCode(); | |
3689 EvacuationScope evacuation_scope(this); | 3645 EvacuationScope evacuation_scope(this); |
3690 EvacuateNewSpace(); | 3646 EvacuateNewSpace(); |
3691 } | 3647 } |
3692 | 3648 |
3693 { | 3649 { |
3694 GCTracer::Scope gc_scope(heap()->tracer(), | 3650 GCTracer::Scope gc_scope(heap()->tracer(), |
3695 GCTracer::Scope::MC_EVACUATE_PAGES); | 3651 GCTracer::Scope::MC_EVACUATE_PAGES); |
3696 EvacuationScope evacuation_scope(this); | 3652 EvacuationScope evacuation_scope(this); |
3697 EvacuatePages(); | 3653 EvacuatePages(); |
3698 } | 3654 } |
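
Note: with the eager removal above, code_slots_filtering_required is gone and UpdateSlotsRecordedIn (called below) loses its filtering parameter: every slot still recorded may be updated unconditionally. A standalone sketch of the core of such an update pass, with a forwarding map standing in for reading the forwarding address out of the migrated object (illustrative, not the V8 API):

  #include <cstdint>
  #include <unordered_map>
  #include <vector>

  // Rewrite each recorded slot whose old target has been evacuated so that
  // it points at the target's new location.
  static void UpdateSlots(
      const std::unordered_map<uintptr_t, uintptr_t>& forwarding,
      const std::vector<uintptr_t*>& slots) {
    for (uintptr_t* slot : slots) {
      auto it = forwarding.find(*slot);
      if (it != forwarding.end()) *slot = it->second;
    }
  }

  int main() {
    uintptr_t field = 0x2000;  // slot holding an old address
    std::unordered_map<uintptr_t, uintptr_t> fwd = {{0x2000, 0x9000}};
    std::vector<uintptr_t*> slots = {&field};
    UpdateSlots(fwd, slots);
    return field == 0x9000 ? 0 : 1;  // slot now points at the new location
  }
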
(...skipping 26 matching lines...)
3725 GCTracer::Scope gc_scope(heap()->tracer(), | 3681 GCTracer::Scope gc_scope(heap()->tracer(), |
3726 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); | 3682 GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS); |
3727 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), | 3683 StoreBufferRebuildScope scope(heap_, heap_->store_buffer(), |
3728 &Heap::ScavengeStoreBufferCallback); | 3684 &Heap::ScavengeStoreBufferCallback); |
3729 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); | 3685 heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer); |
3730 } | 3686 } |
3731 | 3687 |
3732 { | 3688 { |
3733 GCTracer::Scope gc_scope(heap()->tracer(), | 3689 GCTracer::Scope gc_scope(heap()->tracer(), |
3734 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); | 3690 GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED); |
3735 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_, | 3691 SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_); |
3736 code_slots_filtering_required); | |
3737 if (FLAG_trace_fragmentation_verbose) { | 3692 if (FLAG_trace_fragmentation_verbose) { |
3738 PrintF(" migration slots buffer: %d\n", | 3693 PrintF(" migration slots buffer: %d\n", |
3739 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); | 3694 SlotsBuffer::SizeOfChain(migration_slots_buffer_)); |
3740 } | 3695 } |
3741 | 3696 |
3742 if (compacting_ && was_marked_incrementally_) { | 3697 if (compacting_ && was_marked_incrementally_) { |
3743 // It's difficult to filter out slots recorded for large objects. | 3698 // It's difficult to filter out slots recorded for large objects. |
3744 LargeObjectIterator it(heap_->lo_space()); | 3699 LargeObjectIterator it(heap_->lo_space()); |
3745 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { | 3700 for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) { |
3746 // LargeObjectSpace is not swept yet, thus we have to skip | 3701 // LargeObjectSpace is not swept yet, thus we have to skip |
(...skipping 13 matching lines...)
3760 { | 3715 { |
3761 GCTracer::Scope gc_scope( | 3716 GCTracer::Scope gc_scope( |
3762 heap()->tracer(), | 3717 heap()->tracer(), |
3763 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); | 3718 GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED); |
3764 for (int i = 0; i < npages; i++) { | 3719 for (int i = 0; i < npages; i++) { |
3765 Page* p = evacuation_candidates_[i]; | 3720 Page* p = evacuation_candidates_[i]; |
3766 DCHECK(p->IsEvacuationCandidate() || | 3721 DCHECK(p->IsEvacuationCandidate() || |
3767 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); | 3722 p->IsFlagSet(Page::RESCAN_ON_EVACUATION)); |
3768 | 3723 |
3769 if (p->IsEvacuationCandidate()) { | 3724 if (p->IsEvacuationCandidate()) { |
3770 SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer(), | 3725 SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer()); |
3771 code_slots_filtering_required); | |
3772 if (FLAG_trace_fragmentation_verbose) { | 3726 if (FLAG_trace_fragmentation_verbose) { |
3773 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), | 3727 PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p), |
3774 SlotsBuffer::SizeOfChain(p->slots_buffer())); | 3728 SlotsBuffer::SizeOfChain(p->slots_buffer())); |
3775 } | 3729 } |
3776 | 3730 |
3777 // Important: the skip list should be cleared only after roots were updated, | 3731 // Important: the skip list should be cleared only after roots were updated, |
3778 // because root iteration traverses the stack and might have to find code | 3732 // because root iteration traverses the stack and might have to find code |
3779 // objects via a not-yet-updated pc pointing into an evacuation candidate. | 3733 // objects via a not-yet-updated pc pointing into an evacuation candidate. |
3780 SkipList* list = p->skip_list(); | 3734 SkipList* list = p->skip_list(); |
3781 if (list != NULL) list->Clear(); | 3735 if (list != NULL) list->Clear(); |
(...skipping 793 matching lines...)
4575 } else { | 4529 } else { |
4576 ++slot_idx; | 4530 ++slot_idx; |
4577 DCHECK(slot_idx < slots_count); | 4531 DCHECK(slot_idx < slots_count); |
4578 } | 4532 } |
4579 } | 4533 } |
4580 buffer = buffer->next(); | 4534 buffer = buffer->next(); |
4581 } | 4535 } |
4582 } | 4536 } |
4583 | 4537 |
4584 | 4538 |
| 4539 void SlotsBuffer::RemoveObjectSlots(Heap* heap, SlotsBuffer* buffer, |
| 4540 Address start_slot, Address end_slot) { |
| 4541 // Remove entries by replacing them with an old-space slot containing a smi |
| 4542 // that is located in an unmovable page. |
| 4543 const ObjectSlot kRemovedEntry = HeapObject::RawField( |
| 4544 heap->empty_fixed_array(), FixedArrayBase::kLengthOffset); |
| 4545 DCHECK(Page::FromAddress(reinterpret_cast<Address>(kRemovedEntry)) |
| 4546 ->NeverEvacuate()); |
| 4547 |
| 4548 while (buffer != NULL) { |
| 4549 SlotsBuffer::ObjectSlot* slots = buffer->slots_; |
| 4550 intptr_t slots_count = buffer->idx_; |
| 4551 bool is_typed_slot = false; |
| 4552 |
| 4553 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { |
| 4554 ObjectSlot slot = slots[slot_idx]; |
| 4555 if (!IsTypedSlot(slot)) { |
| 4556 Address slot_address = reinterpret_cast<Address>(slot); |
| 4557 if (slot_address >= start_slot && slot_address < end_slot) { |
| 4558 slots[slot_idx] = kRemovedEntry; |
| 4559 if (is_typed_slot) { |
| 4560 slots[slot_idx - 1] = kRemovedEntry; |
| 4561 } |
| 4562 } |
| 4563 is_typed_slot = false; |
| 4564 } else { |
| 4565 is_typed_slot = true; |
| 4566 DCHECK(slot_idx < slots_count); |
| 4567 } |
| 4568 } |
| 4569 buffer = buffer->next(); |
| 4570 } |
| 4571 } |
| 4572 |
| 4573 |
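
Note: the buffer is never compacted; matching entries are overwritten in place with kRemovedEntry, the address of the empty fixed array's length field. That field holds a smi and lives on a page that is never evacuated (hence the DCHECK), so a later pass that "updates" the sentinel slot is a harmless no-op. Typed slots occupy two consecutive entries (a type word followed by the address), which is why removing the address half also clears the entry before it. A minimal standalone sketch of the same scan, with made-up encodings for the tag test and the sentinel:

  #include <cstdint>
  #include <vector>

  // Entries below kLastTypeTag act as type markers for the following
  // address entry; everything else is a plain slot address. kRemovedEntry
  // plays the role of the safe smi slot on an unmovable page.
  constexpr uintptr_t kLastTypeTag = 16;
  constexpr uintptr_t kRemovedEntry = 0x1000;

  static bool IsTypedSlotTag(uintptr_t entry) { return entry < kLastTypeTag; }

  static void RemoveObjectSlots(std::vector<uintptr_t>* buffer,
                                uintptr_t start_slot, uintptr_t end_slot) {
    bool previous_was_tag = false;
    for (size_t i = 0; i < buffer->size(); ++i) {
      uintptr_t entry = (*buffer)[i];
      if (!IsTypedSlotTag(entry)) {
        if (entry >= start_slot && entry < end_slot) {
          (*buffer)[i] = kRemovedEntry;
          // A typed slot is a pair; clear its tag as well.
          if (previous_was_tag) (*buffer)[i - 1] = kRemovedEntry;
        }
        previous_was_tag = false;
      } else {
        previous_was_tag = true;
      }
    }
  }

  int main() {
    // A plain slot, then a typed pair (tag 3 + address), then a plain slot.
    std::vector<uintptr_t> buffer = {0x5000, 3, 0x2040, 0x7000};
    RemoveObjectSlots(&buffer, 0x2000, 0x3000);
    // The typed pair pointed into the removed range: both halves cleared.
    return (buffer[1] == kRemovedEntry && buffer[2] == kRemovedEntry) ? 0 : 1;
  }
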
4585 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) { | 4574 void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) { |
4586 while (buffer != NULL) { | 4575 while (buffer != NULL) { |
4587 SlotsBuffer::ObjectSlot* slots = buffer->slots_; | 4576 SlotsBuffer::ObjectSlot* slots = buffer->slots_; |
4588 intptr_t slots_count = buffer->idx_; | 4577 intptr_t slots_count = buffer->idx_; |
4589 | 4578 |
4590 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { | 4579 for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) { |
4591 ObjectSlot slot = slots[slot_idx]; | 4580 ObjectSlot slot = slots[slot_idx]; |
4592 if (!IsTypedSlot(slot)) { | 4581 if (!IsTypedSlot(slot)) { |
4593 Object* object = *slot; | 4582 Object* object = *slot; |
4594 if (object->IsHeapObject()) { | 4583 if (object->IsHeapObject()) { |
(...skipping 164 matching lines...)
4759 SlotsBuffer* buffer = *buffer_address; | 4748 SlotsBuffer* buffer = *buffer_address; |
4760 while (buffer != NULL) { | 4749 while (buffer != NULL) { |
4761 SlotsBuffer* next_buffer = buffer->next(); | 4750 SlotsBuffer* next_buffer = buffer->next(); |
4762 DeallocateBuffer(buffer); | 4751 DeallocateBuffer(buffer); |
4763 buffer = next_buffer; | 4752 buffer = next_buffer; |
4764 } | 4753 } |
4765 *buffer_address = NULL; | 4754 *buffer_address = NULL; |
4766 } | 4755 } |
4767 } // namespace internal | 4756 } // namespace internal |
4768 } // namespace v8 | 4757 } // namespace v8 |