| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
| 6 | 6 |
| 7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/compilation-cache.h" | 10 #include "src/compilation-cache.h" |
| (...skipping 2672 matching lines...) |
| 2683 while (weak_cell_obj != Smi::FromInt(0)) { | 2683 while (weak_cell_obj != Smi::FromInt(0)) { |
| 2684 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); | 2684 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj); |
| 2685 weak_cell_obj = weak_cell->next(); | 2685 weak_cell_obj = weak_cell->next(); |
| 2686 weak_cell->clear_next(heap()); | 2686 weak_cell->clear_next(heap()); |
| 2687 } | 2687 } |
| 2688 heap()->set_encountered_weak_cells(Smi::FromInt(0)); | 2688 heap()->set_encountered_weak_cells(Smi::FromInt(0)); |
| 2689 } | 2689 } |
| 2690 | 2690 |
| 2691 | 2691 |
| 2692 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { | 2692 void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) { |
| 2693 // When parallel compaction is in progress, store buffer and slots buffer |
| 2694 // entries require synchronization. |
| 2693 if (heap_->InNewSpace(value)) { | 2695 if (heap_->InNewSpace(value)) { |
| 2694 if (parallel_compaction_in_progress_) { | 2696 if (parallel_compaction_in_progress_) { |
| 2695 heap_->store_buffer()->MarkSynchronized(slot); | 2697 heap_->store_buffer()->MarkSynchronized(slot); |
| 2696 } else { | 2698 } else { |
| 2697 heap_->store_buffer()->Mark(slot); | 2699 heap_->store_buffer()->Mark(slot); |
| 2698 } | 2700 } |
| 2699 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { | 2701 } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) { |
| 2702 if (parallel_compaction_in_progress_) { |
| 2703 SlotsBuffer::AddToSynchronized( |
| 2704 &slots_buffer_allocator_, &migration_slots_buffer_, |
| 2705 &migration_slots_buffer_mutex_, reinterpret_cast<Object**>(slot), |
| 2706 SlotsBuffer::IGNORE_OVERFLOW); |
| 2707 } else { |
| 2708 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
| 2709 reinterpret_cast<Object**>(slot), |
| 2710 SlotsBuffer::IGNORE_OVERFLOW); |
| 2711 } |
| 2712 } |
| 2713 } |
| 2714 |
| 2715 |
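
The synchronized variants exist because, during parallel compaction, several
evacuation threads can append to the shared migration slots buffer at once. A
minimal standalone sketch of that pattern, assuming hypothetical names (this
is not V8 code):

    #include <mutex>
    #include <vector>

    struct SlotBuffer {
      std::vector<void*> slots;
      std::mutex mutex;

      // Single-threaded fast path: the collector owns the buffer exclusively.
      void Add(void* slot) { slots.push_back(slot); }

      // Parallel-compaction path: serialize appends from multiple threads.
      void AddSynchronized(void* slot) {
        std::lock_guard<std::mutex> guard(mutex);
        Add(slot);
      }
    };

V8's real SlotsBuffer::AddToSynchronized (further down in this patch) follows
the same shape: it takes the buffer mutex with a LockGuard and then delegates
to the unsynchronized AddTo.
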
| 2716 void MarkCompactCollector::RecordMigratedCodeEntrySlot( |
| 2717 Address code_entry, Address code_entry_slot) { |
| 2718 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { |
| 2719 if (parallel_compaction_in_progress_) { |
| 2720 SlotsBuffer::AddToSynchronized( |
| 2721 &slots_buffer_allocator_, &migration_slots_buffer_, |
| 2722 &migration_slots_buffer_mutex_, SlotsBuffer::CODE_ENTRY_SLOT, |
| 2723 code_entry_slot, SlotsBuffer::IGNORE_OVERFLOW); |
| 2724 } else { |
| 2725 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
| 2726 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, |
| 2727 SlotsBuffer::IGNORE_OVERFLOW); |
| 2728 } |
| 2729 } |
| 2730 } |
| 2731 |
| 2732 |
| 2733 void MarkCompactCollector::RecordMigratedCodeObjectSlot(Address code_object) { |
| 2734 if (parallel_compaction_in_progress_) { |
| 2735 SlotsBuffer::AddToSynchronized( |
| 2736 &slots_buffer_allocator_, &migration_slots_buffer_, |
| 2737 &migration_slots_buffer_mutex_, SlotsBuffer::RELOCATED_CODE_OBJECT, |
| 2738 code_object, SlotsBuffer::IGNORE_OVERFLOW); |
| 2739 } else { |
| 2700 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, | 2740 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, |
| 2701 reinterpret_cast<Object**>(slot), | 2741 SlotsBuffer::RELOCATED_CODE_OBJECT, code_object, |
| 2702 SlotsBuffer::IGNORE_OVERFLOW); | 2742 SlotsBuffer::IGNORE_OVERFLOW); |
| 2703 } | 2743 } |
| 2704 } | 2744 } |
| 2705 | 2745 |
| 2706 | 2746 |
| 2707 // We scavenge new space simultaneously with sweeping. This is done in two | 2747 // We scavenge new space simultaneously with sweeping. This is done in two |
| 2708 // passes. | 2748 // passes. |
| 2709 // | 2749 // |
| 2710 // The first pass migrates all live objects from one semispace to another or | 2750 // The first pass migrates all live objects from one semispace to another or |
| 2711 // promotes them to old space. The forwarding address is written directly into | 2751 // promotes them to old space. The forwarding address is written directly into |
| (...skipping 24 matching lines...) |
| 2736 break; | 2776 break; |
| 2737 | 2777 |
| 2738 case HeapObjectContents::kRawValues: | 2778 case HeapObjectContents::kRawValues: |
| 2739 MigrateObjectRaw(dst, src, size); | 2779 MigrateObjectRaw(dst, src, size); |
| 2740 break; | 2780 break; |
| 2741 } | 2781 } |
| 2742 | 2782 |
| 2743 if (compacting_ && dst->IsJSFunction()) { | 2783 if (compacting_ && dst->IsJSFunction()) { |
| 2744 Address code_entry_slot = dst->address() + JSFunction::kCodeEntryOffset; | 2784 Address code_entry_slot = dst->address() + JSFunction::kCodeEntryOffset; |
| 2745 Address code_entry = Memory::Address_at(code_entry_slot); | 2785 Address code_entry = Memory::Address_at(code_entry_slot); |
| 2746 | 2786 RecordMigratedCodeEntrySlot(code_entry, code_entry_slot); |
| 2747 if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) { | |
| 2748 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, | |
| 2749 SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot, | |
| 2750 SlotsBuffer::IGNORE_OVERFLOW); | |
| 2751 } | |
| 2752 } | 2787 } |
| 2753 } else if (dest == CODE_SPACE) { | 2788 } else if (dest == CODE_SPACE) { |
| 2754 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); | 2789 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); |
| 2755 heap()->MoveBlock(dst_addr, src_addr, size); | 2790 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2756 SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_, | 2791 RecordMigratedCodeObjectSlot(dst_addr); |
| 2757 SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr, | |
| 2758 SlotsBuffer::IGNORE_OVERFLOW); | |
| 2759 Code::cast(dst)->Relocate(dst_addr - src_addr); | 2792 Code::cast(dst)->Relocate(dst_addr - src_addr); |
| 2760 } else { | 2793 } else { |
| 2761 DCHECK(dest == NEW_SPACE); | 2794 DCHECK(dest == NEW_SPACE); |
| 2762 heap()->MoveBlock(dst_addr, src_addr, size); | 2795 heap()->MoveBlock(dst_addr, src_addr, size); |
| 2763 } | 2796 } |
| 2764 heap()->OnMoveEvent(dst, src, size); | 2797 heap()->OnMoveEvent(dst, src, size); |
| 2765 Memory::Address_at(src_addr) = dst_addr; | 2798 Memory::Address_at(src_addr) = dst_addr; |
| 2766 } | 2799 } |
| 2767 | 2800 |
| 2768 | 2801 |
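
A hedged sketch of the two-pass scheme described in the comment above, with
hypothetical types (real V8 interleaves this with sweeping, promotion to old
space, and store buffer updates; this shows only the forwarding mechanism,
which MigrateObject implements by writing dst_addr into the first word of the
old copy):

    #include <vector>

    struct Object {
      Object* forwarding = nullptr;  // pass 1 writes the new address here
      // ... payload ...
    };

    // Pass 1: copy each live object into to-space and record the forwarding
    // address in the old (from-space) copy.
    void MigratePass(const std::vector<Object*>& live, Object* (*allocate)()) {
      for (Object* src : live) {
        Object* dst = allocate();
        *dst = *src;
        src->forwarding = dst;
      }
    }

    // Pass 2: redirect each recorded slot through the forwarding address
    // written in pass 1 (assumes every non-null slot points at a migrated
    // from-space object).
    void UpdatePass(const std::vector<Object**>& slots) {
      for (Object** slot : slots) {
        if (*slot != nullptr) *slot = (*slot)->forwarding;
      }
    }
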
| (...skipping 1711 matching lines...) |
| 4480 MarkCompactMarkingVisitor::Initialize(); | 4513 MarkCompactMarkingVisitor::Initialize(); |
| 4481 IncrementalMarking::Initialize(); | 4514 IncrementalMarking::Initialize(); |
| 4482 } | 4515 } |
| 4483 | 4516 |
| 4484 | 4517 |
| 4485 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) { | 4518 bool SlotsBuffer::IsTypedSlot(ObjectSlot slot) { |
| 4486 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES; | 4519 return reinterpret_cast<uintptr_t>(slot) < NUMBER_OF_SLOT_TYPES; |
| 4487 } | 4520 } |
| 4488 | 4521 |
| 4489 | 4522 |
| 4523 bool SlotsBuffer::AddToSynchronized(SlotsBufferAllocator* allocator, |
| 4524 SlotsBuffer** buffer_address, |
| 4525 base::Mutex* buffer_mutex, SlotType type, |
| 4526 Address addr, AdditionMode mode) { |
| 4527 base::LockGuard<base::Mutex> lock_guard(buffer_mutex); |
| 4528 return AddTo(allocator, buffer_address, type, addr, mode); |
| 4529 } |
| 4530 |
| 4531 |
| 4490 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator, | 4532 bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator, |
| 4491 SlotsBuffer** buffer_address, SlotType type, | 4533 SlotsBuffer** buffer_address, SlotType type, |
| 4492 Address addr, AdditionMode mode) { | 4534 Address addr, AdditionMode mode) { |
| 4493 SlotsBuffer* buffer = *buffer_address; | 4535 SlotsBuffer* buffer = *buffer_address; |
| 4494 if (buffer == NULL || !buffer->HasSpaceForTypedSlot()) { | 4536 if (buffer == NULL || !buffer->HasSpaceForTypedSlot()) { |
| 4495 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { | 4537 if (mode == FAIL_ON_OVERFLOW && ChainLengthThresholdReached(buffer)) { |
| 4496 allocator->DeallocateChain(buffer_address); | 4538 allocator->DeallocateChain(buffer_address); |
| 4497 return false; | 4539 return false; |
| 4498 } | 4540 } |
| 4499 buffer = allocator->AllocateBuffer(buffer); | 4541 buffer = allocator->AllocateBuffer(buffer); |
| (...skipping 251 matching lines...) |
| 4751 SlotsBuffer* buffer = *buffer_address; | 4793 SlotsBuffer* buffer = *buffer_address; |
| 4752 while (buffer != NULL) { | 4794 while (buffer != NULL) { |
| 4753 SlotsBuffer* next_buffer = buffer->next(); | 4795 SlotsBuffer* next_buffer = buffer->next(); |
| 4754 DeallocateBuffer(buffer); | 4796 DeallocateBuffer(buffer); |
| 4755 buffer = next_buffer; | 4797 buffer = next_buffer; |
| 4756 } | 4798 } |
| 4757 *buffer_address = NULL; | 4799 *buffer_address = NULL; |
| 4758 } | 4800 } |
| 4759 } // namespace internal | 4801 } // namespace internal |
| 4760 } // namespace v8 | 4802 } // namespace v8 |