OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 2603 matching lines...) |
2614 bool success = SlotsBuffer::AddTo( | 2614 bool success = SlotsBuffer::AddTo( |
2615 slots_buffer_allocator_, target_page->slots_buffer_address(), slot_type, | 2615 slots_buffer_allocator_, target_page->slots_buffer_address(), slot_type, |
2616 addr, SlotsBuffer::FAIL_ON_OVERFLOW); | 2616 addr, SlotsBuffer::FAIL_ON_OVERFLOW); |
2617 if (!success) { | 2617 if (!success) { |
2618 EvictPopularEvacuationCandidate(target_page); | 2618 EvictPopularEvacuationCandidate(target_page); |
2619 } | 2619 } |
2620 } | 2620 } |
2621 } | 2621 } |
2622 | 2622 |
2623 | 2623 |
| 2624 class RecordMigratedSlotVisitor final : public ObjectVisitor { |
| 2625 public: |
| 2626 RecordMigratedSlotVisitor(MarkCompactCollector* collector, |
| 2627 SlotsBuffer** evacuation_slots_buffer) |
| 2628 : collector_(collector), |
| 2629 evacuation_slots_buffer_(evacuation_slots_buffer) {} |
| 2630 |
| 2631 V8_INLINE void VisitPointer(Object** p) override { |
| 2632 collector_->RecordMigratedSlot(*p, reinterpret_cast<Address>(p), |
| 2633 evacuation_slots_buffer_); |
| 2634 } |
| 2635 |
| 2636 V8_INLINE void VisitPointers(Object** start, Object** end) override { |
| 2637 while (start < end) { |
| 2638 collector_->RecordMigratedSlot(*start, reinterpret_cast<Address>(start), |
| 2639 evacuation_slots_buffer_); |
| 2640 ++start; |
| 2641 } |
| 2642 } |
| 2643 |
| 2644 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override { |
| 2645 if (collector_->compacting_) { |
| 2646 Address code_entry = Memory::Address_at(code_entry_slot); |
| 2647 collector_->RecordMigratedCodeEntrySlot(code_entry, code_entry_slot, |
| 2648 evacuation_slots_buffer_); |
| 2649 } |
| 2650 } |
| 2651 |
| 2652 private: |
| 2653 MarkCompactCollector* collector_; |
| 2654 SlotsBuffer** evacuation_slots_buffer_; |
| 2655 }; |
| 2656 |
| 2657 |
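[Editorial aside, not part of the patch: the hunk above replaces the per-content-type migration helpers (MigrateObjectTagged/Mixed/Raw, deleted further down) with a single visitor that the object's own body iteration drives over every tagged slot. A minimal, self-contained sketch of that idiom follows, using simplified stand-ins rather than V8's actual ObjectVisitor, SlotsBuffer, or IterateBody types.]

// Sketch only: simplified stand-ins showing why one slot-recording visitor can
// replace a switch over the object's content type.
#include <cstdio>
#include <vector>

struct Object;  // opaque stand-in for a heap object pointer

// Stand-in for ObjectVisitor: layout-aware code calls back once per tagged
// slot, so callers no longer need to know the object's layout themselves.
class SlotVisitor {
 public:
  virtual ~SlotVisitor() = default;
  virtual void VisitPointer(Object** p) = 0;
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; ++p) VisitPointer(p);
  }
};

// Stand-in for RecordMigratedSlotVisitor: records every visited slot address.
class RecordingVisitor final : public SlotVisitor {
 public:
  void VisitPointer(Object** p) override { recorded_.push_back(p); }
  size_t recorded_count() const { return recorded_.size(); }

 private:
  std::vector<Object**> recorded_;
};

// Stand-in for HeapObject::IterateBody: the object knows its own layout and
// reports only the slots that actually hold tagged pointers.
void IterateBody(Object** body, int tagged_slots, SlotVisitor* v) {
  v->VisitPointers(body, body + tagged_slots);
}

int main() {
  Object* fake_body[4] = {nullptr, nullptr, nullptr, nullptr};
  RecordingVisitor visitor;
  IterateBody(fake_body, 4, &visitor);
  std::printf("recorded %zu slots\n", visitor.recorded_count());
}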
2624 // We scavenge new space simultaneously with sweeping. This is done in two | 2658 // We scavenge new space simultaneously with sweeping. This is done in two |
2625 // passes. | 2659 // passes. |
2626 // | 2660 // |
2627 // The first pass migrates all alive objects from one semispace to another or | 2661 // The first pass migrates all alive objects from one semispace to another or |
2628 // promotes them to old space. Forwarding address is written directly into | 2662 // promotes them to old space. Forwarding address is written directly into |
2629 // first word of object without any encoding. If object is dead we write | 2663 // first word of object without any encoding. If object is dead we write |
2630 // NULL as a forwarding address. | 2664 // NULL as a forwarding address. |
2631 // | 2665 // |
2632 // The second pass updates pointers to new space in all spaces. It is possible | 2666 // The second pass updates pointers to new space in all spaces. It is possible |
2633 // to encounter pointers to dead new space objects during traversal of pointers | 2667 // to encounter pointers to dead new space objects during traversal of pointers |
2634 // to new space. We should clear them to avoid encountering them during next | 2668 // to new space. We should clear them to avoid encountering them during next |
2635 // pointer iteration. This is an issue if the store buffer overflows and we | 2669 // pointer iteration. This is an issue if the store buffer overflows and we |
2636 // have to scan the entire old space, including dead objects, looking for | 2670 // have to scan the entire old space, including dead objects, looking for |
2637 // pointers to new space. | 2671 // pointers to new space. |
2638 void MarkCompactCollector::MigrateObject( | 2672 void MarkCompactCollector::MigrateObject( |
2639 HeapObject* dst, HeapObject* src, int size, AllocationSpace dest, | 2673 HeapObject* dst, HeapObject* src, int size, AllocationSpace dest, |
2640 SlotsBuffer** evacuation_slots_buffer) { | 2674 SlotsBuffer** evacuation_slots_buffer) { |
2641 Address dst_addr = dst->address(); | 2675 Address dst_addr = dst->address(); |
2642 Address src_addr = src->address(); | 2676 Address src_addr = src->address(); |
2643 DCHECK(heap()->AllowedToBeMigrated(src, dest)); | 2677 DCHECK(heap()->AllowedToBeMigrated(src, dest)); |
2644 DCHECK(dest != LO_SPACE); | 2678 DCHECK(dest != LO_SPACE); |
2645 if (dest == OLD_SPACE) { | 2679 if (dest == OLD_SPACE) { |
2646 DCHECK_OBJECT_SIZE(size); | 2680 DCHECK_OBJECT_SIZE(size); |
2647 DCHECK(evacuation_slots_buffer != nullptr); | 2681 DCHECK(evacuation_slots_buffer != nullptr); |
2648 DCHECK(IsAligned(size, kPointerSize)); | 2682 DCHECK(IsAligned(size, kPointerSize)); |
2649 switch (src->ContentType()) { | |
2650 case HeapObjectContents::kTaggedValues: | |
2651 MigrateObjectTagged(dst, src, size, evacuation_slots_buffer); | |
2652 break; | |
2653 | 2683 |
2654 case HeapObjectContents::kMixedValues: | 2684 heap()->MoveBlock(dst->address(), src->address(), size); |
2655 MigrateObjectMixed(dst, src, size, evacuation_slots_buffer); | 2685 RecordMigratedSlotVisitor visitor(this, evacuation_slots_buffer); |
2656 break; | 2686 dst->IterateBody(&visitor); |
2657 | |
2658 case HeapObjectContents::kRawValues: | |
2659 MigrateObjectRaw(dst, src, size); | |
2660 break; | |
2661 } | |
2662 | |
2663 if (compacting_ && dst->IsJSFunction()) { | |
2664 Address code_entry_slot = dst->address() + JSFunction::kCodeEntryOffset; | |
2665 Address code_entry = Memory::Address_at(code_entry_slot); | |
2666 RecordMigratedCodeEntrySlot(code_entry, code_entry_slot, | |
2667 evacuation_slots_buffer); | |
2668 } | |
2669 } else if (dest == CODE_SPACE) { | 2687 } else if (dest == CODE_SPACE) { |
2670 DCHECK_CODEOBJECT_SIZE(size, heap()->code_space()); | 2688 DCHECK_CODEOBJECT_SIZE(size, heap()->code_space()); |
2671 DCHECK(evacuation_slots_buffer != nullptr); | 2689 DCHECK(evacuation_slots_buffer != nullptr); |
2672 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); | 2690 PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr)); |
2673 heap()->MoveBlock(dst_addr, src_addr, size); | 2691 heap()->MoveBlock(dst_addr, src_addr, size); |
2674 RecordMigratedCodeObjectSlot(dst_addr, evacuation_slots_buffer); | 2692 RecordMigratedCodeObjectSlot(dst_addr, evacuation_slots_buffer); |
2675 Code::cast(dst)->Relocate(dst_addr - src_addr); | 2693 Code::cast(dst)->Relocate(dst_addr - src_addr); |
2676 } else { | 2694 } else { |
2677 DCHECK_OBJECT_SIZE(size); | 2695 DCHECK_OBJECT_SIZE(size); |
2678 DCHECK(evacuation_slots_buffer == nullptr); | 2696 DCHECK(evacuation_slots_buffer == nullptr); |
2679 DCHECK(dest == NEW_SPACE); | 2697 DCHECK(dest == NEW_SPACE); |
2680 heap()->MoveBlock(dst_addr, src_addr, size); | 2698 heap()->MoveBlock(dst_addr, src_addr, size); |
2681 } | 2699 } |
2682 heap()->OnMoveEvent(dst, src, size); | 2700 heap()->OnMoveEvent(dst, src, size); |
2683 Memory::Address_at(src_addr) = dst_addr; | 2701 Memory::Address_at(src_addr) = dst_addr; |
2684 } | 2702 } |
2685 | 2703 |
2686 | 2704 |
2687 void MarkCompactCollector::MigrateObjectTagged( | |
2688 HeapObject* dst, HeapObject* src, int size, | |
2689 SlotsBuffer** evacuation_slots_buffer) { | |
2690 Address src_slot = src->address(); | |
2691 Address dst_slot = dst->address(); | |
2692 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { | |
2693 Object* value = Memory::Object_at(src_slot); | |
2694 Memory::Object_at(dst_slot) = value; | |
2695 RecordMigratedSlot(value, dst_slot, evacuation_slots_buffer); | |
2696 src_slot += kPointerSize; | |
2697 dst_slot += kPointerSize; | |
2698 } | |
2699 } | |
2700 | |
2701 | |
2702 void MarkCompactCollector::MigrateObjectMixed( | |
2703 HeapObject* dst, HeapObject* src, int size, | |
2704 SlotsBuffer** evacuation_slots_buffer) { | |
2705 if (src->IsFixedTypedArrayBase()) { | |
2706 heap()->MoveBlock(dst->address(), src->address(), size); | |
2707 Address base_pointer_slot = | |
2708 dst->address() + FixedTypedArrayBase::kBasePointerOffset; | |
2709 RecordMigratedSlot(Memory::Object_at(base_pointer_slot), base_pointer_slot, | |
2710 evacuation_slots_buffer); | |
2711 } else if (src->IsBytecodeArray()) { | |
2712 heap()->MoveBlock(dst->address(), src->address(), size); | |
2713 Address constant_pool_slot = | |
2714 dst->address() + BytecodeArray::kConstantPoolOffset; | |
2715 RecordMigratedSlot(Memory::Object_at(constant_pool_slot), | |
2716 constant_pool_slot, evacuation_slots_buffer); | |
2717 } else if (src->IsJSArrayBuffer()) { | |
2718 heap()->MoveBlock(dst->address(), src->address(), size); | |
2719 | |
2720 // Visit inherited JSObject properties and byte length of ArrayBuffer | |
2721 Address regular_slot = dst->address() + JSArrayBuffer::kPropertiesOffset; | |
2722 Address regular_slots_end = | |
2723 dst->address() + JSArrayBuffer::kByteLengthOffset + kPointerSize; | |
2724 while (regular_slot < regular_slots_end) { | |
2725 RecordMigratedSlot(Memory::Object_at(regular_slot), regular_slot, | |
2726 evacuation_slots_buffer); | |
2727 regular_slot += kPointerSize; | |
2728 } | |
2729 | |
2730 // Skip backing store and visit just internal fields | |
2731 Address internal_field_slot = dst->address() + JSArrayBuffer::kSize; | |
2732 Address internal_fields_end = | |
2733 dst->address() + JSArrayBuffer::kSizeWithInternalFields; | |
2734 while (internal_field_slot < internal_fields_end) { | |
2735 RecordMigratedSlot(Memory::Object_at(internal_field_slot), | |
2736 internal_field_slot, evacuation_slots_buffer); | |
2737 internal_field_slot += kPointerSize; | |
2738 } | |
2739 } else if (FLAG_unbox_double_fields) { | |
2740 Address dst_addr = dst->address(); | |
2741 Address src_addr = src->address(); | |
2742 Address src_slot = src_addr; | |
2743 Address dst_slot = dst_addr; | |
2744 | |
2745 LayoutDescriptorHelper helper(src->map()); | |
2746 DCHECK(!helper.all_fields_tagged()); | |
2747 for (int remaining = size / kPointerSize; remaining > 0; remaining--) { | |
2748 Object* value = Memory::Object_at(src_slot); | |
2749 | |
2750 Memory::Object_at(dst_slot) = value; | |
2751 | |
2752 if (helper.IsTagged(static_cast<int>(src_slot - src_addr))) { | |
2753 RecordMigratedSlot(value, dst_slot, evacuation_slots_buffer); | |
2754 } | |
2755 | |
2756 src_slot += kPointerSize; | |
2757 dst_slot += kPointerSize; | |
2758 } | |
2759 } else { | |
2760 UNREACHABLE(); | |
2761 } | |
2762 } | |
2763 | |
2764 | |
2765 void MarkCompactCollector::MigrateObjectRaw(HeapObject* dst, HeapObject* src, | |
2766 int size) { | |
2767 heap()->MoveBlock(dst->address(), src->address(), size); | |
2768 } | |
2769 | |
2770 | |
2771 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v, | 2705 static inline void UpdateSlot(Isolate* isolate, ObjectVisitor* v, |
2772 SlotsBuffer::SlotType slot_type, Address addr) { | 2706 SlotsBuffer::SlotType slot_type, Address addr) { |
2773 switch (slot_type) { | 2707 switch (slot_type) { |
2774 case SlotsBuffer::CODE_TARGET_SLOT: { | 2708 case SlotsBuffer::CODE_TARGET_SLOT: { |
2775 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL); | 2709 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL); |
2776 rinfo.Visit(isolate, v); | 2710 rinfo.Visit(isolate, v); |
2777 break; | 2711 break; |
2778 } | 2712 } |
2779 case SlotsBuffer::CELL_TARGET_SLOT: { | 2713 case SlotsBuffer::CELL_TARGET_SLOT: { |
2780 RelocInfo rinfo(addr, RelocInfo::CELL, 0, NULL); | 2714 RelocInfo rinfo(addr, RelocInfo::CELL, 0, NULL); |
(...skipping 322 matching lines...) |
3103 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) { | 3037 bool MarkCompactCollector::IsSlotInLiveObject(Address slot) { |
3104 HeapObject* object = NULL; | 3038 HeapObject* object = NULL; |
3105 // The target object is black but we don't know if the source slot is black. | 3039 // The target object is black but we don't know if the source slot is black. |
3106 // The source object could have died and the slot could be part of a free | 3040 // The source object could have died and the slot could be part of a free |
3107 // space. Find out based on mark bits if the slot is part of a live object. | 3041 // space. Find out based on mark bits if the slot is part of a live object. |
3108 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) { | 3042 if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) { |
3109 return false; | 3043 return false; |
3110 } | 3044 } |
3111 | 3045 |
3112 DCHECK(object != NULL); | 3046 DCHECK(object != NULL); |
3113 | 3047 int offset = static_cast<int>(slot - object->address()); |
3114 switch (object->ContentType()) { | 3048 return object->IsValidSlot(offset); |
3115 case HeapObjectContents::kTaggedValues: | |
3116 return true; | |
3117 | |
3118 case HeapObjectContents::kRawValues: { | |
3119 InstanceType type = object->map()->instance_type(); | |
3120 // Slots in maps and code can't be invalid because they are never | |
3121 // shrunk. | |
3122 if (type == MAP_TYPE || type == CODE_TYPE) return true; | |
3123 | |
3124 // Consider slots in objects that contain ONLY raw data as invalid. | |
3125 return false; | |
3126 } | |
3127 | |
3128 case HeapObjectContents::kMixedValues: { | |
3129 if (object->IsFixedTypedArrayBase()) { | |
3130 return static_cast<int>(slot - object->address()) == | |
3131 FixedTypedArrayBase::kBasePointerOffset; | |
3132 } else if (object->IsBytecodeArray()) { | |
3133 return static_cast<int>(slot - object->address()) == | |
3134 BytecodeArray::kConstantPoolOffset; | |
3135 } else if (object->IsJSArrayBuffer()) { | |
3136 int off = static_cast<int>(slot - object->address()); | |
3137 return (off >= JSArrayBuffer::kPropertiesOffset && | |
3138 off <= JSArrayBuffer::kByteLengthOffset) || | |
3139 (off >= JSArrayBuffer::kSize && | |
3140 off < JSArrayBuffer::kSizeWithInternalFields); | |
3141 } else if (FLAG_unbox_double_fields) { | |
3142 // Filter out slots that happen to point to unboxed double fields. | |
3143 LayoutDescriptorHelper helper(object->map()); | |
3144 DCHECK(!helper.all_fields_tagged()); | |
3145 return helper.IsTagged(static_cast<int>(slot - object->address())); | |
3146 } | |
3147 break; | |
3148 } | |
3149 } | |
3150 UNREACHABLE(); | |
3151 return true; | |
3152 } | 3049 } |
3153 | 3050 |
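[Editorial aside, not part of the patch: on the new side, IsSlotInLiveObject delegates the per-type offset checks to HeapObject::IsValidSlot(offset). The question being asked is the same as in the deleted ContentType switch: is this byte offset a tagged field the collector must track? A hedged sketch of such an offset check for a hypothetical mixed-layout object follows; it is illustrative only and does not reproduce V8's IsValidSlot implementation.]

// Sketch only: a hypothetical offset-based slot-validity check of the kind the
// old ContentType switch performed inline and IsValidSlot now encapsulates.
#include <cassert>

// Hypothetical layout: one tagged slot at a fixed offset inside an otherwise
// raw-data object (analogous to FixedTypedArrayBase's base pointer field).
constexpr int kWordSize = 8;
constexpr int kTaggedSlotOffset = 1 * kWordSize;
constexpr int kObjectSize = 4 * kWordSize;

bool IsValidSlotOffset(int offset) {
  assert(offset >= 0 && offset < kObjectSize);
  // Only the single tagged field may hold a pointer worth recording; every
  // other word is raw data and must be filtered out.
  return offset == kTaggedSlotOffset;
}

int main() {
  assert(IsValidSlotOffset(kTaggedSlotOffset));
  assert(!IsValidSlotOffset(2 * kWordSize));
  return 0;
}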
3154 | 3051 |
3155 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot, | 3052 void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot, |
3156 HeapObject* object) { | 3053 HeapObject* object) { |
3157 // The target object has to be black. | 3054 // The target object has to be black. |
3158 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); | 3055 CHECK(Marking::IsBlack(Marking::MarkBitFrom(object))); |
3159 | 3056 |
3160 // The target object is black but we don't know if the source slot is black. | 3057 // The target object is black but we don't know if the source slot is black. |
3161 // The source object could have died and the slot could be part of a free | 3058 // The source object could have died and the slot could be part of a free |
(...skipping 1412 matching lines...) |
4574 MarkBit mark_bit = Marking::MarkBitFrom(host); | 4471 MarkBit mark_bit = Marking::MarkBitFrom(host); |
4575 if (Marking::IsBlack(mark_bit)) { | 4472 if (Marking::IsBlack(mark_bit)) { |
4576 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); | 4473 RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host); |
4577 RecordRelocSlot(&rinfo, target); | 4474 RecordRelocSlot(&rinfo, target); |
4578 } | 4475 } |
4579 } | 4476 } |
4580 } | 4477 } |
4581 | 4478 |
4582 } // namespace internal | 4479 } // namespace internal |
4583 } // namespace v8 | 4480 } // namespace v8 |