OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/context-slot-cache.h" | 9 #include "src/ast/context-slot-cache.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 4705 matching lines...) | |
4716 if (!new_space_->IsFromSpaceCommitted()) return; | 4716 if (!new_space_->IsFromSpaceCommitted()) return; |
4717 for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(), | 4717 for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(), |
4718 new_space_->FromSpaceEnd())) { | 4718 new_space_->FromSpaceEnd())) { |
4719 for (Address cursor = page->area_start(), limit = page->area_end(); | 4719 for (Address cursor = page->area_start(), limit = page->area_end(); |
4720 cursor < limit; cursor += kPointerSize) { | 4720 cursor < limit; cursor += kPointerSize) { |
4721 Memory::Address_at(cursor) = kFromSpaceZapValue; | 4721 Memory::Address_at(cursor) = kFromSpaceZapValue; |
4722 } | 4722 } |
4723 } | 4723 } |
4724 } | 4724 } |
4725 | 4725 |
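
For readers outside V8, here is a minimal, self-contained sketch (not V8 code; `ZapRange` and `kZapValue` are illustrative stand-ins) of what the zapping loop above buys: filling a dead memory range with a recognizable sentinel so any stale pointer into from-space is easy to spot in a debugger or a debug check.

```cpp
#include <cstdint>
#include <cassert>

constexpr uintptr_t kZapValue = 0xdeadbeef;  // stand-in for kFromSpaceZapValue

// Overwrite every word in [start, end), like the per-page cursor loop above.
void ZapRange(uintptr_t* start, uintptr_t* end) {
  for (uintptr_t* cursor = start; cursor < end; ++cursor) {
    *cursor = kZapValue;
  }
}

int main() {
  uintptr_t evacuated[16];
  ZapRange(evacuated, evacuated + 16);
  // A debug check can now recognize a dangling read into the zapped range.
  assert(evacuated[7] == kZapValue);
  return 0;
}
```
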
4726 void Heap::IteratePromotedObjectPointers(HeapObject* object, Address start, | 4726 enum IteratePromotedObjectsMode { |
4727 Address end, bool record_slots, | 4727 RECORD_ONLY, |
4728 ObjectSlotCallback callback) { | 4728 RECORD_AND_SCAVENGE, |
4729 Address slot_address = start; | 4729 }; |
4730 Page* page = Page::FromAddress(start); | |
4731 | 4730 |
4732 while (slot_address < end) { | 4731 template <IteratePromotedObjectsMode mode> |
4733 Object** slot = reinterpret_cast<Object**>(slot_address); | |
4734 Object* target = *slot; | |
4735 if (target->IsHeapObject()) { | |
4736 if (Heap::InFromSpace(target)) { | |
4737 callback(reinterpret_cast<HeapObject**>(slot), | |
4738 HeapObject::cast(target)); | |
4739 Object* new_target = *slot; | |
4740 if (InNewSpace(new_target)) { | |
4741 SLOW_DCHECK(Heap::InToSpace(new_target)); | |
4742 SLOW_DCHECK(new_target->IsHeapObject()); | |
4743 RememberedSet<OLD_TO_NEW>::Insert(page, slot_address); | |
4744 } | |
4745 SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_target)); | |
4746 } else if (record_slots && | |
4747 MarkCompactCollector::IsOnEvacuationCandidate(target)) { | |
4748 mark_compact_collector()->RecordSlot(object, slot, target); | |
4749 } | |
4750 } | |
4751 slot_address += kPointerSize; | |
4752 } | |
4753 } | |
4754 | |
4755 class IteratePromotedObjectsVisitor final : public ObjectVisitor { | 4732 class IteratePromotedObjectsVisitor final : public ObjectVisitor { |
4756 public: | 4733 public: |
4757 IteratePromotedObjectsVisitor(Heap* heap, HeapObject* target, | 4734 IteratePromotedObjectsVisitor(Heap* heap, HeapObject* target, |
4758 bool record_slots, ObjectSlotCallback callback) | 4735 bool record_slots, ObjectSlotCallback callback) |
ulan 2016/11/15 14:39:31: Can you rename the "callback" to "scavenge" to mak
Michael Lippautz 2016/11/15 14:59:50: Restructured.
4759 : heap_(heap), | 4736 : heap_(heap), |
4760 target_(target), | 4737 target_(target), |
4761 record_slots_(record_slots), | 4738 record_slots_(record_slots), |
4762 callback_(callback) {} | 4739 callback_(callback) {} |
4763 | 4740 |
4764 V8_INLINE void VisitPointers(Object** start, Object** end) override { | 4741 inline void VisitPointers(Object** start, Object** end) override { |
4765 heap_->IteratePromotedObjectPointers( | 4742 Address slot_address = reinterpret_cast<Address>(start); |
4766 target_, reinterpret_cast<Address>(start), | 4743 Page* page = Page::FromAddress(slot_address); |
4767 reinterpret_cast<Address>(end), record_slots_, callback_); | 4744 |
4745 while (slot_address < reinterpret_cast<Address>(end)) { | |
4746 Object** slot = reinterpret_cast<Object**>(slot_address); | |
4747 Object* target = *slot; | |
4748 | |
4749 if (mode == RECORD_AND_SCAVENGE && heap_->InNewSpace(target)) { | |
4750 callback_(reinterpret_cast<HeapObject**>(slot), | |
4751 HeapObject::cast(target)); | |
4752 target = *slot; | |
4753 DCHECK(target->IsHeapObject()); | |
4754 SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target)); | |
4755 } | |
4756 | |
4757 if (heap_->InNewSpace(target)) { | |
4758 SLOW_DCHECK(Heap::InToSpace(target)); | |
4759 SLOW_DCHECK(target->IsHeapObject()); | |
4760 RememberedSet<OLD_TO_NEW>::Insert(page, slot_address); | |
4761 } else if (record_slots_ && | |
4762 MarkCompactCollector::IsOnEvacuationCandidate(target)) { | |
4763 heap_->mark_compact_collector()->RecordSlot(target_, slot, target); | |
4764 } | |
4765 | |
4766 slot_address += kPointerSize; | |
4767 } | |
4768 } | 4768 } |
4769 | 4769 |
4770 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override { | 4770 inline void VisitCodeEntry(Address code_entry_slot) override { |
4771 // Black allocation requires us to process objects referenced by | 4771 // Black allocation requires us to process objects referenced by |
4772 // promoted objects. | 4772 // promoted objects. |
4773 if (heap_->incremental_marking()->black_allocation()) { | 4773 if (heap_->incremental_marking()->black_allocation()) { |
4774 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 4774 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
4775 IncrementalMarking::MarkGrey(heap_, code); | 4775 IncrementalMarking::MarkGrey(heap_, code); |
4776 } | 4776 } |
4777 } | 4777 } |
4778 | 4778 |
4779 private: | 4779 private: |
4780 Heap* heap_; | 4780 Heap* heap_; |
(...skipping 10 matching lines...) | |
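
Per the review exchange above, the callback-based helper was restructured into a mode-templated visitor. A minimal sketch of that pattern (not V8 code; `Obj`, `Scavenge`, and `VisitSlots` are illustrative): an enum template parameter selects behavior at compile time, so the `RECORD_ONLY` instantiation carries no scavenge branch at all, while both modes record slots that still point into "new space", analogous to `RememberedSet<OLD_TO_NEW>::Insert`.

```cpp
#include <cstdio>
#include <vector>

enum VisitMode { RECORD_ONLY, RECORD_AND_SCAVENGE };

struct Obj {
  bool in_new_space;
};

static Obj* Scavenge(Obj* o) {
  o->in_new_space = false;  // pretend the object was promoted out of new space
  return o;
}

template <VisitMode mode>
void VisitSlots(Obj** start, Obj** end, std::vector<Obj**>* remembered) {
  for (Obj** slot = start; slot < end; ++slot) {
    if (mode == RECORD_AND_SCAVENGE && (*slot)->in_new_space) {
      *slot = Scavenge(*slot);  // evacuate the target, then re-read the slot
    }
    if ((*slot)->in_new_space) {
      remembered->push_back(slot);  // like RememberedSet<OLD_TO_NEW>::Insert
    }
  }
}

int main() {
  Obj a{true}, b{false};
  Obj* slots[] = {&a, &b};
  std::vector<Obj**> remembered;
  VisitSlots<RECORD_AND_SCAVENGE>(slots, slots + 2, &remembered);
  std::printf("remembered %zu slot(s)\n", remembered.size());  // 0: 'a' was promoted
  return 0;
}
```
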
4791 // promote objects. But we should not record any slots in non-black | 4791 // promote objects. But we should not record any slots in non-black |
4792 // objects. A grey object's slots would be rescanned anyway. | 4792 // objects. A grey object's slots would be rescanned anyway. |
4793 // A white object might not survive until the end of the collection, | 4793 // A white object might not survive until the end of the collection, |
4794 // so recording its slots would violate the invariant. | 4794 // so recording its slots would violate the invariant. |
4795 bool record_slots = false; | 4795 bool record_slots = false; |
4796 if (incremental_marking()->IsCompacting()) { | 4796 if (incremental_marking()->IsCompacting()) { |
4797 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); | 4797 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); |
4798 record_slots = Marking::IsBlack(mark_bit); | 4798 record_slots = Marking::IsBlack(mark_bit); |
4799 } | 4799 } |
4800 | 4800 |
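
A small sketch of the tri-color rule stated in the comment above (names are illustrative, not V8's): slots are recorded only while compacting and only when the promoted object is already black; grey objects get rescanned anyway, and white objects may not survive the cycle.

```cpp
#include <cassert>

enum Color { WHITE, GREY, BLACK };

bool ShouldRecordSlots(bool compacting, Color color) {
  // Mirrors: record_slots = IsCompacting() && Marking::IsBlack(mark_bit);
  return compacting && color == BLACK;
}

int main() {
  assert(ShouldRecordSlots(true, BLACK));
  assert(!ShouldRecordSlots(true, GREY));    // rescanned anyway
  assert(!ShouldRecordSlots(true, WHITE));   // may die before the cycle ends
  assert(!ShouldRecordSlots(false, BLACK));  // only relevant when compacting
  return 0;
}
```
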
4801 IteratePromotedObjectsVisitor visitor(this, target, record_slots, callback); | 4801 IteratePromotedObjectsVisitor<RECORD_AND_SCAVENGE> |
4802 target->IterateBody(target->map()->instance_type(), size, &visitor); | 4802 record_and_scavenge_visitor(this, target, record_slots, callback); |
4803 if (target->IsJSFunction()) { | |
4804 // JSFunctions reachable through kNextFunctionLinkOffset are weak and should | |
4805 // only trigger recording, not a scavenge. | |
4806 JSFunction::BodyDescriptorWeakCode::IterateBody( | |
4807 target, size, &record_and_scavenge_visitor); | |
4808 IteratePromotedObjectsVisitor<RECORD_ONLY> record_only_visitor( | |
4809 this, target, record_slots, callback); | |
4810 JSFunction::BodyDescriptorWeakFields::IterateBody(target, size, | |
4811 &record_only_visitor); | |
4812 } else { | |
4813 target->IterateBody(target->map()->instance_type(), size, | |
4814 &record_and_scavenge_visitor); | |
4815 } | |
4803 | 4816 |
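
A hedged sketch of the split-body iteration used for JSFunction above (field and function names are stand-ins, not V8's body descriptors): the strong portion of a promoted function is visited by the record-and-scavenge pass, while the weak next-function link gets a record-only pass, so visiting it cannot keep its target alive.

```cpp
#include <cstdio>

struct Function {
  int code;       // strong: scavenged when the function is promoted
  int next_link;  // weak: record the slot, never scavenge through it
};

template <bool scavenge>
void VisitField(int* field, const char* what) {
  if (scavenge) ++*field;  // stand-in for evacuating the referee
  std::printf("recorded %s slot\n", what);
}

void IteratePromotedFunction(Function* f) {
  VisitField<true>(&f->code, "strong");      // record-and-scavenge pass
  VisitField<false>(&f->next_link, "weak");  // record-only pass
}

int main() {
  Function f{1, 2};
  IteratePromotedFunction(&f);
  return 0;
}
```
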
4804 // When black allocation is on, we have to visit objects (from new | 4817 // When black allocation is on, we have to visit objects (from new |
4805 // space) promoted to black pages that are not already marked black, | 4818 // space) promoted to black pages that are not already marked black, |
4806 // to keep their references alive. | 4819 // to keep their references alive. |
4807 // TODO(hpayer): Implement a special promotion visitor that incorporates | 4820 // TODO(hpayer): Implement a special promotion visitor that incorporates |
4808 // regular visiting and IteratePromotedObjectPointers. | 4821 // regular visiting and IteratePromotedObjectPointers. |
4809 if (!was_marked_black) { | 4822 if (!was_marked_black) { |
4810 if (incremental_marking()->black_allocation()) { | 4823 if (incremental_marking()->black_allocation()) { |
4811 IncrementalMarking::MarkGrey(this, target->map()); | 4824 IncrementalMarking::MarkGrey(this, target->map()); |
4812 incremental_marking()->IterateBlackObject(target); | 4825 incremental_marking()->IterateBlackObject(target); |
(...skipping 1653 matching lines...) | |
6466 } | 6479 } |
6467 | 6480 |
6468 | 6481 |
6469 // static | 6482 // static |
6470 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6483 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6471 return StaticVisitorBase::GetVisitorId(map); | 6484 return StaticVisitorBase::GetVisitorId(map); |
6472 } | 6485 } |
6473 | 6486 |
6474 } // namespace internal | 6487 } // namespace internal |
6475 } // namespace v8 | 6488 } // namespace v8 |