OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/context-slot-cache.h" | 9 #include "src/ast/context-slot-cache.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 1890 matching lines...)
1901 int32_t size; | 1901 int32_t size; |
1902 bool was_marked_black; | 1902 bool was_marked_black; |
1903 promotion_queue()->remove(&target, &size, &was_marked_black); | 1903 promotion_queue()->remove(&target, &size, &was_marked_black); |
1904 | 1904 |
1905 // A promoted object might already be partially visited | 1905 // A promoted object might already be partially visited |
1906 // during old space pointer iteration. Thus we search specifically | 1906 // during old space pointer iteration. Thus we search specifically |
1907 // for pointers into the from-semispace instead of looking for pointers | 1907 // for pointers into the from-semispace instead of looking for pointers |
1908 // to new space. | 1908 // to new space. |
1909 DCHECK(!target->IsMap()); | 1909 DCHECK(!target->IsMap()); |
1910 | 1910 |
1911 IteratePromotedObject(target, static_cast<int>(size), was_marked_black, | 1911 IterateAndScavengePromotedObject(target, static_cast<int>(size), |
1912 &Scavenger::ScavengeObject); | 1912 was_marked_black); |
1913 } | 1913 } |
1914 } | 1914 } |
1915 | 1915 |
1916 // Take another spin if there are now unswept objects in new space | 1916 // Take another spin if there are now unswept objects in new space |
1917 // (there are currently no more unswept promoted objects). | 1917 // (there are currently no more unswept promoted objects). |
1918 } while (new_space_front != new_space_->top()); | 1918 } while (new_space_front != new_space_->top()); |
1919 | 1919 |
1920 return new_space_front; | 1920 return new_space_front; |
1921 } | 1921 } |
1922 | 1922 |
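The drain loop above alternates between two sources of work: scanning objects copied into to-space between new_space_front and top(), which may push entries onto the promotion queue, and draining the promotion queue, which may in turn copy more objects into to-space. The sketch below is a minimal, self-contained C++ model of just that control flow; ToyScavenger, WorkItem, and everything else in it are invented stand-ins, not V8 API.

#include <cstdio>
#include <deque>
#include <vector>

struct WorkItem { int id; };

struct ToyScavenger {
  std::vector<WorkItem> to_space;    // stands in for [new_space_front, top)
  std::deque<WorkItem> promotion_queue;
  size_t front = 0;                  // stands in for new_space_front

  void Drain() {
    do {
      // Phase 1: scan objects newly copied into to-space. In V8 this can
      // promote objects, i.e. push entries onto the promotion queue.
      while (front < to_space.size()) {
        std::printf("scavenge to-space object %d\n", to_space[front++].id);
      }
      // Phase 2: scan promoted objects. In V8 this can copy more objects
      // into to-space, which is why we loop back afterwards.
      while (!promotion_queue.empty()) {
        std::printf("scan promoted object %d\n", promotion_queue.front().id);
        promotion_queue.pop_front();
      }
    } while (front < to_space.size());  // did phase 2 create new work?
  }
};

int main() {
  ToyScavenger s;
  s.to_space = {{1}, {2}};
  s.promotion_queue = {{10}};
  s.Drain();
}

The do/while condition plays the role of new_space_front != new_space_->top(): phase 2 can create new to-space work, so the loop only exits after a pass in which neither source produced anything.
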
(...skipping 2793 matching lines...)
4716 if (!new_space_->IsFromSpaceCommitted()) return; | 4716 if (!new_space_->IsFromSpaceCommitted()) return; |
4717 for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(), | 4717 for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(), |
4718 new_space_->FromSpaceEnd())) { | 4718 new_space_->FromSpaceEnd())) { |
4719 for (Address cursor = page->area_start(), limit = page->area_end(); | 4719 for (Address cursor = page->area_start(), limit = page->area_end(); |
4720 cursor < limit; cursor += kPointerSize) { | 4720 cursor < limit; cursor += kPointerSize) { |
4721 Memory::Address_at(cursor) = kFromSpaceZapValue; | 4721 Memory::Address_at(cursor) = kFromSpaceZapValue; |
4722 } | 4722 } |
4723 } | 4723 } |
4724 } | 4724 } |
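For context on ZapFromSpace above: once a scavenge completes, the from-semispace holds only dead copies, and overwriting every word with kFromSpaceZapValue makes any lingering pointer into the region fail recognizably in a debugger or crash dump rather than read stale data. A minimal standalone sketch of the same zapping pattern, with an invented constant and names in place of V8's:

#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical sentinel; V8 uses kFromSpaceZapValue for the same purpose.
constexpr uintptr_t kToyZapValue = 0xdeadbeef;

// Overwrite every pointer-sized word of a released region so that reads
// through dangling pointers fail loudly instead of returning stale data.
void ZapRegion(std::vector<uintptr_t>& region) {
  for (uintptr_t& word : region) word = kToyZapValue;
}

int main() {
  std::vector<uintptr_t> region(4);
  ZapRegion(region);
  std::printf("first word: 0x%lx\n", static_cast<unsigned long>(region[0]));
}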
4725 | 4725 |
4726 void Heap::IteratePromotedObjectPointers(HeapObject* object, Address start, | 4726 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { |
4727 Address end, bool record_slots, | 4727 public: |
4728 ObjectSlotCallback callback) { | 4728 IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target, |
4729 Address slot_address = start; | 4729 bool record_slots) |
4730 Page* page = Page::FromAddress(start); | 4730 : heap_(heap), target_(target), record_slots_(record_slots) {} |
4731 | 4731 |
4732 while (slot_address < end) { | 4732 inline void VisitPointers(Object** start, Object** end) override { |
4733 Object** slot = reinterpret_cast<Object**>(slot_address); | 4733 Address slot_address = reinterpret_cast<Address>(start); |
4734 Object* target = *slot; | 4734 Page* page = Page::FromAddress(slot_address); |
4735 if (target->IsHeapObject()) { | 4735 |
4736 if (Heap::InFromSpace(target)) { | 4736 while (slot_address < reinterpret_cast<Address>(end)) { |
4737 callback(reinterpret_cast<HeapObject**>(slot), | 4737 Object** slot = reinterpret_cast<Object**>(slot_address); |
4738 HeapObject::cast(target)); | 4738 Object* target = *slot; |
4739 Object* new_target = *slot; | 4739 |
4740 if (InNewSpace(new_target)) { | 4740 if (target->IsHeapObject()) { |
4741 SLOW_DCHECK(Heap::InToSpace(new_target)); | 4741 if (heap_->InFromSpace(target)) { |
4742 SLOW_DCHECK(new_target->IsHeapObject()); | 4742 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot), |
4743 RememberedSet<OLD_TO_NEW>::Insert(page, slot_address); | 4743 HeapObject::cast(target)); |
| 4744 target = *slot; |
| 4745 if (heap_->InNewSpace(target)) { |
| 4746 SLOW_DCHECK(heap_->InToSpace(target)); |
| 4747 SLOW_DCHECK(target->IsHeapObject()); |
| 4748 RememberedSet<OLD_TO_NEW>::Insert(page, slot_address); |
| 4749 } |
| 4750 SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate( |
| 4751 HeapObject::cast(target))); |
| 4752 } else if (record_slots_ && |
| 4753 MarkCompactCollector::IsOnEvacuationCandidate( |
| 4754 HeapObject::cast(target))) { |
| 4755 heap_->mark_compact_collector()->RecordSlot(target_, slot, target); |
4744 } | 4756 } |
4745 SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_target)); | |
4746 } else if (record_slots && | |
4747 MarkCompactCollector::IsOnEvacuationCandidate(target)) { | |
4748 mark_compact_collector()->RecordSlot(object, slot, target); | |
4749 } | 4757 } |
| 4758 |
| 4759 slot_address += kPointerSize; |
4750 } | 4760 } |
4751 slot_address += kPointerSize; | |
4752 } | |
4753 } | |
4754 | |
4755 class IteratePromotedObjectsVisitor final : public ObjectVisitor { | |
4756 public: | |
4757 IteratePromotedObjectsVisitor(Heap* heap, HeapObject* target, | |
4758 bool record_slots, ObjectSlotCallback callback) | |
4759 : heap_(heap), | |
4760 target_(target), | |
4761 record_slots_(record_slots), | |
4762 callback_(callback) {} | |
4763 | |
4764 V8_INLINE void VisitPointers(Object** start, Object** end) override { | |
4765 heap_->IteratePromotedObjectPointers( | |
4766 target_, reinterpret_cast<Address>(start), | |
4767 reinterpret_cast<Address>(end), record_slots_, callback_); | |
4768 } | 4761 } |
4769 | 4762 |
4770 V8_INLINE void VisitCodeEntry(Address code_entry_slot) override { | 4763 inline void VisitCodeEntry(Address code_entry_slot) override { |
4771 // Black allocation requires us to process objects referenced by | 4764 // Black allocation requires us to process objects referenced by |
4772 // promoted objects. | 4765 // promoted objects. |
4773 if (heap_->incremental_marking()->black_allocation()) { | 4766 if (heap_->incremental_marking()->black_allocation()) { |
4774 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 4767 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
4775 IncrementalMarking::MarkGrey(heap_, code); | 4768 IncrementalMarking::MarkGrey(heap_, code); |
4776 } | 4769 } |
4777 } | 4770 } |
4778 | 4771 |
4779 private: | 4772 private: |
4780 Heap* heap_; | 4773 Heap* heap_; |
4781 HeapObject* target_; | 4774 HeapObject* target_; |
4782 bool record_slots_; | 4775 bool record_slots_; |
4783 ObjectSlotCallback callback_; | |
4784 }; | 4776 }; |
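The visitor above folds the old IteratePromotedObjectPointers loop into VisitPointers, and its decision tree carries the whole change: a from-space target is scavenged through the slot; if the copy still lives in new space, the slot goes into the OLD_TO_NEW remembered set (the holder has just moved to old space and would otherwise be missed by the next scavenge); otherwise, when compacting, a pointer into an evacuation candidate is recorded via RecordSlot. Below is a simplified, self-contained model of that decision structure only; ToyHeap, ToyObject, and the rest are invented for illustration.

#include <cstdio>
#include <set>

enum class Space { kFromSpace, kToSpace, kOldSpace };

struct ToyObject {
  Space space;
  bool on_evacuation_candidate = false;
};
using Slot = ToyObject**;

struct ToyHeap {
  std::set<Slot> old_to_new;   // stands in for RememberedSet<OLD_TO_NEW>
  bool record_slots = false;   // true iff compacting and the holder is black

  // Toy "scavenge": a survivor either stays in new space (copied into
  // to-space) or is promoted to old space.
  static void Evacuate(Slot slot, bool promote) {
    (*slot)->space = promote ? Space::kOldSpace : Space::kToSpace;
  }

  void VisitSlot(Slot slot, bool promote_target) {
    ToyObject* target = *slot;
    if (target->space == Space::kFromSpace) {
      Evacuate(slot, promote_target);
      // The holder itself was just promoted to old space, so a target that
      // stayed in new space must be remembered for the next scavenge.
      if ((*slot)->space == Space::kToSpace) old_to_new.insert(slot);
    } else if (record_slots && target->on_evacuation_candidate) {
      // Stand-in for RecordSlot(): old-to-old pointers into pages the
      // mark-compactor will evacuate are recorded as well.
      std::printf("recorded slot into evacuation candidate\n");
    }
  }
};

int main() {
  ToyObject young{Space::kFromSpace};
  ToyObject* field = &young;   // one pointer field of a promoted object
  ToyHeap heap;
  heap.VisitSlot(&field, /*promote_target=*/false);
  std::printf("remembered %zu old-to-new slot(s)\n", heap.old_to_new.size());
}
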
4785 | 4777 |
4786 void Heap::IteratePromotedObject(HeapObject* target, int size, | 4778 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, |
4787 bool was_marked_black, | 4779 bool was_marked_black) { |
4788 ObjectSlotCallback callback) { | |
4789 // We are not collecting slots on new space objects during mutation, | 4780 // We are not collecting slots on new space objects during mutation, |
4790 // thus we have to scan for pointers to evacuation candidates when we | 4781 // thus we have to scan for pointers to evacuation candidates when we |
4791 // promote objects. But we should not record any slots in non-black | 4782 // promote objects. But we should not record any slots in non-black |
4792 // objects. Grey objects' slots would be rescanned. | 4783 // objects. Grey objects' slots would be rescanned. |
4793 // A white object might not survive until the end of collection, so | 4784 // A white object might not survive until the end of collection, so |
4794 // it would be a violation of the invariant to record its slots. | 4785 // it would be a violation of the invariant to record its slots. |
4795 bool record_slots = false; | 4786 bool record_slots = false; |
4796 if (incremental_marking()->IsCompacting()) { | 4787 if (incremental_marking()->IsCompacting()) { |
4797 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); | 4788 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); |
4798 record_slots = Marking::IsBlack(mark_bit); | 4789 record_slots = Marking::IsBlack(mark_bit); |
4799 } | 4790 } |
4800 | 4791 |
4801 IteratePromotedObjectsVisitor visitor(this, target, record_slots, callback); | 4792 IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); |
4802 target->IterateBody(target->map()->instance_type(), size, &visitor); | 4793 if (target->IsJSFunction()) { |
| 4794 // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for |
| 4795 // these links are recorded during processing of weak lists. |
| 4796 JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); |
| 4797 } else { |
| 4798 target->IterateBody(target->map()->instance_type(), size, &visitor); |
| 4799 } |
4803 | 4800 |
4804 // When black allocation is on, we have to visit new space objects | 4801 // When black allocation is on, we have to visit new space objects |
4805 // that were promoted to black pages but are not yet marked black, to | 4802 // that were promoted to black pages but are not yet marked black, to |
4806 // keep their references alive. | 4803 // keep their references alive. |
4807 // TODO(hpayer): Implement a special promotion visitor that incorporates | 4804 // TODO(hpayer): Implement a special promotion visitor that incorporates |
4808 // regular visiting and IteratePromotedObjectPointers. | 4805 // regular visiting and IteratePromotedObjectPointers. |
4809 if (!was_marked_black) { | 4806 if (!was_marked_black) { |
4810 if (incremental_marking()->black_allocation()) { | 4807 if (incremental_marking()->black_allocation()) { |
4811 IncrementalMarking::MarkGrey(this, target->map()); | 4808 IncrementalMarking::MarkGrey(this, target->map()); |
4812 incremental_marking()->IterateBlackObject(target); | 4809 incremental_marking()->IterateBlackObject(target); |
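On the JSFunction branch above: per the comment, functions reachable through kNextFunctionLinkOffset are weak and their slots are recorded during weak-list processing, so the scavenger must iterate the body without treating that link as a strong slot. A minimal sketch of the underlying "visit every field except a designated weak one" idea, with hypothetical offsets and names:

#include <cstdio>

struct ToyFunction {
  void* fields[4];                       // pretend index 2 is the weak link
  static constexpr int kWeakFieldIndex = 2;
};

// Visit every pointer field except the weak one, which is owned by
// weak-list processing rather than the scavenger.
template <typename Visitor>
void IterateBodySkippingWeakField(ToyFunction* f, Visitor&& visit) {
  for (int i = 0; i < 4; ++i) {
    if (i == ToyFunction::kWeakFieldIndex) continue;  // weak: skip here
    visit(&f->fields[i]);
  }
}

int main() {
  ToyFunction f{};
  IterateBodySkippingWeakField(&f, [](void** slot) {
    std::printf("visit strong slot at %p\n", static_cast<void*>(slot));
  });
}
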
(...skipping 1653 matching lines...)
6466 } | 6463 } |
6467 | 6464 |
6468 | 6465 |
6469 // static | 6466 // static |
6470 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6467 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6471 return StaticVisitorBase::GetVisitorId(map); | 6468 return StaticVisitorBase::GetVisitorId(map); |
6472 } | 6469 } |
6473 | 6470 |
6474 } // namespace internal | 6471 } // namespace internal |
6475 } // namespace v8 | 6472 } // namespace v8 |