OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/assembler-inl.h" | 9 #include "src/assembler-inl.h" |
10 #include "src/ast/context-slot-cache.h" | 10 #include "src/ast/context-slot-cache.h" |
(...skipping 1119 matching lines...)
1130 Object** dst_objects = array->data_start() + dst_index; | 1130 Object** dst_objects = array->data_start() + dst_index; |
1131 MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize); | 1131 MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize); |
1132 FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len); | 1132 FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len); |
1133 } | 1133 } |
1134 | 1134 |
1135 | 1135 |
1136 #ifdef VERIFY_HEAP | 1136 #ifdef VERIFY_HEAP |
1137 // Helper class for verifying the string table. | 1137 // Helper class for verifying the string table. |
1138 class StringTableVerifier : public ObjectVisitor { | 1138 class StringTableVerifier : public ObjectVisitor { |
1139 public: | 1139 public: |
1140 void VisitPointers(Object** start, Object** end) override { | 1140 void VisitPointers(HeapObject* host, Object** start, Object** end) override { |
1141 // Visit all HeapObject pointers in [start, end). | 1141 // Visit all HeapObject pointers in [start, end). |
1142 for (Object** p = start; p < end; p++) { | 1142 for (Object** p = start; p < end; p++) { |
1143 if ((*p)->IsHeapObject()) { | 1143 if ((*p)->IsHeapObject()) { |
1144 HeapObject* object = HeapObject::cast(*p); | 1144 HeapObject* object = HeapObject::cast(*p); |
1145 Isolate* isolate = object->GetIsolate(); | 1145 Isolate* isolate = object->GetIsolate(); |
1146 // Check that the string is actually internalized. | 1146 // Check that the string is actually internalized. |
1147 CHECK(object->IsTheHole(isolate) || object->IsUndefined(isolate) || | 1147 CHECK(object->IsTheHole(isolate) || object->IsUndefined(isolate) || |
1148 object->IsInternalizedString()); | 1148 object->IsInternalizedString()); |
1149 } | 1149 } |
1150 } | 1150 } |
(...skipping 595 matching lines...)
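The recurring edit in this hunk and in the ones below is mechanical: an extra `HeapObject* host` argument is threaded into every `ObjectVisitor` pointer callback. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the real V8 classes:

```cpp
#include <cstdio>
#include <vector>

// Simplified stand-ins for the V8 types; the real classes live in
// src/objects.h and friends.
struct Object {};
struct HeapObject : Object {};

// Pointer-visiting callbacks now also receive the object that holds the
// visited slots (the "host").
class ObjectVisitor {
 public:
  virtual ~ObjectVisitor() = default;
  virtual void VisitPointers(HeapObject* host, Object** start,
                             Object** end) = 0;
};

// A checker in the spirit of StringTableVerifier: walk the host's slots and
// verify a per-slot invariant, reporting the owning object on failure.
class SlotVerifier : public ObjectVisitor {
 public:
  void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) {
      if (*p == nullptr) {
        std::printf("bad slot in host %p\n", static_cast<void*>(host));
      }
    }
  }
};

int main() {
  HeapObject host;
  Object a, b;
  std::vector<Object*> slots = {&a, nullptr, &b};
  SlotVerifier verifier;
  verifier.VisitPointers(&host, slots.data(), slots.data() + slots.size());
}
```

With the host in hand, a visitor can attribute each slot to the object that owns it, which is what the `DCHECK_EQ(host, target_)` and the TODO in the later hunks build on.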
1746 { | 1746 { |
1747 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); | 1747 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_WEAK); |
1748 IterateEncounteredWeakCollections(&root_scavenge_visitor); | 1748 IterateEncounteredWeakCollections(&root_scavenge_visitor); |
1749 } | 1749 } |
1750 | 1750 |
1751 { | 1751 { |
1752 // Copy objects reachable from the code flushing candidates list. | 1752 // Copy objects reachable from the code flushing candidates list. |
1753 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); | 1753 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_CODE_FLUSH_CANDIDATES); |
1754 MarkCompactCollector* collector = mark_compact_collector(); | 1754 MarkCompactCollector* collector = mark_compact_collector(); |
1755 if (collector->is_code_flushing_enabled()) { | 1755 if (collector->is_code_flushing_enabled()) { |
1756 collector->code_flusher()->IteratePointersToFromSpace( | 1756 collector->code_flusher()->VisitListHeads(&root_scavenge_visitor); |
ulan 2017/04/10 10:59:37
This is the only non-mechanical change: we first v...
1757 &root_scavenge_visitor); | 1757 collector->code_flusher() |
| 1758 ->IteratePointersToFromSpace<StaticScavengeVisitor>(); |
1758 } | 1759 } |
1759 } | 1760 } |
1760 | 1761 |
1761 { | 1762 { |
1762 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); | 1763 TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SEMISPACE); |
1763 new_space_front = DoScavenge(new_space_front); | 1764 new_space_front = DoScavenge(new_space_front); |
1764 } | 1765 } |
1765 | 1766 |
1766 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( | 1767 isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending( |
1767 &IsUnscavengedHeapObject); | 1768 &IsUnscavengedHeapObject); |
(...skipping 3046 matching lines...)
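As the reviewer comment above notes, this is the only non-mechanical change: the code flusher's list heads are first visited with the root scavenge visitor, and only afterwards are pointers into from-space iterated with the static scavenge visitor. A rough sketch of that two-phase ordering, with hypothetical simplified types rather than the real `CodeFlusher`:

```cpp
#include <cstdio>
#include <utility>
#include <vector>

// Hypothetical stand-ins used only to illustrate the two-phase traversal;
// the real CodeFlusher lives in src/heap/mark-compact.h.
struct HeapObject {
  std::vector<HeapObject*> from_space_refs;  // pointers into from-space
};

class RootVisitor {
 public:
  void VisitRootPointer(HeapObject** slot) {
    std::printf("scavenge list head %p\n", static_cast<void*>(*slot));
  }
};

class FakeCodeFlusher {
 public:
  explicit FakeCodeFlusher(std::vector<HeapObject*> heads)
      : list_heads_(std::move(heads)) {}

  // Phase 1: only the list head slots are handed to the root visitor.
  void VisitListHeads(RootVisitor* v) {
    for (HeapObject*& head : list_heads_) v->VisitRootPointer(&head);
  }

  // Phase 2: objects reachable from the heads are scanned for from-space
  // pointers, mimicking IteratePointersToFromSpace<StaticScavengeVisitor>().
  void IteratePointersToFromSpace() {
    for (HeapObject* head : list_heads_) {
      for (HeapObject* ref : head->from_space_refs) {
        std::printf("scavenge from-space ref %p\n", static_cast<void*>(ref));
      }
    }
  }

 private:
  std::vector<HeapObject*> list_heads_;
};

int main() {
  HeapObject target;
  HeapObject head{{&target}};
  FakeCodeFlusher flusher({&head});
  RootVisitor roots;
  flusher.VisitListHeads(&roots);        // first: the heads, as roots
  flusher.IteratePointersToFromSpace();  // then: pointers into from-space
}
```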
4814 } | 4815 } |
4815 } | 4816 } |
4816 } | 4817 } |
4817 | 4818 |
4818 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { | 4819 class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor { |
4819 public: | 4820 public: |
4820 IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target, | 4821 IterateAndScavengePromotedObjectsVisitor(Heap* heap, HeapObject* target, |
4821 bool record_slots) | 4822 bool record_slots) |
4822 : heap_(heap), target_(target), record_slots_(record_slots) {} | 4823 : heap_(heap), target_(target), record_slots_(record_slots) {} |
4823 | 4824 |
4824 inline void VisitPointers(Object** start, Object** end) override { | 4825 inline void VisitPointers(HeapObject* host, Object** start, |
| 4826 Object** end) override { |
| 4827 DCHECK_EQ(host, target_); |
4825 Address slot_address = reinterpret_cast<Address>(start); | 4828 Address slot_address = reinterpret_cast<Address>(start); |
4826 Page* page = Page::FromAddress(slot_address); | 4829 Page* page = Page::FromAddress(slot_address); |
4827 | 4830 |
4828 while (slot_address < reinterpret_cast<Address>(end)) { | 4831 while (slot_address < reinterpret_cast<Address>(end)) { |
4829 Object** slot = reinterpret_cast<Object**>(slot_address); | 4832 Object** slot = reinterpret_cast<Object**>(slot_address); |
4830 Object* target = *slot; | 4833 Object* target = *slot; |
4831 | 4834 |
4832 if (target->IsHeapObject()) { | 4835 if (target->IsHeapObject()) { |
4833 if (heap_->InFromSpace(target)) { | 4836 if (heap_->InFromSpace(target)) { |
4834 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot), | 4837 Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(slot), |
(...skipping 10 matching lines...)
4845 MarkCompactCollector::IsOnEvacuationCandidate( | 4848 MarkCompactCollector::IsOnEvacuationCandidate( |
4846 HeapObject::cast(target))) { | 4849 HeapObject::cast(target))) { |
4847 heap_->mark_compact_collector()->RecordSlot(target_, slot, target); | 4850 heap_->mark_compact_collector()->RecordSlot(target_, slot, target); |
4848 } | 4851 } |
4849 } | 4852 } |
4850 | 4853 |
4851 slot_address += kPointerSize; | 4854 slot_address += kPointerSize; |
4852 } | 4855 } |
4853 } | 4856 } |
4854 | 4857 |
4855 inline void VisitCodeEntry(Address code_entry_slot) override { | 4858 inline void VisitCodeEntry(JSFunction* host, |
| 4859 Address code_entry_slot) override { |
4856 // Black allocation requires us to process objects referenced by | 4860 // Black allocation requires us to process objects referenced by |
4857 // promoted objects. | 4861 // promoted objects. |
4858 if (heap_->incremental_marking()->black_allocation()) { | 4862 if (heap_->incremental_marking()->black_allocation()) { |
4859 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 4863 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
4860 IncrementalMarking::MarkGrey(heap_, code); | 4864 IncrementalMarking::MarkGrey(heap_, code); |
4861 } | 4865 } |
4862 } | 4866 } |
4863 | 4867 |
4864 private: | 4868 private: |
4865 Heap* heap_; | 4869 Heap* heap_; |
4866 HeapObject* target_; | 4870 HeapObject* target_; |
4867 bool record_slots_; | 4871 bool record_slots_; |
4868 }; | 4872 }; |
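The `VisitPointers` loop in the visitor above makes a per-slot decision: a pointer into from-space is scavenged (the slot is redirected to the object's new location), while a pointer to an evacuation candidate is recorded for the compactor when `record_slots_` is set. A self-contained sketch of that decision structure, with stand-in bookkeeping in place of the real `Heap`, `Scavenger`, and `MarkCompactCollector`:

```cpp
#include <cstdio>
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct Obj {};

// Stand-in heap bookkeeping; the real checks are Heap::InFromSpace,
// Scavenger::ScavengeObject and MarkCompactCollector::RecordSlot.
struct FakeHeap {
  std::unordered_map<Obj*, Obj*> forwarding;    // from-space object -> copy
  std::unordered_set<Obj*> evacuation_candidates;

  bool InFromSpace(Obj* o) const { return forwarding.count(o) != 0; }
  bool IsOnEvacuationCandidate(Obj* o) const {
    return evacuation_candidates.count(o) != 0;
  }
};

struct RecordedSlot { Obj* host; Obj** slot; };

void IterateAndScavengeSlots(FakeHeap* heap, Obj* host, Obj** start, Obj** end,
                             bool record_slots,
                             std::vector<RecordedSlot>* recorded) {
  for (Obj** slot = start; slot < end; slot++) {
    Obj* target = *slot;
    if (target == nullptr) continue;
    if (heap->InFromSpace(target)) {
      // "Scavenge": redirect the slot to the already-copied object.
      *slot = heap->forwarding[target];
    } else if (record_slots && heap->IsOnEvacuationCandidate(target)) {
      // Remember the slot so the compactor can update it later.
      recorded->push_back({host, slot});
    }
  }
}

int main() {
  FakeHeap heap;
  Obj old_obj, new_copy, candidate;
  heap.forwarding[&old_obj] = &new_copy;
  heap.evacuation_candidates.insert(&candidate);

  Obj host;
  Obj* slots[] = {&old_obj, &candidate, nullptr};
  std::vector<RecordedSlot> recorded;
  IterateAndScavengeSlots(&heap, &host, slots, slots + 3,
                          /*record_slots=*/true, &recorded);

  std::printf("slot0 now points to copy: %d\n", slots[0] == &new_copy);
  std::printf("recorded slots: %zu\n", recorded.size());
}
```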
4869 | 4873 |
4870 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, | 4874 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, |
4871 bool was_marked_black) { | 4875 bool was_marked_black) { |
4872 // We are not collecting slots on new space objects during mutation | 4876 // We are not collecting slots on new space objects during mutation |
4873 // thus we have to scan for pointers to evacuation candidates when we | 4877 // thus we have to scan for pointers to evacuation candidates when we |
4874 // promote objects. But we should not record any slots in non-black | 4878 // promote objects. But we should not record any slots in non-black |
4875 // objects. Grey objects' slots would be rescanned. | 4879 // objects. Grey objects' slots would be rescanned. |
4876 // White objects might not survive until the end of the collection, so | 4880 // White objects might not survive until the end of the collection, so |
4877 // it would be a violation of the invariant to record their slots. | 4881 // it would be a violation of the invariant to record their slots. |
4878 bool record_slots = false; | 4882 bool record_slots = false; |
4879 if (incremental_marking()->IsCompacting()) { | 4883 if (incremental_marking()->IsCompacting()) { |
4880 record_slots = | 4884 record_slots = |
4881 ObjectMarking::IsBlack(target, MarkingState::Internal(target)); | 4885 ObjectMarking::IsBlack(target, MarkingState::Internal(target)); |
4882 } | 4886 } |
4883 | 4887 |
| 4888 // TODO(ulan): remove the target, the visitor now gets the host object |
| 4889 // in each visit method. |
4884 IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); | 4890 IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); |
4885 if (target->IsJSFunction()) { | 4891 if (target->IsJSFunction()) { |
4886 // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for | 4892 // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for |
4887 // these links are recorded during processing of weak lists. | 4893 // these links are recorded during processing of weak lists. |
4888 JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); | 4894 JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); |
4889 } else { | 4895 } else { |
4890 target->IterateBody(target->map()->instance_type(), size, &visitor); | 4896 target->IterateBody(target->map()->instance_type(), size, &visitor); |
4891 } | 4897 } |
4892 | 4898 |
4893 // When black allocations is on, we have to visit not already marked black | 4899 // When black allocations is on, we have to visit not already marked black |
(...skipping 1224 matching lines...)
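The TODO in this hunk points at the clean-up the new signature makes possible: since every callback now receives the host, the visitor no longer needs to carry the promoted object in its `target_` field. A hedged sketch of what such a follow-up could look like, again with simplified stand-in types rather than an actual patch:

```cpp
#include <cassert>
#include <vector>

// Simplified stand-ins; in V8 these are HeapObject/Object and slot
// recording goes through MarkCompactCollector::RecordSlot.
struct Object {};
struct HeapObject : Object {};

struct RecordedSlot {
  HeapObject* host;  // the promoted object that owns the slot
  Object** slot;
};

class PromotedObjectVisitor {
 public:
  explicit PromotedObjectVisitor(bool record_slots)
      : record_slots_(record_slots) {}

  // Note: no target_ member. The host arrives with every call, so each
  // slot can be attributed to its owner directly.
  void VisitPointers(HeapObject* host, Object** start, Object** end) {
    for (Object** slot = start; slot < end; slot++) {
      if (record_slots_ && *slot != nullptr) {
        recorded_.push_back({host, slot});
      }
    }
  }

  const std::vector<RecordedSlot>& recorded() const { return recorded_; }

 private:
  bool record_slots_;
  std::vector<RecordedSlot> recorded_;
};

int main() {
  HeapObject promoted;
  Object value;
  Object* slots[] = {&value, nullptr};
  PromotedObjectVisitor visitor(/*record_slots=*/true);
  visitor.VisitPointers(&promoted, slots, slots + 2);
  assert(visitor.recorded().size() == 1);
  assert(visitor.recorded()[0].host == &promoted);
}
```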
6118 bool SkipObject(HeapObject* object) { | 6124 bool SkipObject(HeapObject* object) { |
6119 if (object->IsFiller()) return true; | 6125 if (object->IsFiller()) return true; |
6120 return ObjectMarking::IsWhite(object, MarkingState::Internal(object)); | 6126 return ObjectMarking::IsWhite(object, MarkingState::Internal(object)); |
6121 } | 6127 } |
6122 | 6128 |
6123 private: | 6129 private: |
6124 class MarkingVisitor : public ObjectVisitor, public RootVisitor { | 6130 class MarkingVisitor : public ObjectVisitor, public RootVisitor { |
6125 public: | 6131 public: |
6126 MarkingVisitor() : marking_stack_(10) {} | 6132 MarkingVisitor() : marking_stack_(10) {} |
6127 | 6133 |
6128 void VisitPointers(Object** start, Object** end) override { | 6134 void VisitPointers(HeapObject* host, Object** start, |
| 6135 Object** end) override { |
6129 MarkPointers(start, end); | 6136 MarkPointers(start, end); |
6130 } | 6137 } |
6131 | 6138 |
6132 void VisitRootPointers(Root root, Object** start, Object** end) override { | 6139 void VisitRootPointers(Root root, Object** start, Object** end) override { |
6133 MarkPointers(start, end); | 6140 MarkPointers(start, end); |
6134 } | 6141 } |
6135 | 6142 |
6136 void TransitiveClosure() { | 6143 void TransitiveClosure() { |
6137 while (!marking_stack_.is_empty()) { | 6144 while (!marking_stack_.is_empty()) { |
6138 HeapObject* obj = marking_stack_.RemoveLast(); | 6145 HeapObject* obj = marking_stack_.RemoveLast(); |
(...skipping 276 matching lines...)
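The `MarkingVisitor` above drives reachability with an explicit marking stack: the root and object callbacks push objects that have not been seen yet, and `TransitiveClosure` pops and rescans until the stack is empty. A compact standalone illustration of that worklist pattern in plain C++ (not the V8 classes):

```cpp
#include <cstdio>
#include <unordered_set>
#include <vector>

// Minimal stand-in object graph; the real code walks HeapObject bodies via
// ObjectVisitor/RootVisitor callbacks.
struct Node {
  std::vector<Node*> children;
};

class Marker {
 public:
  // Corresponds to VisitRootPointers/VisitPointers: mark and push anything
  // not seen before.
  void MarkPointers(Node* const* start, Node* const* end) {
    for (Node* const* p = start; p < end; p++) {
      if (*p != nullptr && marked_.insert(*p).second) {
        marking_stack_.push_back(*p);
      }
    }
  }

  // Corresponds to TransitiveClosure: drain the stack, scanning each popped
  // object for further pointers.
  void TransitiveClosure() {
    while (!marking_stack_.empty()) {
      Node* obj = marking_stack_.back();
      marking_stack_.pop_back();
      MarkPointers(obj->children.data(),
                   obj->children.data() + obj->children.size());
    }
  }

  bool IsMarked(Node* n) const { return marked_.count(n) != 0; }

 private:
  std::vector<Node*> marking_stack_;
  std::unordered_set<Node*> marked_;
};

int main() {
  Node leaf, mid{{&leaf}}, root{{&mid}};
  Node* roots[] = {&root};
  Marker marker;
  marker.MarkPointers(roots, roots + 1);  // roots first
  marker.TransitiveClosure();             // then everything reachable
  std::printf("leaf marked: %d\n", marker.IsMarked(&leaf));
}
```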
6415 } | 6422 } |
6416 | 6423 |
6417 | 6424 |
6418 // static | 6425 // static |
6419 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6426 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6420 return StaticVisitorBase::GetVisitorId(map); | 6427 return StaticVisitorBase::GetVisitorId(map); |
6421 } | 6428 } |
6422 | 6429 |
6423 } // namespace internal | 6430 } // namespace internal |
6424 } // namespace v8 | 6431 } // namespace v8 |