Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/assembler-inl.h" | 9 #include "src/assembler-inl.h" |
| 10 #include "src/ast/context-slot-cache.h" | 10 #include "src/ast/context-slot-cache.h" |
| (...skipping 3184 matching lines...) | |
| 3195 // Calculate location of new array start. | 3195 // Calculate location of new array start. |
| 3196 Address old_start = object->address(); | 3196 Address old_start = object->address(); |
| 3197 Address new_start = old_start + bytes_to_trim; | 3197 Address new_start = old_start + bytes_to_trim; |
| 3198 | 3198 |
| 3199 // Transfer the mark bits to their new location if the object is not within | 3199 // Transfer the mark bits to their new location if the object is not within |
| 3200 // a black area. | 3200 // a black area. |
| 3201 if (!incremental_marking()->black_allocation() || | 3201 if (!incremental_marking()->black_allocation() || |
| 3202 !Marking::IsBlack(ObjectMarking::MarkBitFrom( | 3202 !Marking::IsBlack(ObjectMarking::MarkBitFrom( |
| 3203 HeapObject::FromAddress(new_start), | 3203 HeapObject::FromAddress(new_start), |
| 3204 MarkingState::Internal(HeapObject::FromAddress(new_start))))) { | 3204 MarkingState::Internal(HeapObject::FromAddress(new_start))))) { |
| 3205 IncrementalMarking::TransferMark(this, object, | 3205 incremental_marking()->TransferMark(this, object, |
| 3206 HeapObject::FromAddress(new_start)); | 3206 HeapObject::FromAddress(new_start)); |
| 3207 } | 3207 } |
| 3208 | 3208 |
| 3209 // Technically in new space this write might be omitted (except for | 3209 // Technically in new space this write might be omitted (except for |
| 3210 // debug mode which iterates through the heap), but to play safer | 3210 // debug mode which iterates through the heap), but to play safer |
| 3211 // we still do it. | 3211 // we still do it. |
| 3212 CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes); | 3212 CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes); |
| 3213 | 3213 |
| 3214 // Initialize header of the trimmed array. Since left trimming is only | 3214 // Initialize header of the trimmed array. Since left trimming is only |
| 3215 // performed on pages which are not concurrently swept creating a filler | 3215 // performed on pages which are not concurrently swept creating a filler |
| 3216 // object does not require synchronization. | 3216 // object does not require synchronization. |
| (...skipping 1631 matching lines...) | |
| 4848 slot_address += kPointerSize; | 4848 slot_address += kPointerSize; |
| 4849 } | 4849 } |
| 4850 } | 4850 } |
| 4851 | 4851 |
| 4852 inline void VisitCodeEntry(JSFunction* host, | 4852 inline void VisitCodeEntry(JSFunction* host, |
| 4853 Address code_entry_slot) override { | 4853 Address code_entry_slot) override { |
| 4854 // Black allocation requires us to process objects referenced by | 4854 // Black allocation requires us to process objects referenced by |
| 4855 // promoted objects. | 4855 // promoted objects. |
| 4856 if (heap_->incremental_marking()->black_allocation()) { | 4856 if (heap_->incremental_marking()->black_allocation()) { |
| 4857 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); | 4857 Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot)); |
| 4858 IncrementalMarking::MarkGrey(heap_, code); | 4858 heap_->incremental_marking()->MarkGrey(heap_, code); |
| 4859 } | 4859 } |
| 4860 } | 4860 } |
| 4861 | 4861 |
| 4862 private: | 4862 private: |
| 4863 Heap* heap_; | 4863 Heap* heap_; |
| 4864 HeapObject* target_; | 4864 HeapObject* target_; |
| 4865 bool record_slots_; | 4865 bool record_slots_; |
| 4866 }; | 4866 }; |
| 4867 | 4867 |
| 4868 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, | 4868 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, |
| (...skipping 21 matching lines...) | |
| 4890 target->IterateBody(target->map()->instance_type(), size, &visitor); | 4890 target->IterateBody(target->map()->instance_type(), size, &visitor); |
| 4891 } | 4891 } |
| 4892 | 4892 |
| 4893 // When black allocations is on, we have to visit not already marked black | 4893 // When black allocations is on, we have to visit not already marked black |
| 4894 // objects (in new space) promoted to black pages to keep their references | 4894 // objects (in new space) promoted to black pages to keep their references |
| 4895 // alive. | 4895 // alive. |
| 4896 // TODO(hpayer): Implement a special promotion visitor that incorporates | 4896 // TODO(hpayer): Implement a special promotion visitor that incorporates |
| 4897 // regular visiting and IteratePromotedObjectPointers. | 4897 // regular visiting and IteratePromotedObjectPointers. |
| 4898 if (!was_marked_black) { | 4898 if (!was_marked_black) { |
| 4899 if (incremental_marking()->black_allocation()) { | 4899 if (incremental_marking()->black_allocation()) { |
| 4900 IncrementalMarking::MarkGrey(this, target->map()); | 4900 incremental_marking()->MarkGrey(this, target->map()); |
| 4901 incremental_marking()->IterateBlackObject(target); | 4901 incremental_marking()->IterateBlackObject(target); |
| 4902 } | 4902 } |
| 4903 } | 4903 } |
| 4904 } | 4904 } |
| 4905 | 4905 |
| 4906 void Heap::IterateRoots(RootVisitor* v, VisitMode mode) { | 4906 void Heap::IterateRoots(RootVisitor* v, VisitMode mode) { |
| 4907 IterateStrongRoots(v, mode); | 4907 IterateStrongRoots(v, mode); |
| 4908 IterateWeakRoots(v, mode); | 4908 IterateWeakRoots(v, mode); |
| 4909 } | 4909 } |
| 4910 | 4910 |
| (...skipping 599 matching lines...) | |
| 5510 | 5510 |
| 5511 // Set up memory allocator. | 5511 // Set up memory allocator. |
| 5512 memory_allocator_ = new MemoryAllocator(isolate_); | 5512 memory_allocator_ = new MemoryAllocator(isolate_); |
| 5513 if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(), | 5513 if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(), |
| 5514 code_range_size_)) | 5514 code_range_size_)) |
| 5515 return false; | 5515 return false; |
| 5516 | 5516 |
| 5517 store_buffer_ = new StoreBuffer(this); | 5517 store_buffer_ = new StoreBuffer(this); |
| 5518 | 5518 |
| 5519 incremental_marking_ = new IncrementalMarking(this); | 5519 incremental_marking_ = new IncrementalMarking(this); |
| 5520 | |
| 5521 concurrent_marking_ = new ConcurrentMarking(this); | 5520 concurrent_marking_ = new ConcurrentMarking(this); |
| 5522 | 5521 |
| 5523 for (int i = 0; i <= LAST_SPACE; i++) { | 5522 for (int i = 0; i <= LAST_SPACE; i++) { |
| 5524 space_[i] = nullptr; | 5523 space_[i] = nullptr; |
| 5525 } | 5524 } |
| 5526 | 5525 |
| 5527 space_[NEW_SPACE] = new_space_ = new NewSpace(this); | 5526 space_[NEW_SPACE] = new_space_ = new NewSpace(this); |
| 5528 if (!new_space_->SetUp(initial_semispace_size_, max_semi_space_size_)) { | 5527 if (!new_space_->SetUp(initial_semispace_size_, max_semi_space_size_)) { |
| 5529 return false; | 5528 return false; |
| 5530 } | 5529 } |
| (...skipping 27 matching lines...) | |
| 5558 } | 5557 } |
| 5559 | 5558 |
| 5560 for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount); | 5559 for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount); |
| 5561 i++) { | 5560 i++) { |
| 5562 deferred_counters_[i] = 0; | 5561 deferred_counters_[i] = 0; |
| 5563 } | 5562 } |
| 5564 | 5563 |
| 5565 tracer_ = new GCTracer(this); | 5564 tracer_ = new GCTracer(this); |
| 5566 scavenge_collector_ = new Scavenger(this); | 5565 scavenge_collector_ = new Scavenger(this); |
| 5567 mark_compact_collector_ = new MarkCompactCollector(this); | 5566 mark_compact_collector_ = new MarkCompactCollector(this); |
| | 5567 incremental_marking_->set_marking_deque( |
| | Michael Lippautz (2017/04/28 12:49:40): There's a dependency of setting up spaces in IM so |
| | 5568 mark_compact_collector_->marking_deque()); |
| 5568 if (FLAG_minor_mc) | 5569 if (FLAG_minor_mc) |
| 5569 minor_mark_compact_collector_ = new MinorMarkCompactCollector(this); | 5570 minor_mark_compact_collector_ = new MinorMarkCompactCollector(this); |
| 5570 gc_idle_time_handler_ = new GCIdleTimeHandler(); | 5571 gc_idle_time_handler_ = new GCIdleTimeHandler(); |
| 5571 memory_reducer_ = new MemoryReducer(this); | 5572 memory_reducer_ = new MemoryReducer(this); |
| 5572 if (V8_UNLIKELY(FLAG_gc_stats)) { | 5573 if (V8_UNLIKELY(FLAG_gc_stats)) { |
| 5573 live_object_stats_ = new ObjectStats(this); | 5574 live_object_stats_ = new ObjectStats(this); |
| 5574 dead_object_stats_ = new ObjectStats(this); | 5575 dead_object_stats_ = new ObjectStats(this); |
| 5575 } | 5576 } |
| 5576 scavenge_job_ = new ScavengeJob(); | 5577 scavenge_job_ = new ScavengeJob(); |
| 5577 local_embedder_heap_tracer_ = new LocalEmbedderHeapTracer(); | 5578 local_embedder_heap_tracer_ = new LocalEmbedderHeapTracer(); |
| (...skipping 91 matching lines...) | |
| 5669 } | 5670 } |
| 5670 } | 5671 } |
| 5671 | 5672 |
| 5672 void Heap::RegisterExternallyReferencedObject(Object** object) { | 5673 void Heap::RegisterExternallyReferencedObject(Object** object) { |
| 5673 // The embedder is not aware of whether numbers are materialized as heap | 5674 // The embedder is not aware of whether numbers are materialized as heap |
| 5674 // objects are just passed around as Smis. | 5675 // objects are just passed around as Smis. |
| 5675 if (!(*object)->IsHeapObject()) return; | 5676 if (!(*object)->IsHeapObject()) return; |
| 5676 HeapObject* heap_object = HeapObject::cast(*object); | 5677 HeapObject* heap_object = HeapObject::cast(*object); |
| 5677 DCHECK(Contains(heap_object)); | 5678 DCHECK(Contains(heap_object)); |
| 5678 if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) { | 5679 if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) { |
| 5679 IncrementalMarking::MarkGrey(this, heap_object); | 5680 incremental_marking()->MarkGrey(this, heap_object); |
| 5680 } else { | 5681 } else { |
| 5681 DCHECK(mark_compact_collector()->in_use()); | 5682 DCHECK(mark_compact_collector()->in_use()); |
| 5682 mark_compact_collector()->MarkObject(heap_object); | 5683 mark_compact_collector()->MarkObject(heap_object); |
| 5683 } | 5684 } |
| 5684 } | 5685 } |
| 5685 | 5686 |
| 5686 void Heap::TearDown() { | 5687 void Heap::TearDown() { |
| 5687 #ifdef VERIFY_HEAP | 5688 #ifdef VERIFY_HEAP |
| 5688 if (FLAG_verify_heap) { | 5689 if (FLAG_verify_heap) { |
| 5689 Verify(); | 5690 Verify(); |
| (...skipping 747 matching lines...) | |
| 6437 case LO_SPACE: | 6438 case LO_SPACE: |
| 6438 return "LO_SPACE"; | 6439 return "LO_SPACE"; |
| 6439 default: | 6440 default: |
| 6440 UNREACHABLE(); | 6441 UNREACHABLE(); |
| 6441 } | 6442 } |
| 6442 return NULL; | 6443 return NULL; |
| 6443 } | 6444 } |
| 6444 | 6445 |
| 6445 } // namespace internal | 6446 } // namespace internal |
| 6446 } // namespace v8 | 6447 } // namespace v8 |
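
The recurring change in this diff is that IncrementalMarking's marking helpers (MarkGrey, TransferMark) are no longer invoked as static methods but through heap->incremental_marking(), and Heap::SetUp now hands the incremental marker the mark-compact collector's marking deque once that collector exists (new lines 5567–5568). The sketch below is a minimal, hypothetical illustration of that inject-after-construction pattern under simplified assumptions; it is not the real V8 classes, and MarkingDeque, its Push method, and the trimmed signatures are stand-ins.

```cpp
#include <deque>

class Heap;        // forward declarations only; the sketch needs just pointers
class HeapObject;

// Hypothetical stand-in for the worklist owned by the mark-compact collector.
class MarkingDeque {
 public:
  void Push(HeapObject* object) { deque_.push_back(object); }

 private:
  std::deque<HeapObject*> deque_;
};

class IncrementalMarking {
 public:
  // The deque is owned elsewhere (in this CL, by the mark-compact collector)
  // and injected once its owner has been constructed; until then it is null.
  void set_marking_deque(MarkingDeque* marking_deque) {
    marking_deque_ = marking_deque;
  }

  // An instance method instead of a static helper, so it can reach the
  // injected deque directly. The Heap* parameter mirrors the call shape in
  // the diff (incremental_marking()->MarkGrey(this, obj)) but is unused here.
  void MarkGrey(Heap* /* heap */, HeapObject* object) {
    if (marking_deque_ != nullptr) marking_deque_->Push(object);
  }

 private:
  MarkingDeque* marking_deque_ = nullptr;
};
```

With this shape the order of calls in Heap::SetUp matters: incremental_marking_ can be constructed early, but set_marking_deque can only run after mark_compact_collector_ has been created, which matches where the new lines 5567–5568 sit in the diff and the dependency the reviewer comment above points at.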