OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 875 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
886 // garbage. | 886 // garbage. |
887 // Note: as weak callbacks can execute arbitrary code, we cannot | 887 // Note: as weak callbacks can execute arbitrary code, we cannot |
888 // hope that eventually there will be no weak callback invocations. | 888 // hope that eventually there will be no weak callback invocations. |
889 // Therefore stop recollecting after several attempts. | 889 // Therefore stop recollecting after several attempts. |
890 if (isolate()->concurrent_recompilation_enabled()) { | 890 if (isolate()->concurrent_recompilation_enabled()) { |
891 // The optimizing compiler may be unnecessarily holding on to memory. | 891 // The optimizing compiler may be unnecessarily holding on to memory. |
892 DisallowHeapAllocation no_recursive_gc; | 892 DisallowHeapAllocation no_recursive_gc; |
893 isolate()->optimizing_compile_dispatcher()->Flush(); | 893 isolate()->optimizing_compile_dispatcher()->Flush(); |
894 } | 894 } |
895 isolate()->ClearSerializerData(); | 895 isolate()->ClearSerializerData(); |
896 set_current_gc_flags(kAbortIncrementalMarkingMask | | 896 set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask); |
897 kReduceMemoryFootprintMask); | |
898 isolate_->compilation_cache()->Clear(); | 897 isolate_->compilation_cache()->Clear(); |
899 const int kMaxNumberOfAttempts = 7; | 898 const int kMaxNumberOfAttempts = 7; |
900 const int kMinNumberOfAttempts = 2; | 899 const int kMinNumberOfAttempts = 2; |
901 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 900 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
902 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL, | 901 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL, |
903 v8::kGCCallbackFlagCollectAllAvailableGarbage) && | 902 v8::kGCCallbackFlagCollectAllAvailableGarbage) && |
904 attempt + 1 >= kMinNumberOfAttempts) { | 903 attempt + 1 >= kMinNumberOfAttempts) { |
905 break; | 904 break; |
906 } | 905 } |
907 } | 906 } |
(...skipping 3131 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4039 Struct* result = nullptr; | 4038 Struct* result = nullptr; |
4040 { | 4039 { |
4041 AllocationResult allocation = Allocate(map, OLD_SPACE); | 4040 AllocationResult allocation = Allocate(map, OLD_SPACE); |
4042 if (!allocation.To(&result)) return allocation; | 4041 if (!allocation.To(&result)) return allocation; |
4043 } | 4042 } |
4044 result->InitializeBody(size); | 4043 result->InitializeBody(size); |
4045 return result; | 4044 return result; |
4046 } | 4045 } |
4047 | 4046 |
4048 | 4047 |
| 4048 bool Heap::IsHeapIterable() { |
| 4049 // TODO(hpayer): This function is not correct. Allocation folding in old |
| 4050 // space breaks the iterability. |
| 4051 return new_space_top_after_last_gc_ == new_space()->top(); |
| 4052 } |
| 4053 |
| 4054 |
4049 void Heap::MakeHeapIterable() { | 4055 void Heap::MakeHeapIterable() { |
| 4056 DCHECK(AllowHeapAllocation::IsAllowed()); |
| 4057 if (!IsHeapIterable()) { |
| 4058 CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable"); |
| 4059 } |
4050 if (mark_compact_collector()->sweeping_in_progress()) { | 4060 if (mark_compact_collector()->sweeping_in_progress()) { |
4051 mark_compact_collector()->EnsureSweepingCompleted(); | 4061 mark_compact_collector()->EnsureSweepingCompleted(); |
4052 } | 4062 } |
| 4063 DCHECK(IsHeapIterable()); |
4053 } | 4064 } |
4054 | 4065 |
4055 | 4066 |
4056 static double ComputeMutatorUtilization(double mutator_speed, double gc_speed) { | 4067 static double ComputeMutatorUtilization(double mutator_speed, double gc_speed) { |
4057 const double kMinMutatorUtilization = 0.0; | 4068 const double kMinMutatorUtilization = 0.0; |
4058 const double kConservativeGcSpeedInBytesPerMillisecond = 200000; | 4069 const double kConservativeGcSpeedInBytesPerMillisecond = 200000; |
4059 if (mutator_speed == 0) return kMinMutatorUtilization; | 4070 if (mutator_speed == 0) return kMinMutatorUtilization; |
4060 if (gc_speed == 0) gc_speed = kConservativeGcSpeedInBytesPerMillisecond; | 4071 if (gc_speed == 0) gc_speed = kConservativeGcSpeedInBytesPerMillisecond; |
4061 // Derivation: | 4072 // Derivation: |
4062 // mutator_utilization = mutator_time / (mutator_time + gc_time) | 4073 // mutator_utilization = mutator_time / (mutator_time + gc_time) |
(...skipping 524 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4587 return false; | 4598 return false; |
4588 } | 4599 } |
4589 } | 4600 } |
4590 | 4601 |
4591 | 4602 |
4592 #ifdef VERIFY_HEAP | 4603 #ifdef VERIFY_HEAP |
4593 void Heap::Verify() { | 4604 void Heap::Verify() { |
4594 CHECK(HasBeenSetUp()); | 4605 CHECK(HasBeenSetUp()); |
4595 HandleScope scope(isolate()); | 4606 HandleScope scope(isolate()); |
4596 | 4607 |
4597 MakeHeapIterable(); | 4608 if (mark_compact_collector()->sweeping_in_progress()) { |
| 4609 // We have to wait here for the sweeper threads to have an iterable heap. |
| 4610 mark_compact_collector()->EnsureSweepingCompleted(); |
| 4611 } |
4598 | 4612 |
4599 VerifyPointersVisitor visitor; | 4613 VerifyPointersVisitor visitor; |
4600 IterateRoots(&visitor, VISIT_ONLY_STRONG); | 4614 IterateRoots(&visitor, VISIT_ONLY_STRONG); |
4601 | 4615 |
4602 VerifySmisVisitor smis_visitor; | 4616 VerifySmisVisitor smis_visitor; |
4603 IterateSmiRoots(&smis_visitor); | 4617 IterateSmiRoots(&smis_visitor); |
4604 | 4618 |
4605 new_space_.Verify(); | 4619 new_space_.Verify(); |
4606 | 4620 |
4607 old_space_->Verify(&visitor); | 4621 old_space_->Verify(&visitor); |
(...skipping 1173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5781 case LO_SPACE: | 5795 case LO_SPACE: |
5782 iterator_ = new LargeObjectIterator(heap_->lo_space()); | 5796 iterator_ = new LargeObjectIterator(heap_->lo_space()); |
5783 break; | 5797 break; |
5784 } | 5798 } |
5785 | 5799 |
5786 // Return the newly allocated iterator. | 5800 // Return the newly allocated iterator. |
5787 DCHECK(iterator_ != NULL); | 5801 DCHECK(iterator_ != NULL); |
5788 return iterator_; | 5802 return iterator_; |
5789 } | 5803 } |
5790 | 5804 |
5791 HeapIterator::HeapIterator(Heap* heap, HeapObjectsFiltering filtering) | 5805 |
5792 : heap_(heap), space_iterator_(nullptr), object_iterator_(nullptr) { | 5806 class HeapObjectsFilter { |
| 5807 public: |
| 5808 virtual ~HeapObjectsFilter() {} |
| 5809 virtual bool SkipObject(HeapObject* object) = 0; |
| 5810 }; |
| 5811 |
| 5812 |
| 5813 class UnreachableObjectsFilter : public HeapObjectsFilter { |
| 5814 public: |
| 5815 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) { |
| 5816 MarkReachableObjects(); |
| 5817 } |
| 5818 |
| 5819 ~UnreachableObjectsFilter() { |
| 5820 heap_->mark_compact_collector()->ClearMarkbits(); |
| 5821 } |
| 5822 |
| 5823 bool SkipObject(HeapObject* object) { |
| 5824 if (object->IsFiller()) return true; |
| 5825 MarkBit mark_bit = Marking::MarkBitFrom(object); |
| 5826 return Marking::IsWhite(mark_bit); |
| 5827 } |
| 5828 |
| 5829 private: |
| 5830 class MarkingVisitor : public ObjectVisitor { |
| 5831 public: |
| 5832 MarkingVisitor() : marking_stack_(10) {} |
| 5833 |
| 5834 void VisitPointers(Object** start, Object** end) override { |
| 5835 for (Object** p = start; p < end; p++) { |
| 5836 if (!(*p)->IsHeapObject()) continue; |
| 5837 HeapObject* obj = HeapObject::cast(*p); |
| 5838 MarkBit mark_bit = Marking::MarkBitFrom(obj); |
| 5839 if (Marking::IsWhite(mark_bit)) { |
| 5840 Marking::WhiteToBlack(mark_bit); |
| 5841 marking_stack_.Add(obj); |
| 5842 } |
| 5843 } |
| 5844 } |
| 5845 |
| 5846 void TransitiveClosure() { |
| 5847 while (!marking_stack_.is_empty()) { |
| 5848 HeapObject* obj = marking_stack_.RemoveLast(); |
| 5849 obj->Iterate(this); |
| 5850 } |
| 5851 } |
| 5852 |
| 5853 private: |
| 5854 List<HeapObject*> marking_stack_; |
| 5855 }; |
| 5856 |
| 5857 void MarkReachableObjects() { |
| 5858 MarkingVisitor visitor; |
| 5859 heap_->IterateRoots(&visitor, VISIT_ALL); |
| 5860 visitor.TransitiveClosure(); |
| 5861 } |
| 5862 |
| 5863 Heap* heap_; |
| 5864 DisallowHeapAllocation no_allocation_; |
| 5865 }; |
| 5866 |
| 5867 |
| 5868 HeapIterator::HeapIterator(Heap* heap, |
| 5869 HeapIterator::HeapObjectsFiltering filtering) |
| 5870 : make_heap_iterable_helper_(heap), |
| 5871 no_heap_allocation_(), |
| 5872 heap_(heap), |
| 5873 filtering_(filtering), |
| 5874 filter_(nullptr), |
| 5875 space_iterator_(nullptr), |
| 5876 object_iterator_(nullptr) { |
5793 heap_->heap_iterator_start(); | 5877 heap_->heap_iterator_start(); |
5794 // Start the iteration. | 5878 // Start the iteration. |
5795 space_iterator_ = new SpaceIterator(heap_); | 5879 space_iterator_ = new SpaceIterator(heap_); |
5796 switch (filtering) { | 5880 switch (filtering_) { |
5797 case HeapObjectsFiltering::kFilterUnreachable: | 5881 case kFilterUnreachable: |
5798 heap_->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask, | 5882 filter_ = new UnreachableObjectsFilter(heap_); |
5799 "filter unreachable objects"); | |
5800 break; | 5883 break; |
5801 default: | 5884 default: |
5802 break; | 5885 break; |
5803 } | 5886 } |
5804 heap_->MakeHeapIterable(); | |
5805 disallow_heap_allocation_ = new DisallowHeapAllocation(); | |
5806 object_iterator_ = space_iterator_->next(); | 5887 object_iterator_ = space_iterator_->next(); |
5807 } | 5888 } |
5808 | 5889 |
5809 | 5890 |
5810 HeapIterator::~HeapIterator() { | 5891 HeapIterator::~HeapIterator() { |
5811 heap_->heap_iterator_end(); | 5892 heap_->heap_iterator_end(); |
| 5893 #ifdef DEBUG |
| 5894 // Assert that in filtering mode we have iterated through all |
| 5895 // objects. Otherwise, heap will be left in an inconsistent state. |
| 5896 if (filtering_ != kNoFiltering) { |
| 5897 DCHECK(object_iterator_ == nullptr); |
| 5898 } |
| 5899 #endif |
5812 // Make sure the last iterator is deallocated. | 5900 // Make sure the last iterator is deallocated. |
5813 delete object_iterator_; | 5901 delete object_iterator_; |
5814 delete space_iterator_; | 5902 delete space_iterator_; |
5815 delete disallow_heap_allocation_; | 5903 delete filter_; |
5816 } | 5904 } |
5817 | 5905 |
5818 | 5906 |
5819 HeapObject* HeapIterator::next() { | 5907 HeapObject* HeapIterator::next() { |
| 5908 if (filter_ == nullptr) return NextObject(); |
| 5909 |
| 5910 HeapObject* obj = NextObject(); |
| 5911 while ((obj != nullptr) && (filter_->SkipObject(obj))) obj = NextObject(); |
| 5912 return obj; |
| 5913 } |
| 5914 |
| 5915 |
| 5916 HeapObject* HeapIterator::NextObject() { |
5820 // No iterator means we are done. | 5917 // No iterator means we are done. |
5821 if (object_iterator_ == nullptr) return nullptr; | 5918 if (object_iterator_ == nullptr) return nullptr; |
5822 | 5919 |
5823 if (HeapObject* obj = object_iterator_->next_object()) { | 5920 if (HeapObject* obj = object_iterator_->next_object()) { |
5824 // If the current iterator has more objects we are fine. | 5921 // If the current iterator has more objects we are fine. |
5825 return obj; | 5922 return obj; |
5826 } else { | 5923 } else { |
5827 // Go through the spaces looking for one that has objects. | 5924 // Go through the spaces looking for one that has objects. |
5828 while (space_iterator_->has_next()) { | 5925 while (space_iterator_->has_next()) { |
5829 object_iterator_ = space_iterator_->next(); | 5926 object_iterator_ = space_iterator_->next(); |
(...skipping 433 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6263 } | 6360 } |
6264 | 6361 |
6265 | 6362 |
6266 // static | 6363 // static |
6267 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6364 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6268 return StaticVisitorBase::GetVisitorId(map); | 6365 return StaticVisitorBase::GetVisitorId(map); |
6269 } | 6366 } |
6270 | 6367 |
6271 } // namespace internal | 6368 } // namespace internal |
6272 } // namespace v8 | 6369 } // namespace v8 |
OLD | NEW |