OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 3094 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3105 | 3105 |
3106 void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) { | 3106 void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) { |
3107 // As long as the inspected object is black and we are currently not iterating | 3107 // As long as the inspected object is black and we are currently not iterating |
3108 // the heap using HeapIterator, we can update the live byte count. We cannot | 3108 // the heap using HeapIterator, we can update the live byte count. We cannot |
3109 // update while using HeapIterator because the iterator is temporarily | 3109 // update while using HeapIterator because the iterator is temporarily |
3110 // marking the whole object graph, without updating live bytes. | 3110 // marking the whole object graph, without updating live bytes. |
3111 if (lo_space()->Contains(object)) { | 3111 if (lo_space()->Contains(object)) { |
3112 lo_space()->AdjustLiveBytes(by); | 3112 lo_space()->AdjustLiveBytes(by); |
3113 } else if (!in_heap_iterator() && | 3113 } else if (!in_heap_iterator() && |
3114 !mark_compact_collector()->sweeping_in_progress() && | 3114 !mark_compact_collector()->sweeping_in_progress() && |
3115 Marking::IsBlack(Marking::MarkBitFrom(object->address()))) { | 3115 Marking::IsBlack(ObjectMarking::MarkBitFrom(object->address()))) { |
3116 if (mode == SEQUENTIAL_TO_SWEEPER) { | 3116 if (mode == SEQUENTIAL_TO_SWEEPER) { |
3117 MemoryChunk::IncrementLiveBytesFromGC(object, by); | 3117 MemoryChunk::IncrementLiveBytesFromGC(object, by); |
3118 } else { | 3118 } else { |
3119 MemoryChunk::IncrementLiveBytesFromMutator(object, by); | 3119 MemoryChunk::IncrementLiveBytesFromMutator(object, by); |
3120 } | 3120 } |
3121 } | 3121 } |
3122 } | 3122 } |
3123 | 3123 |
3124 | 3124 |
3125 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object, | 3125 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object, |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3162 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); | 3162 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); |
3163 FixedArrayBase* new_object = | 3163 FixedArrayBase* new_object = |
3164 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); | 3164 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); |
3165 | 3165 |
3166 // Remove recorded slots for the new map and length offset. | 3166 // Remove recorded slots for the new map and length offset. |
3167 ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0)); | 3167 ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0)); |
3168 ClearRecordedSlot(new_object, HeapObject::RawField( | 3168 ClearRecordedSlot(new_object, HeapObject::RawField( |
3169 new_object, FixedArrayBase::kLengthOffset)); | 3169 new_object, FixedArrayBase::kLengthOffset)); |
3170 | 3170 |
3171 // Maintain consistency of live bytes during incremental marking | 3171 // Maintain consistency of live bytes during incremental marking |
3172 Marking::TransferMark(this, object->address(), new_start); | 3172 IncrementalMarking::TransferMark(this, object->address(), new_start); |
3173 AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER); | 3173 AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER); |
3174 | 3174 |
3175 // Notify the heap profiler of change in object layout. | 3175 // Notify the heap profiler of change in object layout. |
3176 OnMoveEvent(new_object, object, new_object->Size()); | 3176 OnMoveEvent(new_object, object, new_object->Size()); |
3177 return new_object; | 3177 return new_object; |
3178 } | 3178 } |
3179 | 3179 |
3180 | 3180 |
3181 // Force instantiation of templatized method. | 3181 // Force instantiation of templatized method. |
3182 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( | 3182 template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>( |
(...skipping 1001 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4184 // Code space, map space, and large object space do not use black pages. | 4184 // Code space, map space, and large object space do not use black pages. |
4185 // Hence we have to color all objects of the reservation first black to avoid | 4185 // Hence we have to color all objects of the reservation first black to avoid |
4186 // unnecessary marking deque load. | 4186 // unnecessary marking deque load. |
4187 if (incremental_marking()->black_allocation()) { | 4187 if (incremental_marking()->black_allocation()) { |
4188 for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) { | 4188 for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) { |
4189 const Heap::Reservation& res = reservations[i]; | 4189 const Heap::Reservation& res = reservations[i]; |
4190 for (auto& chunk : res) { | 4190 for (auto& chunk : res) { |
4191 Address addr = chunk.start; | 4191 Address addr = chunk.start; |
4192 while (addr < chunk.end) { | 4192 while (addr < chunk.end) { |
4193 HeapObject* obj = HeapObject::FromAddress(addr); | 4193 HeapObject* obj = HeapObject::FromAddress(addr); |
4194 Marking::MarkBlack(Marking::MarkBitFrom(obj)); | 4194 Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj)); |
4195 MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size()); | 4195 MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size()); |
4196 addr += obj->Size(); | 4196 addr += obj->Size(); |
4197 } | 4197 } |
4198 } | 4198 } |
4199 } | 4199 } |
4200 for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) { | 4200 for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) { |
4201 const Heap::Reservation& res = reservations[i]; | 4201 const Heap::Reservation& res = reservations[i]; |
4202 for (auto& chunk : res) { | 4202 for (auto& chunk : res) { |
4203 Address addr = chunk.start; | 4203 Address addr = chunk.start; |
4204 while (addr < chunk.end) { | 4204 while (addr < chunk.end) { |
(...skipping 499 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4704 bool was_marked_black, | 4704 bool was_marked_black, |
4705 ObjectSlotCallback callback) { | 4705 ObjectSlotCallback callback) { |
4706 // We are not collecting slots on new space objects during mutation | 4706 // We are not collecting slots on new space objects during mutation |
4707 // thus we have to scan for pointers to evacuation candidates when we | 4707 // thus we have to scan for pointers to evacuation candidates when we |
4708 // promote objects. But we should not record any slots in non-black | 4708 // promote objects. But we should not record any slots in non-black |
4709 // objects. Grey object's slots would be rescanned. | 4709 // objects. Grey object's slots would be rescanned. |
4710 // White object might not survive until the end of collection | 4710 // White object might not survive until the end of collection |
4711 // it would be a violation of the invariant to record its slots. | 4711 // it would be a violation of the invariant to record its slots. |
4712 bool record_slots = false; | 4712 bool record_slots = false; |
4713 if (incremental_marking()->IsCompacting()) { | 4713 if (incremental_marking()->IsCompacting()) { |
4714 MarkBit mark_bit = Marking::MarkBitFrom(target); | 4714 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); |
4715 record_slots = Marking::IsBlack(mark_bit); | 4715 record_slots = Marking::IsBlack(mark_bit); |
4716 } | 4716 } |
4717 | 4717 |
4718 IteratePromotedObjectsVisitor visitor(this, target, record_slots, callback); | 4718 IteratePromotedObjectsVisitor visitor(this, target, record_slots, callback); |
4719 target->IterateBody(target->map()->instance_type(), size, &visitor); | 4719 target->IterateBody(target->map()->instance_type(), size, &visitor); |
4720 | 4720 |
4721 // When black allocation is on, we have to visit not already marked black | 4721 // When black allocation is on, we have to visit not already marked black |
4722 // objects (in new space) promoted to black pages to keep their references | 4722 // objects (in new space) promoted to black pages to keep their references |
4723 // alive. | 4723 // alive. |
4724 // TODO(hpayer): Implement a special promotion visitor that incorporates | 4724 // TODO(hpayer): Implement a special promotion visitor that incorporates |
(...skipping 1157 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5882 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) { | 5882 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) { |
5883 MarkReachableObjects(); | 5883 MarkReachableObjects(); |
5884 } | 5884 } |
5885 | 5885 |
5886 ~UnreachableObjectsFilter() { | 5886 ~UnreachableObjectsFilter() { |
5887 heap_->mark_compact_collector()->ClearMarkbits(); | 5887 heap_->mark_compact_collector()->ClearMarkbits(); |
5888 } | 5888 } |
5889 | 5889 |
5890 bool SkipObject(HeapObject* object) { | 5890 bool SkipObject(HeapObject* object) { |
5891 if (object->IsFiller()) return true; | 5891 if (object->IsFiller()) return true; |
5892 MarkBit mark_bit = Marking::MarkBitFrom(object); | 5892 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); |
5893 return Marking::IsWhite(mark_bit); | 5893 return Marking::IsWhite(mark_bit); |
5894 } | 5894 } |
5895 | 5895 |
5896 private: | 5896 private: |
5897 class MarkingVisitor : public ObjectVisitor { | 5897 class MarkingVisitor : public ObjectVisitor { |
5898 public: | 5898 public: |
5899 MarkingVisitor() : marking_stack_(10) {} | 5899 MarkingVisitor() : marking_stack_(10) {} |
5900 | 5900 |
5901 void VisitPointers(Object** start, Object** end) override { | 5901 void VisitPointers(Object** start, Object** end) override { |
5902 for (Object** p = start; p < end; p++) { | 5902 for (Object** p = start; p < end; p++) { |
5903 if (!(*p)->IsHeapObject()) continue; | 5903 if (!(*p)->IsHeapObject()) continue; |
5904 HeapObject* obj = HeapObject::cast(*p); | 5904 HeapObject* obj = HeapObject::cast(*p); |
5905 MarkBit mark_bit = Marking::MarkBitFrom(obj); | 5905 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
5906 if (Marking::IsWhite(mark_bit)) { | 5906 if (Marking::IsWhite(mark_bit)) { |
5907 Marking::WhiteToBlack(mark_bit); | 5907 Marking::WhiteToBlack(mark_bit); |
5908 marking_stack_.Add(obj); | 5908 marking_stack_.Add(obj); |
5909 } | 5909 } |
5910 } | 5910 } |
5911 } | 5911 } |
5912 | 5912 |
5913 void TransitiveClosure() { | 5913 void TransitiveClosure() { |
5914 while (!marking_stack_.is_empty()) { | 5914 while (!marking_stack_.is_empty()) { |
5915 HeapObject* obj = marking_stack_.RemoveLast(); | 5915 HeapObject* obj = marking_stack_.RemoveLast(); |
(...skipping 512 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6428 } | 6428 } |
6429 | 6429 |
6430 | 6430 |
6431 // static | 6431 // static |
6432 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6432 int Heap::GetStaticVisitorIdForMap(Map* map) { |
6433 return StaticVisitorBase::GetVisitorId(map); | 6433 return StaticVisitorBase::GetVisitorId(map); |
6434 } | 6434 } |
6435 | 6435 |
6436 } // namespace internal | 6436 } // namespace internal |
6437 } // namespace v8 | 6437 } // namespace v8 |
OLD | NEW |