| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/context-slot-cache.h" | 9 #include "src/ast/context-slot-cache.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 3165 matching lines...) |
| 3176 | 3176 |
| 3177 void Heap::AdjustLiveBytes(HeapObject* object, int by) { | 3177 void Heap::AdjustLiveBytes(HeapObject* object, int by) { |
| 3178 // As long as the inspected object is black and we are currently not iterating | 3178 // As long as the inspected object is black and we are currently not iterating |
| 3179 // the heap using HeapIterator, we can update the live byte count. We cannot | 3179 // the heap using HeapIterator, we can update the live byte count. We cannot |
| 3180 // update while using HeapIterator because the iterator is temporarily | 3180 // update while using HeapIterator because the iterator is temporarily |
| 3181 // marking the whole object graph, without updating live bytes. | 3181 // marking the whole object graph, without updating live bytes. |
| 3182 if (lo_space()->Contains(object)) { | 3182 if (lo_space()->Contains(object)) { |
| 3183 lo_space()->AdjustLiveBytes(by); | 3183 lo_space()->AdjustLiveBytes(by); |
| 3184 } else if (!in_heap_iterator() && | 3184 } else if (!in_heap_iterator() && |
| 3185 !mark_compact_collector()->sweeping_in_progress() && | 3185 !mark_compact_collector()->sweeping_in_progress() && |
| 3186 Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) { | 3186 ObjectMarking::IsBlack(object)) { |
| 3187 DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone()); | 3187 DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone()); |
| 3188 MemoryChunk::IncrementLiveBytes(object, by); | 3188 MemoryChunk::IncrementLiveBytes(object, by); |
| 3189 } | 3189 } |
| 3190 } | 3190 } |
| 3191 | 3191 |
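This hunk is representative of the whole change: call sites stop composing Marking::IsBlack with an explicit ObjectMarking::MarkBitFrom lookup and instead call a predicate directly on ObjectMarking. A minimal sketch of what such wrappers could look like, assuming they simply fold the mark-bit lookup into the query; only the names and signatures are taken from the call sites in this diff, the bodies are an assumption, not V8's actual implementation:

    // Sketch only: convenience predicates over mark bits, as implied by the
    // migrated call sites in this diff. MarkBitFrom() is the existing helper
    // that maps an object's address to its mark bit; the predicate bodies
    // below are assumed, not copied from V8.
    class ObjectMarking {
     public:
      static MarkBit MarkBitFrom(HeapObject* obj);  // existing V8 helper
      static bool IsBlack(HeapObject* obj) {
        return Marking::IsBlack(MarkBitFrom(obj));
      }
      static bool IsWhite(HeapObject* obj) {
        return Marking::IsWhite(MarkBitFrom(obj));
      }
      static bool IsBlackOrGrey(HeapObject* obj) {
        return Marking::IsBlackOrGrey(MarkBitFrom(obj));
      }
    };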
| 3192 | 3192 |
| 3193 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object, | 3193 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object, |
| 3194 int elements_to_trim) { | 3194 int elements_to_trim) { |
| 3195 CHECK_NOT_NULL(object); | 3195 CHECK_NOT_NULL(object); |
| 3196 DCHECK(CanMoveObjectStart(object)); |
| 3196 DCHECK(!object->IsFixedTypedArrayBase()); | 3197 DCHECK(!object->IsFixedTypedArrayBase()); |
| 3197 DCHECK(!object->IsByteArray()); | 3198 DCHECK(!object->IsByteArray()); |
| 3198 const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize; | 3199 const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize; |
| 3199 const int bytes_to_trim = elements_to_trim * element_size; | 3200 const int bytes_to_trim = elements_to_trim * element_size; |
| 3200 Map* map = object->map(); | 3201 Map* map = object->map(); |
| 3201 | 3202 |
| 3202 // For now this trick is only applied to objects in new and paged space. | 3203 // For now this trick is only applied to objects in new and paged space. |
| 3203 // In large object space the object's start must coincide with the chunk | 3204 // In large object space the object's start must coincide with the chunk |
| 3204 // start and thus the trick is just not applicable. | 3205 // start and thus the trick is just not applicable. |
| 3205 DCHECK(!lo_space()->Contains(object)); | 3206 DCHECK(!lo_space()->Contains(object)); |
| (...skipping 30 matching lines...) |
| 3236 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object))) { | 3237 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object))) { |
| 3237 Page* page = Page::FromAddress(old_start); | 3238 Page* page = Page::FromAddress(old_start); |
| 3238 page->markbits()->ClearRange( | 3239 page->markbits()->ClearRange( |
| 3239 page->AddressToMarkbitIndex(old_start), | 3240 page->AddressToMarkbitIndex(old_start), |
| 3240 page->AddressToMarkbitIndex(old_start + bytes_to_trim)); | 3241 page->AddressToMarkbitIndex(old_start + bytes_to_trim)); |
| 3241 } | 3242 } |
| 3242 | 3243 |
| 3243 // Initialize header of the trimmed array. Since left trimming is only | 3244 // Initialize header of the trimmed array. Since left trimming is only |
| 3244 // performed on pages which are not concurrently swept, creating a filler | 3245 // performed on pages which are not concurrently swept, creating a filler |
| 3245 // object does not require synchronization. | 3246 // object does not require synchronization. |
| 3246 DCHECK(CanMoveObjectStart(object)); | |
| 3247 Object** former_start = HeapObject::RawField(object, 0); | 3247 Object** former_start = HeapObject::RawField(object, 0); |
| 3248 int new_start_index = elements_to_trim * (element_size / kPointerSize); | 3248 int new_start_index = elements_to_trim * (element_size / kPointerSize); |
| 3249 former_start[new_start_index] = map; | 3249 former_start[new_start_index] = map; |
| 3250 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); | 3250 former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim); |
| 3251 | 3251 |
| 3252 FixedArrayBase* new_object = | 3252 FixedArrayBase* new_object = |
| 3253 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); | 3253 FixedArrayBase::cast(HeapObject::FromAddress(new_start)); |
| 3254 | 3254 |
| 3255 // Maintain consistency of live bytes during incremental marking | 3255 // Maintain consistency of live bytes during incremental marking |
| 3256 AdjustLiveBytes(new_object, -bytes_to_trim); | 3256 AdjustLiveBytes(new_object, -bytes_to_trim); |
| (...skipping 50 matching lines...) |
| 3307 // We do not create a filler for objects in large object space. | 3307 // We do not create a filler for objects in large object space. |
| 3308 // TODO(hpayer): We should shrink the large object page if the size | 3308 // TODO(hpayer): We should shrink the large object page if the size |
| 3309 // of the object changed significantly. | 3309 // of the object changed significantly. |
| 3310 if (!lo_space()->Contains(object)) { | 3310 if (!lo_space()->Contains(object)) { |
| 3311 HeapObject* filler = | 3311 HeapObject* filler = |
| 3312 CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes); | 3312 CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes); |
| 3313 DCHECK_NOT_NULL(filler); | 3313 DCHECK_NOT_NULL(filler); |
| 3314 // Clear the mark bits of the black area that belongs now to the filler. | 3314 // Clear the mark bits of the black area that belongs now to the filler. |
| 3315 // This is an optimization. The sweeper will release black fillers anyway. | 3315 // This is an optimization. The sweeper will release black fillers anyway. |
| 3316 if (incremental_marking()->black_allocation() && | 3316 if (incremental_marking()->black_allocation() && |
| 3317 Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(filler))) { | 3317 ObjectMarking::IsBlackOrGrey(filler)) { |
| 3318 Page* page = Page::FromAddress(new_end); | 3318 Page* page = Page::FromAddress(new_end); |
| 3319 page->markbits()->ClearRange( | 3319 page->markbits()->ClearRange( |
| 3320 page->AddressToMarkbitIndex(new_end), | 3320 page->AddressToMarkbitIndex(new_end), |
| 3321 page->AddressToMarkbitIndex(new_end + bytes_to_trim)); | 3321 page->AddressToMarkbitIndex(new_end + bytes_to_trim)); |
| 3322 } | 3322 } |
| 3323 } | 3323 } |
| 3324 | 3324 |
| 3325 // Initialize header of the trimmed array. We are storing the new length | 3325 // Initialize header of the trimmed array. We are storing the new length |
| 3326 // using a release store after creating a filler for the left-over space to | 3326 // using a release store after creating a filler for the left-over space to |
| 3327 // avoid races with the sweeper thread. | 3327 // avoid races with the sweeper thread. |
| (...skipping 970 matching lines...) |
| 4298 return true; | 4298 return true; |
| 4299 } | 4299 } |
| 4300 return false; | 4300 return false; |
| 4301 } | 4301 } |
| 4302 | 4302 |
| 4303 void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) { | 4303 void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) { |
| 4304 // TODO(hpayer): We do not have to iterate reservations on black objects | 4304 // TODO(hpayer): We do not have to iterate reservations on black objects |
| 4305 // for marking. We just have to execute the special visiting side effect | 4305 // for marking. We just have to execute the special visiting side effect |
| 4306 // code that adds objects to global data structures, e.g. for array buffers. | 4306 // code that adds objects to global data structures, e.g. for array buffers. |
| 4307 | 4307 |
| 4308 // Code space, map space, and large object space do not use black pages. | |
| 4309 // Hence we have to color all objects of the reservation first black to avoid | |
| 4310 // unnecessary marking deque load. | |
| 4311 if (incremental_marking()->black_allocation()) { | 4308 if (incremental_marking()->black_allocation()) { |
| 4309 // Iterate black objects in old space, code space, map space, and large |
| 4310 // object space for side effects. |
| 4312 for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) { | 4311 for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) { |
| 4313 const Heap::Reservation& res = reservations[i]; | 4312 const Heap::Reservation& res = reservations[i]; |
| 4314 for (auto& chunk : res) { | 4313 for (auto& chunk : res) { |
| 4315 Address addr = chunk.start; | 4314 Address addr = chunk.start; |
| 4316 while (addr < chunk.end) { | 4315 while (addr < chunk.end) { |
| 4317 HeapObject* obj = HeapObject::FromAddress(addr); | 4316 HeapObject* obj = HeapObject::FromAddress(addr); |
| 4318 Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj)); | 4317 // There might be grey objects due to black-to-grey transitions in |
| 4318 // incremental marking. E.g. see VisitNativeContextIncremental. |
| 4319 DCHECK(ObjectMarking::IsBlackOrGrey(obj)); |
| 4320 if (ObjectMarking::IsBlack(obj)) { |
| 4321 incremental_marking()->IterateBlackObject(obj); |
| 4322 } |
| 4319 addr += obj->Size(); | 4323 addr += obj->Size(); |
| 4320 } | 4324 } |
| 4321 } | 4325 } |
| 4322 } | |
| 4323 for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) { | |
| 4324 const Heap::Reservation& res = reservations[i]; | |
| 4325 for (auto& chunk : res) { | |
| 4326 Address addr = chunk.start; | |
| 4327 while (addr < chunk.end) { | |
| 4328 HeapObject* obj = HeapObject::FromAddress(addr); | |
| 4329 incremental_marking()->IterateBlackObject(obj); | |
| 4330 addr += obj->Size(); | |
| 4331 } | |
| 4332 } | |
| 4333 } | 4326 } |
| 4334 } | 4327 } |
| 4335 } | 4328 } |
| 4336 | 4329 |
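The rewrite above collapses the old two-pass scheme (first color every reserved object black, then walk the reservations again to visit each object) into a single pass that visits only objects that are already black. Condensed, the per-chunk loop now reads as follows; this is a sketch using only names that appear in the diff:

    // One pass over a reservation chunk. Grey objects (possible after a
    // black-to-grey transition such as VisitNativeContextIncremental) are
    // skipped here because the incremental marker will rescan them anyway.
    Address addr = chunk.start;
    while (addr < chunk.end) {
      HeapObject* obj = HeapObject::FromAddress(addr);
      DCHECK(ObjectMarking::IsBlackOrGrey(obj));
      if (ObjectMarking::IsBlack(obj)) {
        incremental_marking()->IterateBlackObject(obj);
      }
      addr += obj->Size();
    }

The deleted comment about coloring all reserved objects black first is no longer needed: after this change, the coloring is assumed to have already happened at allocation time under black allocation, and only the side-effect visitation remains.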
| 4337 GCIdleTimeHeapState Heap::ComputeHeapState() { | 4330 GCIdleTimeHeapState Heap::ComputeHeapState() { |
| 4338 GCIdleTimeHeapState heap_state; | 4331 GCIdleTimeHeapState heap_state; |
| 4339 heap_state.contexts_disposed = contexts_disposed_; | 4332 heap_state.contexts_disposed = contexts_disposed_; |
| 4340 heap_state.contexts_disposal_rate = | 4333 heap_state.contexts_disposal_rate = |
| 4341 tracer()->ContextDisposalRateInMilliseconds(); | 4334 tracer()->ContextDisposalRateInMilliseconds(); |
| 4342 heap_state.size_of_objects = static_cast<size_t>(SizeOfObjects()); | 4335 heap_state.size_of_objects = static_cast<size_t>(SizeOfObjects()); |
| (...skipping 548 matching lines...) |
| 4891 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, | 4884 void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size, |
| 4892 bool was_marked_black) { | 4885 bool was_marked_black) { |
| 4893 // We are not collecting slots on new space objects during mutation, | 4886 // We are not collecting slots on new space objects during mutation, |
| 4894 // thus we have to scan for pointers to evacuation candidates when we | 4887 // thus we have to scan for pointers to evacuation candidates when we |
| 4895 // promote objects. But we should not record any slots in non-black | 4888 // promote objects. But we should not record any slots in non-black |
| 4896 // objects. A grey object's slots would be rescanned. A white object | 4889 // objects. A grey object's slots would be rescanned. A white object |
| 4897 // might not survive until the end of the collection, so recording | 4890 // might not survive until the end of the collection, so recording |
| 4898 // its slots would be a violation of the invariant. | 4891 // its slots would be a violation of the invariant. |
| 4899 bool record_slots = false; | 4892 bool record_slots = false; |
| 4900 if (incremental_marking()->IsCompacting()) { | 4893 if (incremental_marking()->IsCompacting()) { |
| 4901 MarkBit mark_bit = ObjectMarking::MarkBitFrom(target); | 4894 record_slots = ObjectMarking::IsBlack(target); |
| 4902 record_slots = Marking::IsBlack(mark_bit); | |
| 4903 } | 4895 } |
| 4904 | 4896 |
| 4905 IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); | 4897 IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots); |
| 4906 if (target->IsJSFunction()) { | 4898 if (target->IsJSFunction()) { |
| 4907 // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for | 4899 // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots for |
| 4908 // these links are recorded during processing of weak lists. | 4900 // these links are recorded during processing of weak lists. |
| 4909 JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); | 4901 JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor); |
| 4910 } else { | 4902 } else { |
| 4911 target->IterateBody(target->map()->instance_type(), size, &visitor); | 4903 target->IterateBody(target->map()->instance_type(), size, &visitor); |
| 4912 } | 4904 } |
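The record_slots change is the same predicate folding as elsewhere in this diff. Under the assumption that IsCompacting() is the only precondition, as in the hunk above, the whole guard could even be a single expression; a sketch, not a suggested further change:

    // Record slots only for black promoted objects while compacting:
    // grey objects will be rescanned, and white objects may not survive.
    bool record_slots =
        incremental_marking()->IsCompacting() && ObjectMarking::IsBlack(target);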
| (...skipping 1217 matching lines...) |
| 6130 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) { | 6122 explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) { |
| 6131 MarkReachableObjects(); | 6123 MarkReachableObjects(); |
| 6132 } | 6124 } |
| 6133 | 6125 |
| 6134 ~UnreachableObjectsFilter() { | 6126 ~UnreachableObjectsFilter() { |
| 6135 heap_->mark_compact_collector()->ClearMarkbits(); | 6127 heap_->mark_compact_collector()->ClearMarkbits(); |
| 6136 } | 6128 } |
| 6137 | 6129 |
| 6138 bool SkipObject(HeapObject* object) { | 6130 bool SkipObject(HeapObject* object) { |
| 6139 if (object->IsFiller()) return true; | 6131 if (object->IsFiller()) return true; |
| 6140 MarkBit mark_bit = ObjectMarking::MarkBitFrom(object); | 6132 return ObjectMarking::IsWhite(object); |
| 6141 return Marking::IsWhite(mark_bit); | |
| 6142 } | 6133 } |
| 6143 | 6134 |
| 6144 private: | 6135 private: |
| 6145 class MarkingVisitor : public ObjectVisitor { | 6136 class MarkingVisitor : public ObjectVisitor { |
| 6146 public: | 6137 public: |
| 6147 MarkingVisitor() : marking_stack_(10) {} | 6138 MarkingVisitor() : marking_stack_(10) {} |
| 6148 | 6139 |
| 6149 void VisitPointers(Object** start, Object** end) override { | 6140 void VisitPointers(Object** start, Object** end) override { |
| 6150 for (Object** p = start; p < end; p++) { | 6141 for (Object** p = start; p < end; p++) { |
| 6151 if (!(*p)->IsHeapObject()) continue; | 6142 if (!(*p)->IsHeapObject()) continue; |
| 6152 HeapObject* obj = HeapObject::cast(*p); | 6143 HeapObject* obj = HeapObject::cast(*p); |
| 6144 // Use Marking instead of ObjectMarking to avoid adjusting live bytes |
| 6145 // counter. |
| 6153 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); | 6146 MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj); |
| 6154 if (Marking::IsWhite(mark_bit)) { | 6147 if (Marking::IsWhite(mark_bit)) { |
| 6155 Marking::WhiteToBlack(mark_bit); | 6148 Marking::WhiteToBlack(mark_bit); |
| 6156 marking_stack_.Add(obj); | 6149 marking_stack_.Add(obj); |
| 6157 } | 6150 } |
| 6158 } | 6151 } |
| 6159 } | 6152 } |
| 6160 | 6153 |
| 6161 void TransitiveClosure() { | 6154 void TransitiveClosure() { |
| 6162 while (!marking_stack_.is_empty()) { | 6155 while (!marking_stack_.is_empty()) { |
| (...skipping 452 matching lines...) |
| 6615 } | 6608 } |
| 6616 | 6609 |
| 6617 | 6610 |
| 6618 // static | 6611 // static |
| 6619 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6612 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6620 return StaticVisitorBase::GetVisitorId(map); | 6613 return StaticVisitorBase::GetVisitorId(map); |
| 6621 } | 6614 } |
| 6622 | 6615 |
| 6623 } // namespace internal | 6616 } // namespace internal |
| 6624 } // namespace v8 | 6617 } // namespace v8 |