Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 2644523002: [heap] Provide ObjectMarking with marking transitions (Closed)
Patch Set: Fix markbit clearing for LO (created 3 years, 11 months ago)
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/heap.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/ast/context-slot-cache.h"
#include "src/base/bits.h"
(...skipping 3143 matching lines...)

void Heap::AdjustLiveBytes(HeapObject* object, int by) {
  // As long as the inspected object is black and we are currently not
  // iterating the heap using HeapIterator, we can update the live byte count.
  // We cannot update while using HeapIterator because the iterator is
  // temporarily marking the whole object graph, without updating live bytes.
  if (lo_space()->Contains(object)) {
    lo_space()->AdjustLiveBytes(by);
  } else if (!in_heap_iterator() &&
             !mark_compact_collector()->sweeping_in_progress() &&
-            Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
+            ObjectMarking::IsBlack(object)) {
    DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
    MemoryChunk::IncrementLiveBytes(object, by);
  }
}

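
The mechanical change this CL makes is visible in the hunk above: call sites
stop composing Marking::IsBlack with ObjectMarking::MarkBitFrom and instead go
through ObjectMarking directly. A minimal sketch of what such predicate
wrappers could look like, assuming they simply fold the two existing calls
together (the shape is inferred from the call sites in this diff, not quoted
from the CL):

// Hypothetical sketch only: ObjectMarking as a thin facade over the
// existing MarkBitFrom() lookup and the Marking bit predicates.
class ObjectMarking : public AllStatic {
 public:
  V8_INLINE static bool IsBlack(HeapObject* obj) {
    return Marking::IsBlack(MarkBitFrom(obj));
  }
  V8_INLINE static bool IsWhite(HeapObject* obj) {
    return Marking::IsWhite(MarkBitFrom(obj));
  }
  V8_INLINE static bool IsBlackOrGrey(HeapObject* obj) {
    return Marking::IsBlackOrGrey(MarkBitFrom(obj));
  }
};
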

FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
                                         int elements_to_trim) {
  CHECK_NOT_NULL(object);
+  DCHECK(CanMoveObjectStart(object));
  DCHECK(!object->IsFixedTypedArrayBase());
  DCHECK(!object->IsByteArray());
  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
  const int bytes_to_trim = elements_to_trim * element_size;
  Map* map = object->map();

  // For now this trick is only applied to objects in new and paged space.
  // In large object space the object's start must coincide with the chunk
  // start, and thus the trick is just not applicable.
  DCHECK(!lo_space()->Contains(object));
(...skipping 30 matching lines...)
      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object))) {
    Page* page = Page::FromAddress(old_start);
    page->markbits()->ClearRange(
        page->AddressToMarkbitIndex(old_start),
        page->AddressToMarkbitIndex(old_start + bytes_to_trim));
  }

  // Initialize header of the trimmed array. Since left trimming is only
  // performed on pages which are not concurrently swept, creating a filler
  // object does not require synchronization.
-  DCHECK(CanMoveObjectStart(object));
  Object** former_start = HeapObject::RawField(object, 0);
  int new_start_index = elements_to_trim * (element_size / kPointerSize);
  former_start[new_start_index] = map;
  former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);

  FixedArrayBase* new_object =
      FixedArrayBase::cast(HeapObject::FromAddress(new_start));

  // Maintain consistency of live bytes during incremental marking.
  AdjustLiveBytes(new_object, -bytes_to_trim);
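
For intuition, the trimming arithmetic above can be worked through standalone.
The snippet below only mirrors the constants used in LeftTrimFixedArray and
assumes a 64-bit build (kPointerSize == 8); it is illustrative, not part of
the patch:

#include <cstdio>

int main() {
  const int kPointerSize = 8;             // assumed 64-bit build
  const int element_size = kPointerSize;  // plain FixedArray case
  const int elements_to_trim = 2;
  const int bytes_to_trim = elements_to_trim * element_size;  // 16 bytes
  // The array used to start at old_start; after trimming it starts at
  // new_start = old_start + bytes_to_trim. A filler covers the 16 trimmed
  // bytes, and the mark bits for exactly that byte range are cleared above.
  std::printf("bytes_to_trim = %d\n", bytes_to_trim);
  return 0;
}
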
(...skipping 50 matching lines...)
  // We do not create a filler for objects in large object space.
  // TODO(hpayer): We should shrink the large object page if the size
  // of the object changed significantly.
  if (!lo_space()->Contains(object)) {
    HeapObject* filler =
        CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
    DCHECK_NOT_NULL(filler);
    // Clear the mark bits of the black area that now belongs to the filler.
    // This is an optimization. The sweeper will release black fillers anyway.
    if (incremental_marking()->black_allocation() &&
-        Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(filler))) {
+        ObjectMarking::IsBlackOrGrey(filler)) {
      Page* page = Page::FromAddress(new_end);
      page->markbits()->ClearRange(
          page->AddressToMarkbitIndex(new_end),
          page->AddressToMarkbitIndex(new_end + bytes_to_trim));
    }
  }

  // Initialize header of the trimmed array. We are storing the new length
  // using a release store after creating a filler for the left-over space to
  // avoid races with the sweeper thread.
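
The release store mentioned in this comment pairs with an acquire load on the
sweeper side. V8 has its own atomics primitives; the standalone std::atomic
sketch below only illustrates the ordering guarantee being relied on (all
names here are invented for the example):

#include <atomic>

std::atomic<int> published_length{0};
int filler_word = 0;

// Writer (analogous to the trimming path): finish initializing the filler
// first, then publish the new length with release semantics.
void PublishNewLength(int new_length) {
  filler_word = 1;  // stands in for filler-object initialization
  published_length.store(new_length, std::memory_order_release);
}

// Reader (analogous to the sweeper): an acquire load that observes the new
// length is guaranteed to also observe the completed filler writes.
int ReadLength() {
  return published_length.load(std::memory_order_acquire);
}
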
(...skipping 968 matching lines...)

void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
  // TODO(hpayer): We do not have to iterate reservations on black objects
  // for marking. We just have to execute the special visiting side effect
  // code that adds objects to global data structures, e.g. for array buffers.

  // Code space, map space, and large object space do not use black pages.
  // Hence we have to color all objects of the reservation first black to
  // avoid an unnecessary marking deque load.
  if (incremental_marking()->black_allocation()) {
-    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+    for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) {
      const Heap::Reservation& res = reservations[i];
      for (auto& chunk : res) {
        Address addr = chunk.start;
        while (addr < chunk.end) {
          HeapObject* obj = HeapObject::FromAddress(addr);
-          Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj));
+          ObjectMarking::WhiteToBlack(obj);
          addr += obj->Size();
        }
      }
    }
+    // Iterate black objects in old space, code space, map space, and large
+    // object space for side effects.
    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
      const Heap::Reservation& res = reservations[i];
      for (auto& chunk : res) {
        Address addr = chunk.start;
        while (addr < chunk.end) {
          HeapObject* obj = HeapObject::FromAddress(addr);
          incremental_marking()->IterateBlackObject(obj);
          addr += obj->Size();
        }
      }
    }
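
Both loops above use the same contiguous-walk pattern: objects in a
reservation chunk are allocated back to back, so stepping by obj->Size()
visits each object exactly once. Factored out, it would look like the helper
below (WalkChunk is hypothetical and not part of the patch; Address and
HeapObject are the file's own types):

// Visit every object in [start, end); 'visit' stands in for
// ObjectMarking::WhiteToBlack or IncrementalMarking::IterateBlackObject.
template <typename Visitor>
void WalkChunk(Address start, Address end, Visitor&& visit) {
  Address addr = start;
  while (addr < end) {
    HeapObject* obj = HeapObject::FromAddress(addr);
    visit(obj);
    addr += obj->Size();
  }
}
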
(...skipping 558 matching lines...)
void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
                                            bool was_marked_black) {
  // We are not collecting slots on new space objects during mutation, thus we
  // have to scan for pointers to evacuation candidates when we promote
  // objects. But we should not record any slots in non-black objects: slots
  // in grey objects would be rescanned anyway, and a white object might not
  // survive until the end of the collection, so recording its slots would
  // violate the invariant.
  bool record_slots = false;
  if (incremental_marking()->IsCompacting()) {
-    MarkBit mark_bit = ObjectMarking::MarkBitFrom(target);
-    record_slots = Marking::IsBlack(mark_bit);
+    record_slots = ObjectMarking::IsBlack(target);
  }

  IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
  if (target->IsJSFunction()) {
    // JSFunctions reachable through kNextFunctionLinkOffset are weak. Slots
    // for these links are recorded during processing of weak lists.
    JSFunction::BodyDescriptorWeakCode::IterateBody(target, size, &visitor);
  } else {
    target->IterateBody(target->map()->instance_type(), size, &visitor);
  }
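
The invariant spelled out in the comment at the top of this function condenses
to a single predicate; the helper below merely restates the logic above and is
illustrative, not part of the patch:

// Record slots only for promoted objects that are already black while a
// compacting incremental marking is in progress: grey objects will be
// rescanned by the marker anyway, and white objects may not survive the
// cycle.
static bool ShouldRecordSlots(Heap* heap, HeapObject* target) {
  return heap->incremental_marking()->IsCompacting() &&
         ObjectMarking::IsBlack(target);
}
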
(...skipping 1217 matching lines...)
  explicit UnreachableObjectsFilter(Heap* heap) : heap_(heap) {
    MarkReachableObjects();
  }

  ~UnreachableObjectsFilter() {
    heap_->mark_compact_collector()->ClearMarkbits();
  }

  bool SkipObject(HeapObject* object) {
    if (object->IsFiller()) return true;
-    MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
-    return Marking::IsWhite(mark_bit);
+    return ObjectMarking::IsWhite(object);
  }

 private:
  class MarkingVisitor : public ObjectVisitor {
   public:
    MarkingVisitor() : marking_stack_(10) {}

    void VisitPointers(Object** start, Object** end) override {
      for (Object** p = start; p < end; p++) {
        if (!(*p)->IsHeapObject()) continue;
        HeapObject* obj = HeapObject::cast(*p);
+        // Use Marking instead of ObjectMarking to avoid adjusting the live
+        // bytes counter.
        MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
        if (Marking::IsWhite(mark_bit)) {
          Marking::WhiteToBlack(mark_bit);
          marking_stack_.Add(obj);
        }
      }
    }

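
The new comment above hints at the layering this CL introduces: the
ObjectMarking transitions presumably also maintain the per-chunk live-bytes
counter, which this debug-only filter must not disturb, since it flips mark
bits temporarily and clears them all again in its destructor. A sketch of the
assumed difference (the bookkeeping is inferred from the comment, not quoted
from the patch):

// Raw bitmap layer -- what the filter stays on:
//   Marking::WhiteToBlack(mark_bit);   // flips the mark bits only
// Object layer -- what GC/mutator code is migrating to; plausibly:
V8_INLINE static void WhiteToBlack(HeapObject* obj) {  // hypothetical body
  Marking::WhiteToBlack(MarkBitFrom(obj));
  MemoryChunk::IncrementLiveBytes(obj, obj->Size());  // assumed bookkeeping
}
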
    void TransitiveClosure() {
      while (!marking_stack_.is_empty()) {
(...skipping 432 matching lines...)
}


// static
int Heap::GetStaticVisitorIdForMap(Map* map) {
  return StaticVisitorBase::GetVisitorId(map);
}

}  // namespace internal
}  // namespace v8