Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 958d0f8c948adef6d75ba3078f67be22aefa723d..8854c706dda4f1ffefced9c3f56e71d8afc720a3 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -3069,6 +3069,16 @@ void Heap::CreateFillerObjectAt(Address addr, int size,
   if (mode == ClearRecordedSlots::kYes) {
     ClearRecordedSlotRange(addr, addr + size);
   }
+
+  // If the location where the filler is created is within a black area we have
+  // to clear the mark bits of the filler space.
+  if (incremental_marking()->black_allocation() &&
+      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(addr))) {
+    Page* page = Page::FromAddress(addr);
+    page->markbits()->ClearRange(page->AddressToMarkbitIndex(addr),
+                                 page->AddressToMarkbitIndex(addr + size));
+  }
+
   // At this point, we may be deserializing the heap from a snapshot, and
   // none of the maps have been created yet and are NULL.
   DCHECK((filler->map() == NULL && !deserialization_complete_) ||
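
(Context, not part of the patch: under black allocation the allocator hands out memory whose mark bits are already set, so a filler carved out of such an area would otherwise still look like a live, black object to the incremental marker. A minimal standalone sketch of the bitmap bookkeeping the new block performs; PageBitmap, kPageStart and kTaggedSize are illustrative stand-ins rather than V8 API.)

// filler_markbits_sketch.cc -- illustrative only, not V8 code.
#include <bitset>
#include <cstdint>
#include <iostream>

constexpr uintptr_t kPageStart = 0x10000;  // assumed page base address
constexpr size_t kTaggedSize = 8;          // one mark bit per 8-byte word
constexpr size_t kPageWords = 512;

struct PageBitmap {
  std::bitset<kPageWords> bits;
  size_t AddressToMarkbitIndex(uintptr_t addr) const {
    return (addr - kPageStart) / kTaggedSize;
  }
  // Clears the mark bits for [start, end), in the spirit of
  // markbits()->ClearRange() above.
  void ClearRange(size_t start, size_t end) {
    for (size_t i = start; i < end; i++) bits.reset(i);
  }
};

int main() {
  PageBitmap page;
  page.bits.set();  // pretend the whole area was black-allocated

  // A 64-byte filler is carved out at kPageStart + 128; its words must no
  // longer be considered live by the marker.
  uintptr_t filler = kPageStart + 128;
  size_t size = 64;
  page.ClearRange(page.AddressToMarkbitIndex(filler),
                  page.AddressToMarkbitIndex(filler + size));

  std::cout << "marked words left: " << page.bits.count() << "\n";  // 504
}
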
@@ -3141,6 +3151,9 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   // Calculate location of new array start.
   Address new_start = object->address() + bytes_to_trim;
 
+  // Transfer the mark bits to their new location.
+  IncrementalMarking::TransferMark(this, object->address(), new_start);
+
   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
@@ -3154,18 +3167,18 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   int new_start_index = elements_to_trim * (element_size / kPointerSize);
   former_start[new_start_index] = map;
   former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
+
   FixedArrayBase* new_object =
       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
+  // Maintain consistency of live bytes during incremental marking
+  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
+
   // Remove recorded slots for the new map and length offset.
   ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
   ClearRecordedSlot(new_object, HeapObject::RawField(
                                     new_object, FixedArrayBase::kLengthOffset));
-  // Maintain consistency of live bytes during incremental marking
-  IncrementalMarking::TransferMark(this, object->address(), new_start);
-  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
-
   // Notify the heap profiler of change in object layout.
   OnMoveEvent(new_object, object, new_object->Size());
   return new_object;
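
(Context, not part of the patch: the two hunks above move IncrementalMarking::TransferMark() up to right after new_start is computed and AdjustLiveBytes() up to right after new_object is formed, presumably so the mark bit is carried over before the trimmed prefix becomes a filler. A standalone sketch of that order of operations; the bitmap, Index() and the toy TransferMark() below only model the idea and are not the V8 functions of the same name.)

// left_trim_sketch.cc -- illustrative only, not V8 code.
#include <bitset>
#include <cstdint>
#include <iostream>

constexpr uintptr_t kPageStart = 0x10000;
constexpr size_t kTaggedSize = 8;
constexpr size_t kPageWords = 512;

std::bitset<kPageWords> mark_bits;  // bit set at an object's first word
size_t live_bytes = 0;

size_t Index(uintptr_t addr) { return (addr - kPageStart) / kTaggedSize; }

// Moves the mark of the object starting at old_start to new_start.
void TransferMark(uintptr_t old_start, uintptr_t new_start) {
  if (mark_bits.test(Index(old_start))) {
    mark_bits.reset(Index(old_start));
    mark_bits.set(Index(new_start));
  }
}

int main() {
  uintptr_t object = kPageStart + 256;  // a marked 40-word array
  size_t object_size = 40 * kTaggedSize;
  mark_bits.set(Index(object));
  live_bytes = object_size;

  // Left-trim 16 elements: the object now starts 16 words further in.
  size_t bytes_to_trim = 16 * kTaggedSize;
  uintptr_t new_start = object + bytes_to_trim;

  // First move the mark, then account for the bytes that became a filler.
  TransferMark(object, new_start);
  live_bytes -= bytes_to_trim;

  std::cout << "marked at new start: " << mark_bits.test(Index(new_start))
            << ", live bytes: " << live_bytes << "\n";  // 1, 192
}
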
@@ -4179,14 +4192,13 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
   // Hence we have to color all objects of the reservation first black to avoid
   // unnecessary marking deque load.
   if (incremental_marking()->black_allocation()) {
-    for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
       const Heap::Reservation& res = reservations[i];
       for (auto& chunk : res) {
         Address addr = chunk.start;
         while (addr < chunk.end) {
           HeapObject* obj = HeapObject::FromAddress(addr);
           Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj));
-          MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
           addr += obj->Size();
         }
       }
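
(Context, not part of the patch: this hunk pre-blackens reservations in every space from OLD_SPACE onward instead of starting at CODE_SPACE, and no longer bumps live bytes per object; the inner loop simply walks each reservation chunk object by object via obj->Size(). A self-contained sketch of that kind of walk over back-to-back variable-size objects; ToyObject and the size-prefix layout are assumptions for illustration, not V8's object layout.)

// reservation_walk_sketch.cc -- illustrative only, not V8 code.
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Toy heap object: its first word holds the object's size in bytes.
struct ToyObject {
  uint64_t size_in_bytes;
};

int main() {
  // Build one "chunk": three back-to-back objects of 32, 64 and 16 bytes.
  std::vector<uint8_t> chunk(32 + 64 + 16);
  uint64_t sizes[] = {32, 64, 16};
  uint8_t* addr = chunk.data();
  for (uint64_t s : sizes) {
    std::memcpy(addr, &s, sizeof(s));
    addr += s;
  }

  // Walk the chunk the way the patched loop does: read each object's size
  // and advance by it, "marking" every object along the way.
  int marked = 0;
  uint8_t* cur = chunk.data();
  uint8_t* end = chunk.data() + chunk.size();
  while (cur < end) {
    ToyObject obj;
    std::memcpy(&obj, cur, sizeof(obj));
    marked++;  // stands in for Marking::MarkBlack(...)
    cur += obj.size_in_bytes;
  }
  std::cout << "objects marked black: " << marked << "\n";  // 3
}
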