Index: src/heap/heap.cc |
diff --git a/src/heap/heap.cc b/src/heap/heap.cc |
index e6c1217a4f9e08d8833d06c3941cf8c9c1c61655..8e7a0bb85a52cce78a009ed60c311e2c175db079 100644 |
--- a/src/heap/heap.cc |
+++ b/src/heap/heap.cc |
@@ -1903,6 +1903,18 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, |
// to new space. |
DCHECK(!target->IsMap()); |
Address obj_address = target->address(); |
+ |
+ // We are not collecting slots on new space objects during mutation |
+ // thus we have to scan for pointers to evacuation candidates when we |
+ // promote objects. But we should not record any slots in non-black |
+  // objects. A grey object's slots would be rescanned anyway. |
+  // A white object might not survive until the end of the collection, |
+  // so it would be a violation of the invariant to record its slots. |
+ bool record_slots = false; |
+ if (incremental_marking()->IsCompacting()) { |
+ MarkBit mark_bit = Marking::MarkBitFrom(target); |
+ record_slots = Marking::IsBlack(mark_bit); |
+ } |
#if V8_DOUBLE_FIELDS_UNBOXING |
LayoutDescriptorHelper helper(target->map()); |
bool has_only_tagged_fields = helper.all_fields_tagged(); |
@@ -1912,15 +1924,15 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, |
int end_of_region_offset; |
if (helper.IsTagged(offset, size, &end_of_region_offset)) { |
IterateAndMarkPointersToFromSpace( |
- obj_address + offset, obj_address + end_of_region_offset, |
- &ScavengeObject); |
+ record_slots, obj_address + offset, |
+ obj_address + end_of_region_offset, &ScavengeObject); |
} |
offset = end_of_region_offset; |
} |
} else { |
#endif |
- IterateAndMarkPointersToFromSpace(obj_address, obj_address + size, |
- &ScavengeObject); |
+ IterateAndMarkPointersToFromSpace( |
+ record_slots, obj_address, obj_address + size, &ScavengeObject); |
#if V8_DOUBLE_FIELDS_UNBOXING |
} |
#endif |
@@ -4892,22 +4904,11 @@ void Heap::ZapFromSpace() { |
} |
-void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end, |
+void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start, |
+ Address end, |
ObjectSlotCallback callback) { |
Address slot_address = start; |
- // We are not collecting slots on new space objects during mutation |
- // thus we have to scan for pointers to evacuation candidates when we |
- // promote objects. But we should not record any slots in non-black |
- // objects. Grey object's slots would be rescanned. |
- // White object might not survive until the end of collection |
- // it would be a violation of the invariant to record it's slots. |
- bool record_slots = false; |
- if (incremental_marking()->IsCompacting()) { |
- MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start)); |
- record_slots = Marking::IsBlack(mark_bit); |
- } |
- |
while (slot_address < end) { |
Object** slot = reinterpret_cast<Object**>(slot_address); |
Object* object = *slot; |