Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1039733003: This fixes a missing incremental write barrier issue when double fields unboxing is enabled. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 9 months ago
Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index e6c1217a4f9e08d8833d06c3941cf8c9c1c61655..8e7a0bb85a52cce78a009ed60c311e2c175db079 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1903,6 +1903,18 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
// to new space.
DCHECK(!target->IsMap());
Address obj_address = target->address();
+
+ // We are not collecting slots on new space objects during mutation,
+ // thus we have to scan for pointers to evacuation candidates when we
+ // promote objects. But we should not record any slots in non-black
+ // objects: grey objects' slots would be rescanned anyway, and a white
+ // object might not survive until the end of the collection, so
+ // recording its slots would violate the marking invariant.
+ bool record_slots = false;
+ if (incremental_marking()->IsCompacting()) {
+ MarkBit mark_bit = Marking::MarkBitFrom(target);
+ record_slots = Marking::IsBlack(mark_bit);
+ }
#if V8_DOUBLE_FIELDS_UNBOXING
LayoutDescriptorHelper helper(target->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
@@ -1912,15 +1924,15 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
int end_of_region_offset;
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
IterateAndMarkPointersToFromSpace(
- obj_address + offset, obj_address + end_of_region_offset,
- &ScavengeObject);
+ record_slots, obj_address + offset,
+ obj_address + end_of_region_offset, &ScavengeObject);
}
offset = end_of_region_offset;
}
} else {
#endif
- IterateAndMarkPointersToFromSpace(obj_address, obj_address + size,
- &ScavengeObject);
+ IterateAndMarkPointersToFromSpace(
+ record_slots, obj_address, obj_address + size, &ScavengeObject);
#if V8_DOUBLE_FIELDS_UNBOXING
}
#endif
@@ -4892,22 +4904,11 @@ void Heap::ZapFromSpace() {
}
-void Heap::IterateAndMarkPointersToFromSpace(Address start, Address end,
+void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
+ Address end,
ObjectSlotCallback callback) {
Address slot_address = start;
- // We are not collecting slots on new space objects during mutation
- // thus we have to scan for pointers to evacuation candidates when we
- // promote objects. But we should not record any slots in non-black
- // objects. Grey object's slots would be rescanned.
- // White object might not survive until the end of collection
- // it would be a violation of the invariant to record it's slots.
- bool record_slots = false;
- if (incremental_marking()->IsCompacting()) {
- MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start));
- record_slots = Marking::IsBlack(mark_bit);
- }
-
while (slot_address < end) {
Object** slot = reinterpret_cast<Object**>(slot_address);
Object* object = *slot;
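
The moved record_slots computation is the heart of the fix: the old code derived the mark bit from HeapObject::FromAddress(start) inside IterateAndMarkPointersToFromSpace, but under double fields unboxing that function is called with start pointing into the middle of an object (obj_address + offset), so the mark bit could be read from an address that is not the start of any object. That appears to be the missing write barrier the issue title refers to. Computing record_slots once per promoted object in DoScavenge, from the object's real address, avoids it. Below is a minimal standalone sketch of the decision; Color, FakeObject, RecordSlot, and ScanPromotedObject are hypothetical stand-ins for V8's MarkBit, HeapObject, and MarkCompactCollector machinery, not real V8 API.

// A minimal standalone sketch of the record_slots decision, using
// hypothetical stand-ins for V8's marking machinery.
#include <cstdio>
#include <vector>

enum class Color { kWhite, kGrey, kBlack };  // tri-color marking state

struct FakeObject {
  Color color;
  std::vector<FakeObject**> slots;  // the object's pointer fields
};

// Stand-in for MarkCompactCollector::RecordSlot: remembers a slot so the
// compactor can update it after an evacuation candidate moves.
void RecordSlot(FakeObject** slot) {
  std::printf("recorded slot %p\n", static_cast<void*>(slot));
}

// Mirrors the patched DoScavenge logic: slots are recorded only for black
// objects. Grey objects will be rescanned by the marker anyway, and white
// objects may not survive the cycle, so recording their slots could leave
// the compactor pointing into dead memory.
void ScanPromotedObject(FakeObject* target, bool compacting) {
  bool record_slots = compacting && target->color == Color::kBlack;
  for (FakeObject** slot : target->slots) {
    if (record_slots) RecordSlot(slot);
  }
}

int main() {
  FakeObject* field = nullptr;
  FakeObject black{Color::kBlack, {&field}};
  ScanPromotedObject(&black, /*compacting=*/true);  // records the slot
  FakeObject grey{Color::kGrey, {&field}};
  ScanPromotedObject(&grey, /*compacting=*/true);   // records nothing
  return 0;
}

Note how the decision is taken once per promoted object, before its body is scanned region by region, which is exactly why the patch threads record_slots through as a parameter rather than recomputing it from start.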
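The V8_DOUBLE_FIELDS_UNBOXING branch is what puts start in the middle of an object in the first place: with unboxing enabled, an object's body is a mix of tagged-pointer regions and raw-double regions, and only the tagged regions may be scanned for pointers. The following standalone sketch shows the shape of that region walk; Layout, EndOfRegion, and VisitPointerFields are hypothetical simplifications loosely modeled on LayoutDescriptorHelper, not the real interface.

// A standalone sketch of scanning only the tagged regions of an object,
// assuming a hypothetical Layout type in place of LayoutDescriptorHelper.
#include <cstddef>
#include <cstdio>
#include <vector>

struct Layout {
  std::vector<bool> tagged;  // tagged[i] == true => field i holds a pointer

  bool AllFieldsTagged() const {
    for (bool t : tagged)
      if (!t) return false;
    return true;
  }

  // Finds where the contiguous run of same-taggedness fields starting at
  // `from` ends; plays the role of helper.IsTagged(offset, size, &end).
  size_t EndOfRegion(size_t from) const {
    size_t end = from + 1;
    while (end < tagged.size() && tagged[end] == tagged[from]) ++end;
    return end;
  }
};

// Stand-in for IterateAndMarkPointersToFromSpace over one field range.
void VisitPointerFields(size_t begin, size_t end) {
  std::printf("visit pointer fields [%zu, %zu)\n", begin, end);
}

// Mirrors the shape of the patched DoScavenge loop: a fast path when every
// field is tagged, otherwise a walk that visits tagged regions and skips
// raw-double regions entirely.
void ScanObject(const Layout& layout) {
  if (layout.AllFieldsTagged()) {
    VisitPointerFields(0, layout.tagged.size());
    return;
  }
  for (size_t offset = 0; offset < layout.tagged.size();) {
    size_t end = layout.EndOfRegion(offset);
    if (layout.tagged[offset]) VisitPointerFields(offset, end);
    offset = end;  // raw doubles are never interpreted as pointers
  }
}

int main() {
  // Fields 0-1 and 4 are tagged; 2-3 hold unboxed doubles.
  ScanObject(Layout{{true, true, false, false, true}});
  return 0;
}

Skipping the untagged regions matters because a raw double's bit pattern can look like a tagged pointer; treating it as one would corrupt the scavenge.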
