Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 2861f2306321d21609f26296cdd2380d558f155f..1612e2c824b7a5d294249d53152f27ea2f6ea731 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1145,6 +1145,7 @@ bool Heap::ReserveSpace(Reservation* reservations) {
static const int kThreshold = 20;
while (gc_performed && counter++ < kThreshold) {
gc_performed = false;
+ bool black_allocation = incremental_marking_->black_allocation();
for (int space = NEW_SPACE; space < Serializer::kNumberOfSpaces; space++) {
Reservation* reservation = &reservations[space];
DCHECK_LE(1, reservation->length());
@@ -1173,6 +1174,12 @@ bool Heap::ReserveSpace(Reservation* reservations) {
DCHECK(space < Serializer::kNumberOfPreallocatedSpaces);
chunk.start = free_space_address;
chunk.end = free_space_address + size;
+ // We have to make sure that either all pages are black pages or
+ // none. If black allocation was turned on while allocating the
+ // reservations for different spaces, we have to abort.
+ if (black_allocation != incremental_marking_->black_allocation()) {
+ perform_gc = true;
ulan
2016/02/10 10:27:39
How about not starting black allocation if deseria
Hannes Payer (out of office)
2016/02/11 18:18:07
I don't think it would be an issue since there sho

+ }
} else {
perform_gc = true;
break;
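
The guard above enforces an all-or-none invariant: every page backing the reservations must end up with the same black-page status. Below is a minimal standalone model of the retry loop (hypothetical names; the real allocation machinery is elided) showing how sampling the flag once per round catches a mid-round flip:

// Stand-in for IncrementalMarking: the flag may flip to true at any
// point while the reservations are being allocated.
struct MarkingState {
  bool black_allocation = false;
};

// Model of the loop in Heap::ReserveSpace: sample the flag at the start
// of a round and redo the whole round (after a GC) if it changed, so
// that all reserved pages agree on their color.
bool ReserveAllSpaces(MarkingState* marking, int num_spaces) {
  static const int kThreshold = 20;
  int counter = 0;
  bool gc_performed = true;
  while (gc_performed && counter++ < kThreshold) {
    gc_performed = false;
    bool black_allocation = marking->black_allocation;
    for (int space = 0; space < num_spaces; space++) {
      // ... allocate the reservation chunks for |space| here ...
      if (black_allocation != marking->black_allocation) {
        gc_performed = true;  // mixed page colors: abort this round
        break;
      }
    }
  }
  return !gc_performed;  // true iff a consistent round succeeded
}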
@@ -1961,7 +1968,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
// to new space.
DCHECK(!target->IsMap());
- IteratePointersToFromSpace(target, size, &Scavenger::ScavengeObject);
+ IteratePromotedObject(target, size, &Scavenger::ScavengeObject);
}
}
@@ -3317,7 +3324,6 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
HeapObject* result = nullptr;
if (!allocation.To(&result)) return allocation;
-
if (immovable) {
Address address = result->address();
// Code objects which should stay at a fixed address are allocated either
@@ -3367,6 +3373,12 @@ AllocationResult Heap::CopyCode(Code* code) {
isolate_->code_range()->contains(code->address()) ||
obj_size <= code_space()->AreaSize());
new_code->Relocate(new_addr - old_addr);
+ // We have to iterate over the object and process its pointers when black
+ // allocation is on.
+ if (incremental_marking()->black_allocation() &&
+ Page::FromAddress(new_code->address())->IsFlagSet(Page::BLACK_PAGE)) {
+ incremental_marking()->IterateBlackCode(new_code);
ulan
2016/02/10 10:27:39
How about making this more generic "allocation bar
Hannes Payer (out of office)
2016/02/11 18:18:07
I like it. It will be also called when processing

+ }
return new_code;
}
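
The reason for the guard is that an object copied onto a black page is born marked, so the incremental marker will never visit it again; its outgoing references have to be greyed at copy time instead. IterateBlackCode itself is not part of this hunk, so the following is only a sketch of that rule, with simplified types standing in for V8's marking deque:

#include <vector>

// Simplified object graph: references plus a mark bit.
struct Obj {
  std::vector<Obj*> refs;
  bool marked = false;
};

// Grey an object: set its mark bit and queue it for scanning.
void MarkObject(std::vector<Obj*>* worklist, Obj* obj) {
  if (!obj->marked) {
    obj->marked = true;
    worklist->push_back(obj);
  }
}

// A copy living on a black page is already marked and will be skipped by
// the marker, so its references are pushed onto the worklist right away.
void IterateBlack(std::vector<Obj*>* worklist, Obj* black_copy) {
  for (Obj* target : black_copy->refs) MarkObject(worklist, target);
}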
@@ -3414,7 +3426,12 @@ AllocationResult Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
new_obj_size <= code_space()->AreaSize());
new_code->Relocate(new_addr - old_addr);
-
+ // We have to iterate over the object and process its pointers when
+ // black allocation is on.
+ if (incremental_marking()->black_allocation() &&
+ Page::FromAddress(new_code->address())->IsFlagSet(Page::BLACK_PAGE)) {
+ incremental_marking()->IterateBlackCode(new_code);
+ }
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) code->ObjectVerify();
#endif
@@ -4478,10 +4495,9 @@ void Heap::ZapFromSpace() {
}
}
-
-void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
- Address end, bool record_slots,
- ObjectSlotCallback callback) {
+void Heap::IteratePromotedObjectPointers(HeapObject* object, Address start,
+ Address end, bool record_slots,
+ ObjectSlotCallback callback) {
Address slot_address = start;
while (slot_address < end) {
@@ -4507,29 +4523,41 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
MarkCompactCollector::IsOnEvacuationCandidate(target)) {
mark_compact_collector()->RecordSlot(object, slot, target);
}
+ // Black allocation requires us to mark objects referenced by promoted
+ // objects. If the object got forwarded to *slot,
+ // UpdateMarkingDequeAfterScavenge will take care of the color
+ // transition.
+ if (incremental_marking()->black_allocation()) {
+ IncrementalMarking::MarkObject(this, HeapObject::cast(target));
ulan
2016/02/10 10:27:39
This ignores all the special handling that the mar
Hannes Payer (out of office)
2016/02/11 18:18:07
Weakness during black allocation is special, i.e.

+ }
}
slot_address += kPointerSize;
}
}
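
A sketch of the slot walk this function performs, with placeholder types and callbacks in place of V8's tagged pointers: the body is scanned one pointer-sized word at a time, and under black allocation each heap target is additionally greyed, because a promoted object that ends up on a black page will not be rescanned by the marker. Signatures here are hypothetical, not V8's:

// Walk [start, end) one slot at a time. |scavenge| may update the slot
// (e.g. forward its target); |grey| stands in for
// IncrementalMarking::MarkObject.
void WalkPromotedBody(void** start, void** end,
                      void (*scavenge)(void** slot),
                      void (*grey)(void* target),
                      bool black_allocation) {
  for (void** slot = start; slot < end; ++slot) {
    scavenge(slot);
    if (black_allocation && *slot != nullptr) {
      grey(*slot);  // grey the (possibly forwarded) target
    }
  }
}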
-
-class IteratePointersToFromSpaceVisitor final : public ObjectVisitor {
+class IteratePromotedObjectsVisitor final : public ObjectVisitor {
public:
- IteratePointersToFromSpaceVisitor(Heap* heap, HeapObject* target,
- bool record_slots,
- ObjectSlotCallback callback)
+ IteratePromotedObjectsVisitor(Heap* heap, HeapObject* target,
+ bool record_slots, ObjectSlotCallback callback)
: heap_(heap),
target_(target),
record_slots_(record_slots),
callback_(callback) {}
V8_INLINE void VisitPointers(Object** start, Object** end) override {
- heap_->IterateAndMarkPointersToFromSpace(
+ heap_->IteratePromotedObjectPointers(
target_, reinterpret_cast<Address>(start),
reinterpret_cast<Address>(end), record_slots_, callback_);
}
- V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {}
+ V8_INLINE void VisitCodeEntry(Address code_entry_slot) override {
+ // Black allocation requires us to process objects referenced by
+ // promoted objects.
+ if (heap_->incremental_marking()->black_allocation()) {
+ Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
+ IncrementalMarking::MarkObject(heap_, code);
+ }
+ }
private:
Heap* heap_;
@@ -4538,9 +4566,8 @@ class IteratePointersToFromSpaceVisitor final : public ObjectVisitor {
ObjectSlotCallback callback_;
};
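
VisitCodeEntry needs the explicit Code::GetObjectFromEntryAddress step because a code entry slot holds a raw address into the Code object's instructions rather than a tagged pointer, so the generic pointer visitor never sees the Code object behind it. A simplified illustration of recovering an object from such an interior entry address (hypothetical layout, not V8's real Code header):

// Hypothetical object whose "entry" points just past a fixed-size
// header, the way a code entry points at the first instruction.
struct FakeCode {
  static constexpr unsigned kHeaderSize = 16;
  unsigned char header[kHeaderSize];
  unsigned char instructions[64];
};

// Step back over the header to get from the entry address to the object
// start -- the trick Code::GetObjectFromEntryAddress relies on.
FakeCode* FromEntryAddress(unsigned char* entry) {
  return reinterpret_cast<FakeCode*>(entry - FakeCode::kHeaderSize);
}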
-
-void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
- ObjectSlotCallback callback) {
+void Heap::IteratePromotedObject(HeapObject* target, int size,
+ ObjectSlotCallback callback) {
// We are not collecting slots on new space objects during mutation,
// thus we have to scan for pointers to evacuation candidates when we
// promote objects. But we should not record any slots in non-black
@@ -4553,9 +4580,15 @@ void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
record_slots = Marking::IsBlack(mark_bit);
}
- IteratePointersToFromSpaceVisitor visitor(this, target, record_slots,
- callback);
+ IteratePromotedObjectsVisitor visitor(this, target, record_slots, callback);
target->IterateBody(target->map()->instance_type(), size, &visitor);
+
+ // Black allocation requires us to process objects referenced by
+ // promoted objects.
+ if (incremental_marking()->black_allocation()) {
+ Map* map = target->map();
+ IncrementalMarking::MarkObject(this, map);
+ }
}
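
The explicit map marking at the end is needed because IterateBody visits only the object's body fields; the map word is not among the visited slots, so under black allocation it has to be greyed separately. A minimal model of that gap, assuming simplified types:

#include <vector>

struct Map { bool marked = false; };

// An object is a map word plus body slots; a body-only visitor (like
// IterateBody above) never touches the map word.
struct Obj {
  Map* map;
  std::vector<Obj*> body;
};

void GreyMap(std::vector<Map*>* worklist, Map* map) {
  if (!map->marked) {
    map->marked = true;
    worklist->push_back(map);
  }
}

// After the body visitor has run, the map must still be greyed by hand
// under black allocation, mirroring the last hunk above.
void ScanPromoted(Obj* promoted, std::vector<Map*>* worklist,
                  bool black_allocation) {
  // ... body slots were already handled by the visitor ...
  if (black_allocation) GreyMap(worklist, promoted->map);
}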