Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 019b9e392cc08028eee4b6cef66b0319dccb5842..7842dac047e3adfab875d8189feefe876beee367 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -1556,51 +1556,18 @@
 };
 
 
-class MarkCompactCollector::EvacuateVisitorBase
+class MarkCompactCollector::EvacuateNewSpaceVisitor
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
-  EvacuateVisitorBase(Heap* heap, SlotsBuffer** evacuation_slots_buffer)
-      : heap_(heap), evacuation_slots_buffer_(evacuation_slots_buffer) {}
-
-  bool TryEvacuateObject(PagedSpace* target_space, HeapObject* object,
-                         HeapObject** target_object) {
-    int size = object->Size();
-    AllocationAlignment alignment = object->RequiredAlignment();
-    AllocationResult allocation = target_space->AllocateRaw(size, alignment);
-    if (allocation.To(target_object)) {
-      heap_->mark_compact_collector()->MigrateObject(
-          *target_object, object, size, target_space->identity(),
-          evacuation_slots_buffer_);
-      return true;
-    }
-    return false;
-  }
-
- protected:
-  Heap* heap_;
-  SlotsBuffer** evacuation_slots_buffer_;
-};
-
-
-class MarkCompactCollector::EvacuateNewSpaceVisitor
-    : public MarkCompactCollector::EvacuateVisitorBase {
- public:
-  explicit EvacuateNewSpaceVisitor(Heap* heap,
-                                   SlotsBuffer** evacuation_slots_buffer)
-      : EvacuateVisitorBase(heap, evacuation_slots_buffer) {}
+  explicit EvacuateNewSpaceVisitor(Heap* heap) : heap_(heap) {}
 
   virtual bool Visit(HeapObject* object) {
     Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
     int size = object->Size();
-    HeapObject* target_object = nullptr;
+
+    // TODO(hpayer): Refactor EvacuateObject and call this function instead.
     if (heap_->ShouldBePromoted(object->address(), size) &&
-        TryEvacuateObject(heap_->old_space(), object, &target_object)) {
-      // If we end up needing more special cases, we should factor this out.
-      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
-        heap_->array_buffer_tracker()->Promote(
-            JSArrayBuffer::cast(target_object));
-      }
-      heap_->IncrementPromotedObjectsSize(size);
+        heap_->mark_compact_collector()->TryPromoteObject(object, size)) {
       return true;
     }
 
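
Both visitors implement the same narrow HeapObjectVisitor contract: a
single Visit() that returns false when an object could not be moved,
presumably so the driving loop can stop and retry the page once more
space is available. A minimal stand-alone sketch of that contract,
assuming a hypothetical driver (HeapObject and VisitLiveObjects below
are illustrative placeholders, not V8's real types):

    #include <vector>

    struct HeapObject {};  // placeholder for v8::internal::HeapObject

    class HeapObjectVisitor {
     public:
      virtual ~HeapObjectVisitor() = default;
      // Returns false when evacuation of |object| failed.
      virtual bool Visit(HeapObject* object) = 0;
    };

    // Illustrative driver: stops at the first failed evacuation and
    // reports the failure to its caller.
    bool VisitLiveObjects(const std::vector<HeapObject*>& live,
                          HeapObjectVisitor* visitor) {
      for (HeapObject* object : live) {
        if (!visitor->Visit(object)) return false;
      }
      return true;
    }
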
@@ -1627,31 +1594,43 @@
     heap_->IncrementSemiSpaceCopiedObjectSize(size);
     return true;
   }
+
+ private:
+  Heap* heap_;
 };
 
 
 class MarkCompactCollector::EvacuateOldSpaceVisitor
-    : public MarkCompactCollector::EvacuateVisitorBase {
+    : public MarkCompactCollector::HeapObjectVisitor {
  public:
   EvacuateOldSpaceVisitor(Heap* heap,
                           CompactionSpaceCollection* compaction_spaces,
                           SlotsBuffer** evacuation_slots_buffer)
-      : EvacuateVisitorBase(heap, evacuation_slots_buffer),
-        compaction_spaces_(compaction_spaces) {}
+      : heap_(heap),
+        compaction_spaces_(compaction_spaces),
+        evacuation_slots_buffer_(evacuation_slots_buffer) {}
 
   virtual bool Visit(HeapObject* object) {
-    CompactionSpace* target_space = compaction_spaces_->Get(
-        Page::FromAddress(object->address())->owner()->identity());
+    int size = object->Size();
+    AllocationAlignment alignment = object->RequiredAlignment();
     HeapObject* target_object = nullptr;
-    if (TryEvacuateObject(target_space, object, &target_object)) {
-      DCHECK(object->map_word().IsForwardingAddress());
-      return true;
-    }
-    return false;
+    AllocationSpace id =
+        Page::FromAddress(object->address())->owner()->identity();
+    AllocationResult allocation =
+        compaction_spaces_->Get(id)->AllocateRaw(size, alignment);
+    if (!allocation.To(&target_object)) {
+      return false;
+    }
+    heap_->mark_compact_collector()->MigrateObject(
+        target_object, object, size, id, evacuation_slots_buffer_);
+    DCHECK(object->map_word().IsForwardingAddress());
+    return true;
   }
 
  private:
+  Heap* heap_;
   CompactionSpaceCollection* compaction_spaces_;
+  SlotsBuffer** evacuation_slots_buffer_;
 };
 
 
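
The old-space visitor now inlines the allocate-then-migrate sequence
that EvacuateVisitorBase used to provide: reserve room in the target
compaction space first, and migrate the object only if the reservation
succeeded. A simplified, compilable sketch of that two-step pattern;
AllocationResult and AllocateRaw here are stand-ins modeled on the V8
names, with malloc standing in for bump allocation in a compaction
space:

    #include <cstdlib>
    #include <cstring>

    // Failed allocations are encoded as a null address; To() succeeds,
    // and yields the address, only for a non-null one.
    struct AllocationResult {
      void* address;
      bool To(void** out) const {
        if (address == nullptr) return false;
        *out = address;
        return true;
      }
    };

    AllocationResult AllocateRaw(int size) {
      return AllocationResult{std::malloc(static_cast<std::size_t>(size))};
    }

    // Allocate first, copy second: on failure the object stays where
    // it is and the caller sees false, mirroring Visit() above.
    bool TryEvacuate(void* object, int size) {
      void* target = nullptr;
      AllocationResult allocation = AllocateRaw(size);
      if (!allocation.To(&target)) return false;
      std::memcpy(target, object, static_cast<std::size_t>(size));
      return true;
    }
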
@@ -3010,6 +2989,28 @@
 }
 
 
+bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
+                                            int object_size) {
+  OldSpace* old_space = heap()->old_space();
+
+  HeapObject* target = nullptr;
+  AllocationAlignment alignment = object->RequiredAlignment();
+  AllocationResult allocation = old_space->AllocateRaw(object_size, alignment);
+  if (allocation.To(&target)) {
+    MigrateObject(target, object, object_size, old_space->identity(),
+                  &migration_slots_buffer_);
+    // If we end up needing more special cases, we should factor this out.
+    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
+      heap()->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
+    }
+    heap()->IncrementPromotedObjectsSize(object_size);
+    return true;
+  }
+
+  return false;
+}
+
+
 bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
                                                HeapObject** out_object) {
   Space* owner = p->owner();
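
TryPromoteObject is deliberately fallible: when old space cannot
accommodate the object it returns false, and, as the first hunk shows,
the new-space visitor then falls through to its semispace-copy path. A
small sketch of that promote-or-copy decision, with illustrative stubs
standing in for the real heap predicates (V8's actual ShouldBePromoted
takes an address and size and consults the new-space age mark):

    struct HeapObject {};

    // Stubs for illustration only.
    bool ShouldBePromoted(HeapObject*) { return true; }
    bool TryPromoteObject(HeapObject*) { return false; }   // old space full
    bool CopyWithinNewSpace(HeapObject*) { return true; }  // semispace copy

    bool Evacuate(HeapObject* object) {
      // Prefer promotion to old space; if the old-space allocation
      // fails, fall back to copying the object within new space.
      if (ShouldBePromoted(object) && TryPromoteObject(object)) return true;
      return CopyWithinNewSpace(object);
    }
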
@@ -3182,7 +3183,7 @@
   // new entries in the store buffer and may cause some pages to be marked
   // scan-on-scavenge.
   NewSpacePageIterator it(from_bottom, from_top);
-  EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_);
+  EvacuateNewSpaceVisitor new_space_visitor(heap());
   while (it.has_next()) {
     NewSpacePage* p = it.next();
     survivors_size += p->LiveBytes();