Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 15a532e8d075a69df21eb5b5deb6219343a06597..92d94d676ccd3b5c31674791de360be60665f2e4 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -14,7 +14,7 @@
 #include "src/frames-inl.h"
 #include "src/gdb-jit.h"
 #include "src/global-handles.h"
-#include "src/heap/array-buffer-tracker.h"
+#include "src/heap/array-buffer-tracker-inl.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
@@ -872,6 +872,10 @@ void MarkCompactCollector::Prepare() {
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
+  if (!was_marked_incrementally_) {
+    heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
+  }
+  heap()->account_amount_of_external_allocated_freed_memory();
 
 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
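The two added calls separate bookkeeping from freeing: trackers are reset only on full (non-incremental) cycles, and external memory freed since the last GC is folded into the heap's counter at one well-defined point. A minimal self-contained sketch of that accounting pattern (stand-in types; only the account_amount_of_external_allocated_freed_memory name mirrors the patch):

#include <atomic>
#include <cstdint>

// Sketch, not V8's implementation: freed external bytes accumulate in an
// atomic side counter (freeing may happen concurrently) and are folded
// into the main external-memory counter once, e.g. in Prepare().
class HeapStub {
 public:
  // Called whenever an ArrayBuffer backing store is released.
  void IncrementFreedExternalMemory(uint64_t bytes) {
    freed_since_last_gc_.fetch_add(bytes, std::memory_order_relaxed);
  }

  // Counterpart of account_amount_of_external_allocated_freed_memory().
  void AccountFreedExternalMemory() {
    external_allocated_memory_ -=
        static_cast<int64_t>(freed_since_last_gc_.exchange(0));
  }

 private:
  int64_t external_allocated_memory_ = 0;
  std::atomic<uint64_t> freed_since_last_gc_{0};
};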
@@ -1727,20 +1731,12 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     if (heap_->ShouldBePromoted(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
-      // If we end up needing more special cases, we should factor this out.
-      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
-        heap_->array_buffer_tracker()->Promote(
-            JSArrayBuffer::cast(target_object));
-      }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
-    }
     semispace_copied_size_ += size;
     return true;
   }
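The removed special cases relied on hooking every evacuated object. They become unnecessary because a successful evacuation already leaves a forwarding pointer at the object's old address (in V8, encoded in the map word via MapWord::IsForwardingAddress()/ToForwardingAddress()), so a page-local pass can recover each tracked buffer's fate afterwards. A simplified stand-in for that encoding, not V8's real layout:

#include <cstdint>

// Simplified model of a map word that doubles as a forwarding pointer
// after evacuation; V8's actual encoding differs in detail.
struct MapWordStub {
  uintptr_t value = 0;
  static constexpr uintptr_t kForwardedTag = 1;

  bool IsForwardingAddress() const { return (value & kForwardedTag) != 0; }
  uintptr_t ToForwardingAddress() const { return value & ~kForwardedTag; }
  void SetForwardingAddress(uintptr_t new_location) {
    value = new_location | kForwardedTag;
  }
};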
@@ -1865,10 +1861,6 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
   }
 
   inline bool Visit(HeapObject* object) {
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object));
-    }
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBodyFast(&visitor);
     promoted_size_ += object->Size();
@@ -1909,6 +1901,9 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
   inline bool Visit(HeapObject* object) {
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBody(&visitor);
+    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+    }
     return true;
   }
 
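On a page where compaction was aborted, objects stay in place and only slots are recorded, so the buffer has to be explicitly kept alive here to keep the tracker's view consistent with the later kMarkBit scan. A hedged guess at the tracker side of MarkLive (stand-in types, not V8's):

// Stand-in sketch: MarkLive() flags the tracker entry for this GC cycle
// so a subsequent dead-buffer scan skips it.
struct TrackedBufferStub {
  void* backing_store = nullptr;
  bool live_this_cycle = false;
};

inline void MarkLive(TrackedBufferStub* entry) {
  entry->live_this_cycle = true;
}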
@@ -3126,21 +3121,27 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
   switch (ComputeEvacuationMode(page)) {
     case kObjectsNewToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
+      page->ScanAndFreeDeadArrayBuffers<
+          LocalArrayBufferTracker::kForwardingPointer>();
       DCHECK(result);
       USE(result);
       break;
     case kPageNewToOld:
       result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
+      // ArrayBufferTracker will be updated during sweeping.
       DCHECK(result);
       USE(result);
       break;
     case kObjectsOldToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
+      page->ScanAndFreeDeadArrayBuffers<
+          LocalArrayBufferTracker::kForwardingPointer>();
       if (!result) {
         // Aborted compaction page. We can record slots here to have them
         // processed in parallel later on.
         EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
         result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
+        page->ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>();
         DCHECK(result);
         USE(result);
         // We need to return failure here to indicate that we want this page
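The mode parameter names the liveness source: after a successful evacuation, survivors are identified by forwarding pointers; on an aborted page, the mark bits are still authoritative. A self-contained sketch of such a templated scan (everything here is a stand-in except the two mode names from the patch):

#include <cstddef>
#include <unordered_map>

// Stand-ins for V8 internals; only the control flow mirrors the patch.
struct ObjectStub {
  bool forwarded = false;  // set when evacuation moved the object
  bool marked = false;     // mark bit from the marking phase
};

struct BackingStoreStub {
  void* data = nullptr;
  size_t length = 0;
};

enum RetainMode { kForwardingPointer, kMarkBit };

using TrackerMap = std::unordered_map<ObjectStub*, BackingStoreStub>;

void FreeBackingStore(const BackingStoreStub&) { /* release external memory */ }

// Sketch of Page::ScanAndFreeDeadArrayBuffers<mode>(): walk the page-local
// tracker, keep entries for buffers that survived, free the rest.
template <RetainMode mode>
void ScanAndFreeDeadArrayBuffers(TrackerMap* tracked) {
  for (auto it = tracked->begin(); it != tracked->end();) {
    const ObjectStub* object = it->first;
    const bool live =
        (mode == kForwardingPointer) ? object->forwarded : object->marked;
    if (live) {
      ++it;
    } else {
      FreeBackingStore(it->second);
      it = tracked->erase(it);
    }
  }
}

The kPageNewToOld case needs no scan here: per the added comment, the whole page moves and its tracker is processed when the page is swept.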
@@ -3383,6 +3384,7 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
     freed_bytes = space->UnaccountedFree(free_start, size);
     max_freed_bytes = Max(freed_bytes, max_freed_bytes);
   }
+  p->FreeDeadArrayBuffers();
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }
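Placing the call in RawSweep means dead buffers are released page-locally while the page's mark bits are still valid, which also holds when sweeping runs concurrently. In terms of the stand-ins from the sketch above, FreeDeadArrayBuffers() is essentially the mark-bit variant of the scan:

// Reusing the stand-ins from the previous sketch: sweeping consults mark
// bits, so the mark-bit scan doubles as the sweeping-time free pass.
void FreeDeadArrayBuffers(TrackerMap* tracked) {
  ScanAndFreeDeadArrayBuffers<kMarkBit>(tracked);
}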
@@ -3526,11 +3528,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     }
   }
 
-  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
-  // ArrayBuffers either re-registers them as live or promotes them. This is
-  // needed to properly free them.
-  heap()->array_buffer_tracker()->FreeDead(false);
-
   // Deallocate evacuated candidate pages.
   ReleaseEvacuationCandidates();
 }
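With the global FreeDead(false) pass gone, buffer liveness is resolved wherever the owning page is processed. A rough self-contained sketch of the per-page tracker this design implies (hypothetical shape; only the LocalArrayBufferTracker name comes from the patch):

#include <cstddef>
#include <unordered_map>

// Hypothetical per-page tracker: one map per page, so evacuation and
// concurrent sweeping never contend on a global registration list.
class LocalArrayBufferTrackerSketch {
 public:
  void Track(void* buffer, void* backing_store, size_t length) {
    tracked_[buffer] = Entry{backing_store, length};
  }
  void Untrack(void* buffer) { tracked_.erase(buffer); }
  bool IsEmpty() const { return tracked_.empty(); }

 private:
  struct Entry {
    void* backing_store;
    size_t length;
  };
  std::unordered_map<void*, Entry> tracked_;
};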