Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index ad0c58e4a8d53e61c5c459caec33d9299184d065..0aefec2147c19121c408311f918aeebb3ba0d225 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -1675,7 +1675,8 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
       // If we end up needing more special cases, we should factor this out.
       if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
         heap_->array_buffer_tracker()->Promote(
-            JSArrayBuffer::cast(target_object));
+            JSArrayBuffer::cast(target_object),
+            reinterpret_cast<JSArrayBuffer*>(object));
       }
       promoted_size_ += size;
       return true;
@@ -1684,7 +1685,9 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
     if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
+      heap_->array_buffer_tracker()->SemiSpaceCopy(
+          JSArrayBuffer::cast(target),
+          reinterpret_cast<JSArrayBuffer*>(object));
     }
     semispace_copied_size_ += size;
     return true;
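Both call sites above now hand the tracker the buffer's new address in addition to its old one. Below is a minimal standalone sketch of why that matters when the bookkeeping is keyed by the object's address: a move has to retire the entry filed under the old address and re-file it under the new one. Only the names Promote, SemiSpaceCopy, and JSArrayBuffer come from the diff; the class name, the Register helper, the std::unordered_map, and the method bodies are illustrative assumptions, not V8's actual ArrayBufferTracker.

```cpp
#include <cstddef>
#include <cstdint>
#include <unordered_map>

// Stand-in for the parts of a JSArrayBuffer the tracker cares about:
// the externally allocated backing store and its size.
struct BufferRecord {
  void* backing_store;
  size_t byte_length;
};

// Hypothetical tracker keyed by object address. Because the key is the
// address, every evacuation (promotion or semispace copy) must report
// both the old and the new location of the buffer.
class TrackerSketch {
 public:
  void Register(uintptr_t addr, BufferRecord rec) { live_[addr] = rec; }

  // New-space object promoted into old space.
  void Promote(uintptr_t new_addr, uintptr_t old_addr) {
    Move(new_addr, old_addr);
  }

  // New-space object copied from from-space to to-space.
  void SemiSpaceCopy(uintptr_t new_addr, uintptr_t old_addr) {
    Move(new_addr, old_addr);
  }

 private:
  void Move(uintptr_t new_addr, uintptr_t old_addr) {
    auto it = live_.find(old_addr);
    if (it == live_.end()) return;  // buffer was never registered
    BufferRecord rec = it->second;
    live_.erase(it);
    live_[new_addr] = rec;  // re-register under the new location
  }

  std::unordered_map<uintptr_t, BufferRecord> live_;
};
```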
@@ -1811,7 +1814,7 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
   inline bool Visit(HeapObject* object) {
     if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
       object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object));
+          JSArrayBuffer::cast(object), JSArrayBuffer::cast(object));
     }
     RecordMigratedSlotVisitor visitor;
     object->IterateBodyFast(&visitor);
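In this visitor the same object is passed as both arguments because an entire new-space page is promoted in place: the page changes ownership, but the objects on it keep their addresses. In terms of the sketch above this is simply Promote(addr, addr).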
@@ -1838,8 +1841,16 @@ class MarkCompactCollector::EvacuateOldSpaceVisitor final
     HeapObject* target_object = nullptr;
     if (TryEvacuateObject(target_space, object, &target_object)) {
       DCHECK(object->map_word().IsForwardingAddress());
+      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
+        heap_->array_buffer_tracker()->Compact(
+            JSArrayBuffer::cast(target_object),
+            reinterpret_cast<JSArrayBuffer*>(object));
+      }
       return true;
     }
+    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+    }
     return false;
   }
 };
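In old space the two outcomes are now distinguished: when evacuation of the object succeeds, the tracker is told both addresses (Compact); when evacuation fails, the object stays in place and is only re-marked as live (MarkLive). The sketch below extends the earlier one with a mark bit to show how that split can drive a later "free the dead buffers" pass. MarkAndFreeSketch, TrackedBuffer, Register, FreeDeadAndResetMarks, and the malloc/free ownership model are assumptions for illustration; only Compact and MarkLive are names taken from the diff.

```cpp
#include <cstdint>
#include <cstdlib>
#include <unordered_map>

// A buffer observed to be alive in the current GC cycle gets marked;
// everything left unmarked at the end of the cycle is treated as dead.
struct TrackedBuffer {
  void* backing_store;  // assumed to be malloc'ed in this model
  bool marked;
};

class MarkAndFreeSketch {
 public:
  void Register(uintptr_t addr, void* backing_store) {
    buffers_[addr] = {backing_store, false};
  }

  // Old-space compaction succeeded: the object moved, so re-file the entry
  // under its new address and mark it live.
  void Compact(uintptr_t new_addr, uintptr_t old_addr) {
    auto it = buffers_.find(old_addr);
    if (it == buffers_.end()) return;
    TrackedBuffer entry = it->second;
    buffers_.erase(it);
    entry.marked = true;
    buffers_[new_addr] = entry;
  }

  // Evacuation was aborted: the object stayed where it is, so only record
  // that it is still alive.
  void MarkLive(uintptr_t addr) {
    auto it = buffers_.find(addr);
    if (it != buffers_.end()) it->second.marked = true;
  }

  // End of the cycle: release backing stores of unmarked (dead) buffers and
  // clear the marks of the survivors for the next cycle.
  void FreeDeadAndResetMarks() {
    for (auto it = buffers_.begin(); it != buffers_.end();) {
      if (!it->second.marked) {
        std::free(it->second.backing_store);
        it = buffers_.erase(it);
      } else {
        it->second.marked = false;
        ++it;
      }
    }
  }

 private:
  std::unordered_map<uintptr_t, TrackedBuffer> buffers_;
};
```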
@@ -1847,12 +1858,16 @@ class MarkCompactCollector::EvacuateOldSpaceVisitor final
 class MarkCompactCollector::EvacuateRecordOnlyVisitor final
     : public MarkCompactCollector::HeapObjectVisitor {
  public:
-  explicit EvacuateRecordOnlyVisitor(AllocationSpace space) : space_(space) {}
+  explicit EvacuateRecordOnlyVisitor(Heap* heap, AllocationSpace space)
+      : heap_(heap), space_(space) {}
 
   inline bool Visit(HeapObject* object) {
     if (space_ == OLD_SPACE) {
       RecordMigratedSlotVisitor visitor;
       object->IterateBody(&visitor);
+      if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+        heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
+      }
     } else {
       DCHECK_EQ(space_, CODE_SPACE);
       // Add a typed slot for the whole code object.
@@ -1864,6 +1879,7 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
   }
 
  private:
+  Heap* heap_;
   AllocationSpace space_;
 };
 
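The record-only visitor runs over exactly those aborted pages: the objects stay put and only their slots are re-recorded. Plumbing a Heap* into the visitor lets it also re-mark array buffers on such pages as live, so the end-of-cycle free pass (the FreeDeadAndResetMarks step in the sketch above) does not reclaim backing stores of buffers that survived on an aborted page. The call site in EvacuatePage below is updated to pass page->heap() accordingly.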
@@ -3172,7 +3188,8 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
       if (!result) {
         // Aborted compaction page. We can record slots here to have them
         // processed in parallel later on.
-        EvacuateRecordOnlyVisitor record_visitor(page->owner()->identity());
+        EvacuateRecordOnlyVisitor record_visitor(page->heap(),
+                                                 page->owner()->identity());
         result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
         DCHECK(result);
         USE(result);
@@ -3415,6 +3432,7 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
     freed_bytes = space->UnaccountedFree(free_start, size);
     max_freed_bytes = Max(freed_bytes, max_freed_bytes);
   }
+  p->FreeDeadArraybuffersAndResetTracker();
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }
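Sweeping now finishes each page by freeing the backing stores of array buffers on it that were never marked live in this cycle and by resetting the tracking state for the next cycle, corresponding to the FreeDeadAndResetMarks step sketched earlier. The call being a Page method suggests the tracking state is kept per page, so the extra work stays bounded by the page being swept.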
@@ -3557,10 +3575,9 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
     }
   }
 
-  // EvacuateNewSpaceAndCandidates iterates over new space objects and for
-  // ArrayBuffers either re-registers them as live or promotes them. This is
-  // needed to properly free them.
-  heap()->array_buffer_tracker()->FreeDead(false);
+  // Free up backing stores for array buffers that died in new space in this
+  // cycle.
+  heap()->array_buffer_tracker()->FreeDeadInNewSpace();
 
   // Deallocate evacuated candidate pages.
   ReleaseEvacuationCandidates();
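The old FreeDead(false) call depended on the evacuation pass having re-registered or promoted every live buffer first, as the removed comment explains. The replacement narrows the job: survivors were already re-filed under their new addresses by the Promote/SemiSpaceCopy calls above, so FreeDeadInNewSpace() only has to release the backing stores of buffers that died in new space during this cycle.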
@@ -3830,6 +3847,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
     DCHECK(p->SweepingDone());
 
     if (p->IsEvacuationCandidate()) {
+      p->FreeDeadArraybuffersAndResetTracker();
       // Will be processed in EvacuateNewSpaceAndCandidates.
       DCHECK(evacuation_candidates_.length() > 0);
       continue;
@@ -3838,6 +3856,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
     // We can not sweep black pages, since all mark bits are set for these
     // pages.
     if (p->IsFlagSet(Page::BLACK_PAGE)) {
+      p->FreeDeadArraybuffersAndResetTracker();
       Bitmap::Clear(p);
       p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
       p->ClearFlag(Page::BLACK_PAGE);
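Evacuation candidates and black pages never go through RawSweep (candidates are handled in EvacuateNewSpaceAndCandidates, and black pages only get their mark bits cleared here), so the dead-buffer free and tracker reset is issued for them explicitly, presumably to avoid carrying stale buffer entries into the next cycle.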