Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 462b8ca89a027c3c14912d9588283a5d73f9f479..15a532e8d075a69df21eb5b5deb6219343a06597 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -14,7 +14,7 @@
 #include "src/frames-inl.h"
 #include "src/gdb-jit.h"
 #include "src/global-handles.h"
-#include "src/heap/array-buffer-tracker-inl.h"
+#include "src/heap/array-buffer-tracker.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact-inl.h"
@@ -872,10 +872,6 @@
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
-  if (!was_marked_incrementally_) {
-    heap_->array_buffer_tracker()->ResetTrackersInOldSpace();
-  }
-  heap()->account_amount_of_external_allocated_freed_memory();
 
 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -1731,12 +1727,20 @@
     if (heap_->ShouldBePromoted(object->address(), size) &&
         TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
                           &target_object)) {
+      // If we end up needing more special cases, we should factor this out.
+      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
+        heap_->array_buffer_tracker()->Promote(
+            JSArrayBuffer::cast(target_object));
+      }
       promoted_size_ += size;
       return true;
     }
     HeapObject* target = nullptr;
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
+    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
+      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
+    }
     semispace_copied_size_ += size;
     return true;
   }
@@ -1861,6 +1865,10 @@
   }
 
   inline bool Visit(HeapObject* object) {
+    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+      object->GetHeap()->array_buffer_tracker()->Promote(
+          JSArrayBuffer::cast(object));
+    }
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBodyFast(&visitor);
     promoted_size_ += object->Size();
@@ -1901,9 +1909,6 @@
   inline bool Visit(HeapObject* object) {
     RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
     object->IterateBody(&visitor);
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
-    }
     return true;
   }
 
@@ -3118,35 +3123,24 @@
 bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
   bool result = false;
   DCHECK(page->SweepingDone());
-  Heap* heap = page->heap();
   switch (ComputeEvacuationMode(page)) {
     case kObjectsNewToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
-      heap->array_buffer_tracker()
-          ->ScanAndFreeDeadArrayBuffers<
-              LocalArrayBufferTracker::kForwardingPointer>(page);
       DCHECK(result);
       USE(result);
       break;
     case kPageNewToOld:
       result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
-      // ArrayBufferTracker will be updated during sweeping.
       DCHECK(result);
       USE(result);
       break;
     case kObjectsOldToOld:
       result = EvacuateSinglePage<kClearMarkbits>(page, &old_space_visitor_);
-      heap->array_buffer_tracker()
-          ->ScanAndFreeDeadArrayBuffers<
-              LocalArrayBufferTracker::kForwardingPointer>(page);
      if (!result) {
        // Aborted compaction page. We can record slots here to have them
        // processed in parallel later on.
        EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
        result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
-        heap->array_buffer_tracker()
-            ->ScanAndFreeDeadArrayBuffers<LocalArrayBufferTracker::kMarkBit>(
-                page);
        DCHECK(result);
        USE(result);
        // We need to return failure here to indicate that we want this page
@@ -3389,7 +3383,6 @@
     freed_bytes = space->UnaccountedFree(free_start, size);
     max_freed_bytes = Max(freed_bytes, max_freed_bytes);
   }
-  p->heap()->array_buffer_tracker()->FreeDead(p);
   p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
   return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
 }
@@ -3532,6 +3525,11 @@
         p->ClearFlag(Page::COMPACTION_WAS_ABORTED);
       }
     }
+
+    // EvacuateNewSpaceAndCandidates iterates over new space objects and for
+    // ArrayBuffers either re-registers them as live or promotes them. This is
+    // needed to properly free them.
+    heap()->array_buffer_tracker()->FreeDead(false);
 
     // Deallocate evacuated candidate pages.
     ReleaseEvacuationCandidates();
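
For readers who only want the shape of the bookkeeping the patch relies on: during evacuation, JSArrayBuffers that are copied within new space are reported via MarkLive(), buffers moved to old space via Promote(), and FreeDead(false) afterwards releases the backing stores of every new-space buffer that was neither. The standalone sketch below mimics just those three entry points. It is an illustrative approximation, not part of the patch and not V8's ArrayBufferTracker; the BackingStore struct, the map-based sets, and RegisterNew are assumptions made for the example.

// Standalone sketch of the Promote/MarkLive/FreeDead bookkeeping pattern.
#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <map>

struct BackingStore {
  void* data;
  std::size_t length;
};

class ArrayBufferTrackerSketch {
 public:
  // Buffer allocated in new space: track it as "not yet discovered".
  void RegisterNew(const void* buffer, BackingStore store) {
    not_yet_discovered_[buffer] = store;
  }

  // Evacuation copied the buffer within new space: it survived this cycle.
  void MarkLive(const void* buffer) {
    auto it = not_yet_discovered_.find(buffer);
    if (it == not_yet_discovered_.end()) return;
    live_[buffer] = it->second;
    not_yet_discovered_.erase(it);
  }

  // Evacuation moved the buffer to old space: it is no longer swept on
  // new-space GCs, so move it to the old-space set.
  void Promote(const void* buffer) {
    auto it = not_yet_discovered_.find(buffer);
    if (it == not_yet_discovered_.end()) return;
    old_space_[buffer] = it->second;
    not_yet_discovered_.erase(it);
  }

  // After evacuation: whatever is still "not yet discovered" was neither
  // marked live nor promoted, so its backing store is dead and can be freed.
  void FreeDead(bool from_scavenge) {
    (void)from_scavenge;  // A full GC would additionally sweep old_space_.
    for (auto& entry : not_yet_discovered_) std::free(entry.second.data);
    not_yet_discovered_.clear();
    // Survivors become "not yet discovered" again for the next cycle.
    not_yet_discovered_.swap(live_);
  }

 private:
  std::map<const void*, BackingStore> not_yet_discovered_;  // new space, unvisited
  std::map<const void*, BackingStore> live_;                // new space, survived
  std::map<const void*, BackingStore> old_space_;           // promoted
};

int main() {
  ArrayBufferTrackerSketch tracker;
  int a, b, c;  // Stand-ins for JSArrayBuffer identities.
  tracker.RegisterNew(&a, {std::malloc(16), 16});
  tracker.RegisterNew(&b, {std::malloc(32), 32});
  tracker.RegisterNew(&c, {std::malloc(64), 64});
  tracker.MarkLive(&a);     // Copied within new space.
  tracker.Promote(&b);      // Moved to old space.
  tracker.FreeDead(false);  // c's backing store is freed; a and b are kept.
  std::printf("tracked survivors: a (new space), b (old space)\n");
  return 0;
}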