Chromium Code Reviews

Unified Diff: src/heap/mark-compact.cc

Issue 2026463002: Reland "[heap] Fine-grained JSArrayBuffer tracking" (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: make ArrayBufferTracker AllStatic (created 4 years, 7 months ago)
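
For orientation, here is a minimal sketch of the static ArrayBufferTracker interface implied by the call sites in this patch. It is reconstructed from this diff only; the real declarations live in src/heap/array-buffer-tracker.h and array-buffer-tracker-inl.h, and details such as the liveness-mode enum name and the AllStatic stand-in below are assumptions made for illustration.

class AllStatic {};  // stand-in for V8's AllStatic marker base class
class Heap;
class Page;
class JSArrayBuffer;

class LocalArrayBufferTracker {
 public:
  // Liveness criteria used when scanning a page-local tracker; the constant
  // names appear as template arguments in this diff, the enum name is assumed.
  enum LivenessMode { kForwardingPointer, kForwardingPointerOrMarkBit };
};

class ArrayBufferTracker : public AllStatic {
 public:
  // Drops the page-local trackers in old space before a full (non-incremental)
  // mark-compact; called from MarkCompactCollector::Prepare().
  static void ResetTrackersInOldSpace(Heap* heap);

  // Records a JSArrayBuffer as live in the tracker of the page it sits on.
  static void MarkLive(Heap* heap, JSArrayBuffer* buffer);

  // Scans a page's tracker after evacuation and frees buffers whose owners
  // did not survive, judged by the given liveness mode.
  template <LocalArrayBufferTracker::LivenessMode mode>
  static void ScanAndFreeDeadArrayBuffers(Page* page);

  // Frees dead buffers on a page while it is being swept.
  static void FreeDead(Page* page);
};
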
Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 7a4e2f2d03af344b27294e13720bf99cd0664d68..ed245f921fabf4061e0f2238ae0a2b37bfec6ca8 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -14,7 +14,7 @@
#include "src/frames-inl.h"
#include "src/gdb-jit.h"
#include "src/global-handles.h"
-#include "src/heap/array-buffer-tracker.h"
+#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
@@ -872,6 +872,10 @@ void MarkCompactCollector::Prepare() {
space = spaces.next()) {
space->PrepareForMarkCompact();
}
+ if (!was_marked_incrementally_) {
+ ArrayBufferTracker::ResetTrackersInOldSpace(heap_);
+ }
+ heap()->account_amount_of_external_allocated_freed_memory();
#ifdef VERIFY_HEAP
if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -1727,20 +1731,12 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
if (heap_->ShouldBePromoted(object->address(), size) &&
TryEvacuateObject(compaction_spaces_->Get(OLD_SPACE), object,
&target_object)) {
- // If we end up needing more special cases, we should factor this out.
- if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
- heap_->array_buffer_tracker()->Promote(
- JSArrayBuffer::cast(target_object));
- }
promoted_size_ += size;
return true;
}
HeapObject* target = nullptr;
AllocationSpace space = AllocateTargetObject(object, &target);
MigrateObject(HeapObject::cast(target), object, size, space);
- if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
- heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
- }
semispace_copied_size_ += size;
return true;
}
@@ -1865,10 +1861,6 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
}
inline bool Visit(HeapObject* object) {
- if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
- object->GetHeap()->array_buffer_tracker()->Promote(
- JSArrayBuffer::cast(object));
- }
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBodyFast(&visitor);
promoted_size_ += object->Size();
@@ -1909,6 +1901,9 @@ class MarkCompactCollector::EvacuateRecordOnlyVisitor final
inline bool Visit(HeapObject* object) {
RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
object->IterateBody(&visitor);
+ if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
+ ArrayBufferTracker::MarkLive(heap_, JSArrayBuffer::cast(object));
+ }
return true;
}
@@ -3127,11 +3122,14 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
switch (ComputeEvacuationMode(page)) {
case kObjectsNewToOld:
result = EvacuateSinglePage<kClearMarkbits>(page, &new_space_visitor_);
+ ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
+ LocalArrayBufferTracker::kForwardingPointer>(page);
DCHECK(result);
USE(result);
break;
case kPageNewToOld:
result = EvacuateSinglePage<kKeepMarking>(page, &new_space_page_visitor);
+ // ArrayBufferTracker will be updated during sweeping.
DCHECK(result);
USE(result);
break;
@@ -3142,12 +3140,16 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
// processed in parallel later on.
EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
result = EvacuateSinglePage<kKeepMarking>(page, &record_visitor);
+ ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
+ LocalArrayBufferTracker::kForwardingPointerOrMarkBit>(page);
DCHECK(result);
USE(result);
// We need to return failure here to indicate that we want this page
// added to the sweeper.
return false;
}
+ ArrayBufferTracker::ScanAndFreeDeadArrayBuffers<
+ LocalArrayBufferTracker::kForwardingPointer>(page);
break;
default:
UNREACHABLE();
@@ -3384,6 +3386,7 @@ int MarkCompactCollector::Sweeper::RawSweep(PagedSpace* space, Page* p,
freed_bytes = space->UnaccountedFree(free_start, size);
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
}
+ ArrayBufferTracker::FreeDead(p);
p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
}
@@ -3527,11 +3530,6 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
}
}
- // EvacuateNewSpaceAndCandidates iterates over new space objects and for
- // ArrayBuffers either re-registers them as live or promotes them. This is
- // needed to properly free them.
- heap()->array_buffer_tracker()->FreeDead(false);
-
// Deallocate evacuated candidate pages.
ReleaseEvacuationCandidates();
}
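
Taken together, these hunks replace the old heap-global heap()->array_buffer_tracker()->FreeDead(false) pass with per-page work: a page's tracker is scanned right after that page is evacuated (EvacuatePage) or while it is swept (RawSweep). The following is a self-contained toy model of that per-page idea, not V8 code; every name in it is invented for illustration.

#include <cstdlib>
#include <unordered_map>

// Toy stand-in for a JSArrayBuffer with externally allocated backing memory.
struct ToyBuffer {
  void* backing_store;
  bool survived_gc;  // models "has a forwarding pointer / mark bit set"
};

// Toy stand-in for a LocalArrayBufferTracker owned by a single page.
class ToyPageTracker {
 public:
  void Track(ToyBuffer* buffer) { tracked_[buffer] = buffer->backing_store; }

  // Free the backing store of every tracked buffer that did not survive the
  // GC and drop it from the tracker; survivors stay registered.
  void ScanAndFreeDead() {
    for (auto it = tracked_.begin(); it != tracked_.end();) {
      if (!it->first->survived_gc) {
        std::free(it->second);
        it = tracked_.erase(it);
      } else {
        ++it;
      }
    }
  }

 private:
  std::unordered_map<ToyBuffer*, void*> tracked_;
};

Because each page owns its tracker, evacuation and sweeping can process pages independently, which is what removes the need for the single heap-wide FreeDead pass deleted in the last hunk.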