Index: third_party/WebKit/Source/platform/heap/Heap.cpp
diff --git a/third_party/WebKit/Source/platform/heap/Heap.cpp b/third_party/WebKit/Source/platform/heap/Heap.cpp
index daf927fe5397fdc1744adc8b05b57faacc5037ce..a916993ee827eb58f633b6024aeb12e01010e781 100644
--- a/third_party/WebKit/Source/platform/heap/Heap.cpp
+++ b/third_party/WebKit/Source/platform/heap/Heap.cpp
@@ -30,7 +30,6 @@

 #include "platform/heap/Heap.h"

-#include "base/debug/alias.h"
 #include "base/sys_info.h"
 #include "platform/Histogram.h"
 #include "platform/ScriptForbiddenScope.h"
@@ -66,7 +65,7 @@ public:

     bool parkThreads(ThreadState* state)
     {
-        TRACE_EVENT0("blink_gc", "Heap::ParkThreadsScope");
+        TRACE_EVENT0("blink_gc", "ThreadHeap::ParkThreadsScope");
         const char* samplingState = TRACE_EVENT_GET_SAMPLING_STATE();
         if (state->isMainThread())
             TRACE_EVENT_SET_SAMPLING_STATE("blink_gc", "BlinkGCWaiting");
@@ -99,7 +98,7 @@ private:
     bool m_shouldResumeThreads;
 };

-void Heap::flushHeapDoesNotContainCache()
+void ThreadHeap::flushHeapDoesNotContainCache()
 {
     s_heapDoesNotContainCache->flush();
 }
@@ -118,7 +117,7 @@ void ProcessHeap::resetHeapCounters()
     s_totalMarkedObjectSize = 0;
 }

-void Heap::init()
+void ThreadHeap::init()
 {
     ThreadState::init();
     ProcessHeap::init();
@@ -138,7 +137,7 @@ void Heap::init()
     Platform::current()->registerMemoryDumpProvider(BlinkGCMemoryDumpProvider::instance(), "BlinkGC");
 }

-void Heap::shutdown()
+void ThreadHeap::shutdown()
 {
     ASSERT(s_markingStack);

@@ -164,7 +163,7 @@ void Heap::shutdown()
     s_ephemeronStack = nullptr;
     GCInfoTable::shutdown();
     ThreadState::shutdown();
-    ASSERT(Heap::heapStats().allocatedSpace() == 0);
+    ASSERT(ThreadHeap::heapStats().allocatedSpace() == 0);
 }

 CrossThreadPersistentRegion& ProcessHeap::crossThreadPersistentRegion()
@@ -246,7 +245,7 @@ void ThreadHeapStats::decreaseAllocatedSpace(size_t delta)
 }

 #if ENABLE(ASSERT)
-BasePage* Heap::findPageFromAddress(Address address)
+BasePage* ThreadHeap::findPageFromAddress(Address address)
 {
     MutexLocker lock(ThreadState::threadAttachMutex());
     for (ThreadState* state : ThreadState::attachedThreads()) {
@@ -257,7 +256,7 @@ BasePage* Heap::findPageFromAddress(Address address)
 }
 #endif

-Address Heap::checkAndMarkPointer(Visitor* visitor, Address address)
+Address ThreadHeap::checkAndMarkPointer(Visitor* visitor, Address address)
 {
     ASSERT(ThreadState::current()->isInGC());

@@ -283,17 +282,17 @@ Address Heap::checkAndMarkPointer(Visitor* visitor, Address address)
     return nullptr;
 }

-void Heap::pushTraceCallback(void* object, TraceCallback callback)
+void ThreadHeap::pushTraceCallback(void* object, TraceCallback callback)
 {
     ASSERT(ThreadState::current()->isInGC());

     // Trace should never reach an orphaned page.
-    ASSERT(!Heap::getOrphanedPagePool()->contains(object));
+    ASSERT(!ThreadHeap::getOrphanedPagePool()->contains(object));
     CallbackStack::Item* slot = s_markingStack->allocateEntry();
     *slot = CallbackStack::Item(object, callback);
 }

-bool Heap::popAndInvokeTraceCallback(Visitor* visitor)
+bool ThreadHeap::popAndInvokeTraceCallback(Visitor* visitor)
 {
     CallbackStack::Item* item = s_markingStack->pop();
     if (!item)
@@ -302,17 +301,17 @@ bool Heap::popAndInvokeTraceCallback(Visitor* visitor)
     return true;
 }

-void Heap::pushPostMarkingCallback(void* object, TraceCallback callback)
+void ThreadHeap::pushPostMarkingCallback(void* object, TraceCallback callback)
 {
     ASSERT(ThreadState::current()->isInGC());

     // Trace should never reach an orphaned page.
-    ASSERT(!Heap::getOrphanedPagePool()->contains(object));
+    ASSERT(!ThreadHeap::getOrphanedPagePool()->contains(object));
     CallbackStack::Item* slot = s_postMarkingCallbackStack->allocateEntry();
     *slot = CallbackStack::Item(object, callback);
 }

-bool Heap::popAndInvokePostMarkingCallback(Visitor* visitor)
+bool ThreadHeap::popAndInvokePostMarkingCallback(Visitor* visitor)
 {
     if (CallbackStack::Item* item = s_postMarkingCallbackStack->pop()) {
         item->call(visitor);
@@ -321,27 +320,27 @@ bool Heap::popAndInvokePostMarkingCallback(Visitor* visitor)
     return false;
 }

-void Heap::pushGlobalWeakCallback(void** cell, WeakCallback callback)
+void ThreadHeap::pushGlobalWeakCallback(void** cell, WeakCallback callback)
 {
     ASSERT(ThreadState::current()->isInGC());

     // Trace should never reach an orphaned page.
-    ASSERT(!Heap::getOrphanedPagePool()->contains(cell));
+    ASSERT(!ThreadHeap::getOrphanedPagePool()->contains(cell));
     CallbackStack::Item* slot = s_globalWeakCallbackStack->allocateEntry();
     *slot = CallbackStack::Item(cell, callback);
 }

-void Heap::pushThreadLocalWeakCallback(void* closure, void* object, WeakCallback callback)
+void ThreadHeap::pushThreadLocalWeakCallback(void* closure, void* object, WeakCallback callback)
 {
     ASSERT(ThreadState::current()->isInGC());

     // Trace should never reach an orphaned page.
-    ASSERT(!Heap::getOrphanedPagePool()->contains(object));
+    ASSERT(!ThreadHeap::getOrphanedPagePool()->contains(object));
     ThreadState* state = pageFromObject(object)->arena()->getThreadState();
     state->pushThreadLocalWeakCallback(closure, callback);
 }

-bool Heap::popAndInvokeGlobalWeakCallback(Visitor* visitor)
+bool ThreadHeap::popAndInvokeGlobalWeakCallback(Visitor* visitor)
 {
     if (CallbackStack::Item* item = s_globalWeakCallbackStack->pop()) {
         item->call(visitor);
@@ -350,12 +349,12 @@ bool Heap::popAndInvokeGlobalWeakCallback(Visitor* visitor)
     return false;
 }

-void Heap::registerWeakTable(void* table, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback)
+void ThreadHeap::registerWeakTable(void* table, EphemeronCallback iterationCallback, EphemeronCallback iterationDoneCallback)
 {
     ASSERT(ThreadState::current()->isInGC());

     // Trace should never reach an orphaned page.
-    ASSERT(!Heap::getOrphanedPagePool()->contains(table));
+    ASSERT(!ThreadHeap::getOrphanedPagePool()->contains(table));
     CallbackStack::Item* slot = s_ephemeronStack->allocateEntry();
     *slot = CallbackStack::Item(table, iterationCallback);

@@ -365,14 +364,14 @@ void Heap::registerWeakTable(void* table, EphemeronCallback iterationCallback, E
 }

 #if ENABLE(ASSERT)
-bool Heap::weakTableRegistered(const void* table)
+bool ThreadHeap::weakTableRegistered(const void* table)
 {
     ASSERT(s_ephemeronStack);
     return s_ephemeronStack->hasCallbackForObject(table);
 }
 #endif

-void Heap::decommitCallbackStacks()
+void ThreadHeap::decommitCallbackStacks()
 {
     s_markingStack->decommit();
     s_postMarkingCallbackStack->decommit();
@@ -380,21 +379,21 @@
     s_ephemeronStack->decommit();
 }

-void Heap::preGC()
+void ThreadHeap::preGC()
 {
     ASSERT(!ThreadState::current()->isInGC());
     for (ThreadState* state : ThreadState::attachedThreads())
         state->preGC();
 }

-void Heap::postGC(BlinkGC::GCType gcType)
+void ThreadHeap::postGC(BlinkGC::GCType gcType)
 {
     ASSERT(ThreadState::current()->isInGC());
     for (ThreadState* state : ThreadState::attachedThreads())
         state->postGC(gcType);
 }

-const char* Heap::gcReasonString(BlinkGC::GCReason reason)
+const char* ThreadHeap::gcReasonString(BlinkGC::GCReason reason)
 {
     switch (reason) {
     case BlinkGC::IdleGC:
@@ -415,7 +414,7 @@ const char* Heap::gcReasonString(BlinkGC::GCReason reason)
     return "<Unknown>";
 }

-void Heap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType, BlinkGC::GCReason reason)
+void ThreadHeap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType, BlinkGC::GCReason reason)
 {
     ASSERT(gcType != BlinkGC::ThreadTerminationGC);

@@ -424,11 +423,6 @@ void Heap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType
     RELEASE_ASSERT(!state->isGCForbidden());
     state->completeSweep();

-    size_t debugAllocatedObjectSize = Heap::heapStats().allocatedObjectSize();
-    base::debug::Alias(&debugAllocatedObjectSize);
-    size_t debugWrapperCount = Heap::heapStats().wrapperCount();
-    base::debug::Alias(&debugWrapperCount);
-
     OwnPtr<Visitor> visitor = Visitor::create(state, gcType);

     SafePointScope safePointScope(stackState, state);
@@ -442,7 +436,7 @@ void Heap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType

     ScriptForbiddenIfMainThreadScope scriptForbidden;

-    TRACE_EVENT2("blink_gc,devtools.timeline", "Heap::collectGarbage",
+    TRACE_EVENT2("blink_gc,devtools.timeline", "BlinkGCMarking",
         "lazySweeping", gcType == BlinkGC::GCWithoutSweep,
         "gcReason", gcReasonString(reason));
     TRACE_EVENT_SCOPED_SAMPLING_STATE("blink_gc", "BlinkGC");
@@ -459,9 +453,9 @@ void Heap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType

     StackFrameDepthScope stackDepthScope;

-    size_t totalObjectSize = Heap::heapStats().allocatedObjectSize() + Heap::heapStats().markedObjectSize();
+    size_t totalObjectSize = ThreadHeap::heapStats().allocatedObjectSize() + ThreadHeap::heapStats().markedObjectSize();
     if (gcType != BlinkGC::TakeSnapshot)
-        Heap::resetHeapCounters();
+        ThreadHeap::resetHeapCounters();

     // 1. Trace persistent roots.
     ThreadState::visitPersistentRoots(visitor.get());
@@ -482,10 +476,10 @@ void Heap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType
     getOrphanedPagePool()->decommitOrphanedPages();

     double markingTimeInMilliseconds = WTF::currentTimeMS() - startTime;
-    Heap::heapStats().setEstimatedMarkingTimePerByte(totalObjectSize ? (markingTimeInMilliseconds / 1000 / totalObjectSize) : 0);
+    ThreadHeap::heapStats().setEstimatedMarkingTimePerByte(totalObjectSize ? (markingTimeInMilliseconds / 1000 / totalObjectSize) : 0);

 #if PRINT_HEAP_STATS
-    dataLogF("Heap::collectGarbage (gcReason=%s, lazySweeping=%d, time=%.1lfms)\n", gcReasonString(reason), gcType == BlinkGC::GCWithoutSweep, markingTimeInMilliseconds);
+    dataLogF("ThreadHeap::collectGarbage (gcReason=%s, lazySweeping=%d, time=%.1lfms)\n", gcReasonString(reason), gcType == BlinkGC::GCWithoutSweep, markingTimeInMilliseconds);
 #endif

     DEFINE_THREAD_SAFE_STATIC_LOCAL(CustomCountHistogram, markingTimeHistogram, new CustomCountHistogram("BlinkGC.CollectGarbage", 0, 10 * 1000, 50));
@@ -499,14 +493,14 @@ void Heap::collectGarbage(BlinkGC::StackState stackState, BlinkGC::GCType gcType

     s_lastGCReason = reason;

-    Heap::reportMemoryUsageHistogram();
+    ThreadHeap::reportMemoryUsageHistogram();
     WTF::Partitions::reportMemoryUsageHistogram();

     postGC(gcType);
-    Heap::decommitCallbackStacks();
+    ThreadHeap::decommitCallbackStacks();
 }

-void Heap::collectGarbageForTerminatingThread(ThreadState* state)
+void ThreadHeap::collectGarbageForTerminatingThread(ThreadState* state)
 {
     {
         // A thread-specific termination GC must not allow other global GCs to go
@@ -539,26 +533,26 @@ void Heap::collectGarbageForTerminatingThread(ThreadState* state)
         globalWeakProcessing(visitor.get());

         state->postGC(BlinkGC::GCWithSweep);
-        Heap::decommitCallbackStacks();
+        ThreadHeap::decommitCallbackStacks();
     }
     state->preSweep();
 }

-void Heap::processMarkingStack(Visitor* visitor)
+void ThreadHeap::processMarkingStack(Visitor* visitor)
 {
     // Ephemeron fixed point loop.
     do {
         {
             // Iteratively mark all objects that are reachable from the objects
             // currently pushed onto the marking stack.
-            TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded");
+            TRACE_EVENT0("blink_gc", "ThreadHeap::processMarkingStackSingleThreaded");
             while (popAndInvokeTraceCallback(visitor)) { }
         }

         {
             // Mark any strong pointers that have now become reachable in
             // ephemeron maps.
-            TRACE_EVENT0("blink_gc", "Heap::processEphemeronStack");
+            TRACE_EVENT0("blink_gc", "ThreadHeap::processEphemeronStack");
             s_ephemeronStack->invokeEphemeronCallbacks(visitor);
         }

@@ -566,9 +560,9 @@ void Heap::processMarkingStack(Visitor* visitor)
     } while (!s_markingStack->isEmpty());
 }

-void Heap::postMarkingProcessing(Visitor* visitor)
+void ThreadHeap::postMarkingProcessing(Visitor* visitor)
 {
-    TRACE_EVENT0("blink_gc", "Heap::postMarkingProcessing");
+    TRACE_EVENT0("blink_gc", "ThreadHeap::postMarkingProcessing");
     // Call post-marking callbacks including:
     // 1. the ephemeronIterationDone callbacks on weak tables to do cleanup
     // (specifically to clear the queued bits for weak hash tables), and
@@ -582,9 +576,9 @@ void Heap::postMarkingProcessing(Visitor* visitor)
     ASSERT(s_markingStack->isEmpty());
 }

-void Heap::globalWeakProcessing(Visitor* visitor)
+void ThreadHeap::globalWeakProcessing(Visitor* visitor)
 {
-    TRACE_EVENT0("blink_gc", "Heap::globalWeakProcessing");
+    TRACE_EVENT0("blink_gc", "ThreadHeap::globalWeakProcessing");
     double startTime = WTF::currentTimeMS();

     // Call weak callbacks on objects that may now be pointing to dead objects.
@@ -599,20 +593,20 @@ void Heap::globalWeakProcessing(Visitor* visitor)
     globalWeakTimeHistogram.count(timeForGlobalWeakProcessing);
 }

-void Heap::collectAllGarbage()
+void ThreadHeap::collectAllGarbage()
 {
     // We need to run multiple GCs to collect a chain of persistent handles.
     size_t previousLiveObjects = 0;
     for (int i = 0; i < 5; ++i) {
         collectGarbage(BlinkGC::NoHeapPointersOnStack, BlinkGC::GCWithSweep, BlinkGC::ForcedGC);
-        size_t liveObjects = Heap::heapStats().markedObjectSize();
+        size_t liveObjects = ThreadHeap::heapStats().markedObjectSize();
         if (liveObjects == previousLiveObjects)
             break;
         previousLiveObjects = liveObjects;
     }
 }

-void Heap::reportMemoryUsageHistogram()
+void ThreadHeap::reportMemoryUsageHistogram()
 {
     static size_t supportedMaxSizeInMB = 4 * 1024;
     static size_t observedMaxSizeInMB = 0;

@@ -621,7 +615,7 @@ void Heap::reportMemoryUsageHistogram()
     if (!isMainThread())
         return;
     // +1 is for rounding up the sizeInMB.
-    size_t sizeInMB = Heap::heapStats().allocatedSpace() / 1024 / 1024 + 1;
+    size_t sizeInMB = ThreadHeap::heapStats().allocatedSpace() / 1024 / 1024 + 1;
     if (sizeInMB >= supportedMaxSizeInMB)
         sizeInMB = supportedMaxSizeInMB - 1;
     if (sizeInMB > observedMaxSizeInMB) {
@@ -633,10 +627,10 @@ void Heap::reportMemoryUsageHistogram()
     }
 }

-void Heap::reportMemoryUsageForTracing()
+void ThreadHeap::reportMemoryUsageForTracing()
 {
 #if PRINT_HEAP_STATS
-    // dataLogF("allocatedSpace=%ldMB, allocatedObjectSize=%ldMB, markedObjectSize=%ldMB, partitionAllocSize=%ldMB, wrapperCount=%ld, collectedWrapperCount=%ld\n", Heap::allocatedSpace() / 1024 / 1024, Heap::allocatedObjectSize() / 1024 / 1024, Heap::markedObjectSize() / 1024 / 1024, WTF::Partitions::totalSizeOfCommittedPages() / 1024 / 1024, Heap::wrapperCount(), Heap::collectedWrapperCount());
+    // dataLogF("allocatedSpace=%ldMB, allocatedObjectSize=%ldMB, markedObjectSize=%ldMB, partitionAllocSize=%ldMB, wrapperCount=%ld, collectedWrapperCount=%ld\n", ThreadHeap::allocatedSpace() / 1024 / 1024, ThreadHeap::allocatedObjectSize() / 1024 / 1024, ThreadHeap::markedObjectSize() / 1024 / 1024, WTF::Partitions::totalSizeOfCommittedPages() / 1024 / 1024, ThreadHeap::wrapperCount(), ThreadHeap::collectedWrapperCount());
 #endif

     bool gcTracingEnabled;
@@ -646,19 +640,19 @@ void Heap::reportMemoryUsageForTracing()

     // These values are divided by 1024 to avoid overflow in practical cases (TRACE_COUNTER values are 32-bit ints).
     // They are capped to INT_MAX just in case.
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::allocatedObjectSizeKB", std::min(Heap::heapStats().allocatedObjectSize() / 1024, static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::markedObjectSizeKB", std::min(Heap::heapStats().markedObjectSize() / 1024, static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::markedObjectSizeAtLastCompleteSweepKB", std::min(Heap::heapStats().markedObjectSizeAtLastCompleteSweep() / 1024, static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::allocatedSpaceKB", std::min(Heap::heapStats().allocatedSpace() / 1024, static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::objectSizeAtLastGCKB", std::min(Heap::heapStats().objectSizeAtLastGC() / 1024, static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::wrapperCount", std::min(Heap::heapStats().wrapperCount(), static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::heapStats().wrapperCountAtLastGC", std::min(Heap::heapStats().wrapperCountAtLastGC(), static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::collectedWrapperCount", std::min(Heap::heapStats().collectedWrapperCount(), static_cast<size_t>(INT_MAX)));
-    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Heap::partitionAllocSizeAtLastGCKB", std::min(Heap::heapStats().partitionAllocSizeAtLastGC() / 1024, static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::allocatedObjectSizeKB", std::min(ThreadHeap::heapStats().allocatedObjectSize() / 1024, static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::markedObjectSizeKB", std::min(ThreadHeap::heapStats().markedObjectSize() / 1024, static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::markedObjectSizeAtLastCompleteSweepKB", std::min(ThreadHeap::heapStats().markedObjectSizeAtLastCompleteSweep() / 1024, static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::allocatedSpaceKB", std::min(ThreadHeap::heapStats().allocatedSpace() / 1024, static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::objectSizeAtLastGCKB", std::min(ThreadHeap::heapStats().objectSizeAtLastGC() / 1024, static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::wrapperCount", std::min(ThreadHeap::heapStats().wrapperCount(), static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::heapStats().wrapperCountAtLastGC", std::min(ThreadHeap::heapStats().wrapperCountAtLastGC(), static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::collectedWrapperCount", std::min(ThreadHeap::heapStats().collectedWrapperCount(), static_cast<size_t>(INT_MAX)));
+    TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "ThreadHeap::partitionAllocSizeAtLastGCKB", std::min(ThreadHeap::heapStats().partitionAllocSizeAtLastGC() / 1024, static_cast<size_t>(INT_MAX)));
     TRACE_COUNTER1(TRACE_DISABLED_BY_DEFAULT("blink_gc"), "Partitions::totalSizeOfCommittedPagesKB", std::min(WTF::Partitions::totalSizeOfCommittedPages() / 1024, static_cast<size_t>(INT_MAX)));
 }

-size_t Heap::objectPayloadSizeForTesting()
+size_t ThreadHeap::objectPayloadSizeForTesting()
 {
     size_t objectPayloadSize = 0;
     for (ThreadState* state : ThreadState::attachedThreads()) {
@@ -672,49 +666,48 @@ size_t Heap::objectPayloadSizeForTesting()
     return objectPayloadSize;
 }

-RegionTree* Heap::getRegionTree()
+RegionTree* ThreadHeap::getRegionTree()
 {
     DEFINE_THREAD_SAFE_STATIC_LOCAL(RegionTree, tree, new RegionTree);
     return &tree;
 }

-BasePage* Heap::lookup(Address address)
+BasePage* ThreadHeap::lookup(Address address)
 {
     ASSERT(ThreadState::current()->isInGC());
-    if (PageMemoryRegion* region = Heap::getRegionTree()->lookup(address)) {
+    if (PageMemoryRegion* region = ThreadHeap::getRegionTree()->lookup(address)) {
         BasePage* page = region->pageFromAddress(address);
         return page && !page->orphaned() ? page : nullptr;
     }
     return nullptr;
 }

-void Heap::resetHeapCounters()
+void ThreadHeap::resetHeapCounters()
 {
     ASSERT(ThreadState::current()->isInGC());

-    Heap::reportMemoryUsageForTracing();
+    ThreadHeap::reportMemoryUsageForTracing();

     ProcessHeap::resetHeapCounters();
-    Heap::heapStats().reset();
+    ThreadHeap::heapStats().reset();
     for (ThreadState* state : ThreadState::attachedThreads())
         state->resetHeapCounters();
 }

-// TODO(keishi): Make this a member of ThreadHeap.
-ThreadHeapStats& Heap::heapStats()
+ThreadHeapStats& ThreadHeap::heapStats()
 {
     DEFINE_THREAD_SAFE_STATIC_LOCAL(ThreadHeapStats, stats, new ThreadHeapStats());
     return stats;
 }

-CallbackStack* Heap::s_markingStack;
-CallbackStack* Heap::s_postMarkingCallbackStack;
-CallbackStack* Heap::s_globalWeakCallbackStack;
-CallbackStack* Heap::s_ephemeronStack;
-HeapDoesNotContainCache* Heap::s_heapDoesNotContainCache;
-FreePagePool* Heap::s_freePagePool;
-OrphanedPagePool* Heap::s_orphanedPagePool;
+CallbackStack* ThreadHeap::s_markingStack;
+CallbackStack* ThreadHeap::s_postMarkingCallbackStack;
+CallbackStack* ThreadHeap::s_globalWeakCallbackStack;
+CallbackStack* ThreadHeap::s_ephemeronStack;
+HeapDoesNotContainCache* ThreadHeap::s_heapDoesNotContainCache;
+FreePagePool* ThreadHeap::s_freePagePool;
+OrphanedPagePool* ThreadHeap::s_orphanedPagePool;

-BlinkGC::GCReason Heap::s_lastGCReason = BlinkGC::NumberOfGCReason;
+BlinkGC::GCReason ThreadHeap::s_lastGCReason = BlinkGC::NumberOfGCReason;

 } // namespace blink