Chromium Code Reviews

Index: Source/platform/heap/Heap.cpp
diff --git a/Source/platform/heap/Heap.cpp b/Source/platform/heap/Heap.cpp
index 74d925287637d70286165d267790ba432bb02ff4..29b52c1b91dd6db59f717495936b05bcee8af94b 100644
--- a/Source/platform/heap/Heap.cpp
+++ b/Source/platform/heap/Heap.cpp
@@ -581,12 +581,12 @@ static bool isUninitializedMemory(void* objectPointer, size_t objectSize)
 template<>
 void LargeObject<FinalizedHeapObjectHeader>::mark(Visitor* visitor)
 {
-    if (heapObjectHeader()->hasVTable() && !vTableInitialized(payload())) {
-        FinalizedHeapObjectHeader* header = heapObjectHeader();
+    FinalizedHeapObjectHeader* header = heapObjectHeader();
+    if (header->hasVTable() && !vTableInitialized(payload())) {
         visitor->markNoTracing(header);
         ASSERT(isUninitializedMemory(header->payload(), header->payloadSize()));
     } else {
-        visitor->mark(heapObjectHeader(), heapObjectHeader()->traceCallback());
+        visitor->mark(header, header->traceCallback());
     }
 }
@@ -1969,27 +1969,27 @@ public:
     typedef HashMap<uintptr_t, std::pair<uintptr_t, String> > ObjectGraph;
 #endif
-    MarkingVisitor(CallbackStack* markingStack) : m_markingStack(markingStack)
+    MarkingVisitor(CallbackStack* markingStack)
haraken 2014/12/02 06:16:04
Add explicit.

sof 2014/12/02 09:52:15
Done.
+        : m_markingStack(markingStack)
+        , m_checkIfNeedsTracing(false)
     {
     }
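The "Add explicit." request above refers to this one-argument MarkingVisitor constructor: marking it explicit keeps a bare CallbackStack* from implicitly converting into a visitor. A minimal, self-contained sketch of the requested shape (illustrative class name and forward declaration only, not the actual follow-up patch):

class CallbackStack;

class MarkingVisitorSketch {
public:
    // 'explicit' disallows implicit conversion from CallbackStack*.
    explicit MarkingVisitorSketch(CallbackStack* markingStack)
        : m_markingStack(markingStack)
        , m_checkIfNeedsTracing(false)
    {
    }

private:
    CallbackStack* m_markingStack;
    bool m_checkIfNeedsTracing;
};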
     inline void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback)
     {
         ASSERT(header);
-#if ENABLE(ASSERT)
-        {
-            // Check that we are not marking objects that are outside
-            // the heap by calling Heap::contains. However we cannot
-            // call Heap::contains when outside a GC and we call mark
-            // when doing weakness for ephemerons. Hence we only check
-            // when called within.
-            ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
-        }
-#endif
         ASSERT(objectPointer);
+        // Check that we are not marking objects that are outside
+        // the heap by calling Heap::contains. However we cannot
+        // call Heap::contains when outside a GC and we call mark
+        // when doing weakness for ephemerons. Hence we only check
+        // when called within.
+        ASSERT(!Heap::isInGC() || Heap::containedInHeapOrOrphanedPage(header));
+
         if (header->isMarked())
             return;
         header->mark();
+
 #if ENABLE(GC_PROFILE_MARKING)
         MutexLocker locker(objectGraphMutex());
         String className(classOf(objectPointer));
@@ -2001,21 +2001,35 @@ public:
         ASSERT(result.isNewEntry);
         // fprintf(stderr, "%s[%p] -> %s[%p]\n", m_hostName.ascii().data(), m_hostObject, className.ascii().data(), objectPointer);
 #endif
-        if (callback)
+#if ENABLE(ASSERT)
+        {
+            BaseHeapPage* page = pageFromObject(objectPointer);
+            // If you hit this ASSERT, it means that there is a dangling pointer
+            // from a live thread heap to a dead thread heap. We must eliminate
+            // the dangling pointer.
+            // Release builds don't have the ASSERT, but it is OK because
+            // release builds will crash at the following item->call
haraken 2014/12/02 06:16:04
Update this comment. Now we don't have "the following item->call".
+            // because all the entries of the orphaned heaps are zeroed out and
+            // thus the item does not have a valid vtable.
+            ASSERT(!page->orphaned());
+        }
+#endif
+        if (callback) {
+            if (UNLIKELY(m_checkIfNeedsTracing && !needsTracing(objectPointer)))
+                return;
+
             Heap::pushTraceCallback(m_markingStack, const_cast<void*>(objectPointer), callback);
+        }
     }
+    // We need both HeapObjectHeader and FinalizedHeapObjectHeader versions to correctly find the payload.
     virtual void mark(HeapObjectHeader* header, TraceCallback callback) override
     {
-        // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
-        // version to correctly find the payload.
         visitHeader(header, header->payload(), callback);
     }
     virtual void mark(FinalizedHeapObjectHeader* header, TraceCallback callback) override
     {
-        // We need both the HeapObjectHeader and FinalizedHeapObjectHeader
-        // version to correctly find the payload.
         visitHeader(header, header->payload(), callback);
     }
@@ -2054,6 +2068,57 @@ public:
         return FinalizedHeapObjectHeader::fromPayload(objectPointer)->isMarked();
     }
+    virtual bool ensureMarked(const void* objectPointer) override
+    {
+        if (!objectPointer)
+            return false;
+#if ENABLE(ASSERT)
+        if (isMarked(objectPointer))
+            return false;
+
+        markNoTracing(objectPointer);
+#else
+        FinalizedHeapObjectHeader* header =
+            FinalizedHeapObjectHeader::fromPayload(objectPointer);
+        if (header->isMarked())
+            return false;
+        header->mark();
+
+        if (UNLIKELY(m_checkIfNeedsTracing && !needsTracing(objectPointer)))
haraken 2014/12/02 06:16:04
Hmm, it's a bit unfortunate we need to have this b…

sof 2014/12/02 06:19:27
As I said in the previous comment, I don't see a w…

sof 2014/12/02 09:52:15
Had a go at the former; acceptable?
+            return false;
+#endif
+        return true;
+    }
+
+#if ENABLE(ASSERT)
+#define DEFINE_ENSURE_MARKED_METHOD(Type) \
+    virtual bool ensureMarked(const Type* objectPointer) override \
+    { \
+        if (!objectPointer) \
+            return false; \
+        COMPILE_ASSERT(!NeedsAdjustAndMark<Type>::value, CanOnlyUseIsMarkedOnNonAdjustedTypes); \
+        if (isMarked(objectPointer)) \
+            return false; \
+        markNoTracing(objectPointer); \
+        return true; \
+    }
+#else
+#define DEFINE_ENSURE_MARKED_METHOD(Type) \
+    virtual bool ensureMarked(const Type* objectPointer) override \
+    { \
+        if (!objectPointer) \
+            return false; \
+        HeapObjectHeader* header = \
+            HeapObjectHeader::fromPayload(objectPointer); \
+        if (header->isMarked()) \
+            return false; \
+        header->mark(); \
+        if (UNLIKELY(m_checkIfNeedsTracing && !needsTracing(objectPointer))) \
+            return false; \
+        return true; \
+    }
+#endif
+
     // This macro defines the necessary visitor methods for typed heaps
 #define DEFINE_VISITOR_METHODS(Type) \
     virtual void mark(const Type* objectPointer, TraceCallback callback) override \
@@ -2067,7 +2132,8 @@ public:
     virtual bool isMarked(const Type* objectPointer) override \
     { \
         return HeapObjectHeader::fromPayload(objectPointer)->isMarked(); \
-    }
+    } \
+    DEFINE_ENSURE_MARKED_METHOD(Type)
     FOR_EACH_TYPED_HEAP(DEFINE_VISITOR_METHODS)
 #undef DEFINE_VISITOR_METHODS
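The new ensureMarked follows a "mark it if needed and report whether this call did the marking" contract: it returns true only when the object transitions from unmarked to marked, so the caller knows whether the object still needs tracing (and, in release builds, whether a thread-local GC should skip it). A self-contained sketch of that idiom with simplified stand-in types, not Blink's actual headers:

// ToyHeader stands in for the real object header; the name is illustrative.
struct ToyHeader {
    bool marked = false;
};

// Returns true only if this call performed the marking, mirroring the
// ensureMarked() contract above: false means a null object or one that was
// already marked.
inline bool ensureMarkedSketch(ToyHeader* header)
{
    if (!header)
        return false;
    if (header->marked)
        return false;
    header->marked = true;
    return true;
}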
@@ -2168,6 +2234,19 @@ public:
     }
 #endif
+    static inline bool needsTracing(const void* objectPointer)
+    {
+        BaseHeapPage* page = pageFromObject(objectPointer);
+        ASSERT(!page->orphaned());
+        // When doing a thread local GC, the marker checks if
+        // the object resides in another thread's heap. The
+        // object should not be traced, if it does.
+        if (!page->terminating())
+            return false;
+
+        return true;
+    }
+
 protected:
     virtual void registerWeakCell(void** cell, WeakPointerCallback callback) override
     {
@@ -2175,7 +2254,29 @@ protected:
     }
 private:
+    friend class ThreadLocalMarkingScope;
+
     CallbackStack* m_markingStack;
+    bool m_checkIfNeedsTracing;
+};
+
+class ThreadLocalMarkingScope {
+    STACK_ALLOCATED();
+public:
+    ThreadLocalMarkingScope(MarkingVisitor* visitor)
+        : m_visitor(visitor)
+    {
+        ASSERT(m_visitor);
+        m_visitor->m_checkIfNeedsTracing = true;
+    }
+
+    ~ThreadLocalMarkingScope()
+    {
+        m_visitor->m_checkIfNeedsTracing = false;
+    }
+
+private:
+    MarkingVisitor* m_visitor;
 };
 void Heap::init()
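ThreadLocalMarkingScope is a stack-allocated guard that turns m_checkIfNeedsTracing on for the duration of a thread-local GC and restores it on every exit path, including early returns. A minimal, self-contained sketch of the same RAII flag-scoping pattern, using illustrative names rather than the Blink types:

// Illustrative stand-in for MarkingVisitor: just the flag the scope toggles.
struct ToyVisitor {
    bool checkIfNeedsTracing = false;
};

// RAII guard: the flag is set in the constructor and cleared in the
// destructor, so leaving the marking phase always resets it.
class ToyThreadLocalMarkingScope {
public:
    explicit ToyThreadLocalMarkingScope(ToyVisitor* visitor)
        : m_visitor(visitor)
    {
        m_visitor->checkIfNeedsTracing = true;
    }
    ~ToyThreadLocalMarkingScope()
    {
        m_visitor->checkIfNeedsTracing = false;
    }

private:
    ToyVisitor* m_visitor;
};

// Usage mirrors collectGarbageForTerminatingThread() below: the guard lives
// only for the thread-local marking block.
inline void markForTerminatingThreadSketch(ToyVisitor* visitor)
{
    {
        ToyThreadLocalMarkingScope scope(visitor);
        // ... visit persistents and process the marking stack here ...
    }
    // checkIfNeedsTracing is false again at this point.
}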
@@ -2328,42 +2429,16 @@ String Heap::createBacktraceString()
 void Heap::pushTraceCallback(CallbackStack* stack, void* object, TraceCallback callback)
 {
-#if ENABLE(ASSERT)
-    {
-        ASSERT(Heap::containedInHeapOrOrphanedPage(object));
-    }
-#endif
+    ASSERT(Heap::containedInHeapOrOrphanedPage(object));
     CallbackStack::Item* slot = stack->allocateEntry();
     *slot = CallbackStack::Item(object, callback);
 }
-template<CallbackInvocationMode Mode>
 bool Heap::popAndInvokeTraceCallback(CallbackStack* stack, Visitor* visitor)
 {
     CallbackStack::Item* item = stack->pop();
     if (!item)
         return false;
-#if ENABLE(ASSERT)
-    if (Mode == GlobalMarking) {
-        BaseHeapPage* page = pageFromObject(item->object());
-        // If you hit this ASSERT, it means that there is a dangling pointer
-        // from a live thread heap to a dead thread heap. We must eliminate
-        // the dangling pointer.
-        // Release builds don't have the ASSERT, but it is OK because
-        // release builds will crash at the following item->call
-        // because all the entries of the orphaned heaps are zeroed out and
-        // thus the item does not have a valid vtable.
-        ASSERT(!page->orphaned());
-    }
-#endif
-    if (Mode == ThreadLocalMarking) {
-        BaseHeapPage* page = pageFromObject(item->object());
-        ASSERT(!page->orphaned());
-        // When doing a thread local GC, don't trace an object located in
-        // a heap of another thread.
-        if (!page->terminating())
-            return true;
-    }
 #if ENABLE(GC_PROFILE_MARKING)
     visitor->setHostInfo(item->object(), classOf(item->object()));
@@ -2499,7 +2574,7 @@ void Heap::collectGarbage(ThreadState::StackState stackState, ThreadState::Cause
     ThreadState::visitPersistentRoots(s_markingVisitor);
     // 2. trace objects reachable from the persistent roots including ephemerons.
-    processMarkingStack<GlobalMarking>();
+    processMarkingStack();
     // 3. trace objects reachable from the stack. We do this independent of the
     // given stackState since other threads might have a different stack state.
@@ -2508,9 +2583,8 @@ void Heap::collectGarbage(ThreadState::StackState stackState, ThreadState::Cause
     // 4. trace objects reachable from the stack "roots" including ephemerons.
     // Only do the processing if we found a pointer to an object on one of the
     // thread stacks.
-    if (lastGCWasConservative()) {
-        processMarkingStack<GlobalMarking>();
-    }
+    if (lastGCWasConservative())
+        processMarkingStack();
     postMarkingProcessing();
     globalWeakProcessing();
@@ -2549,21 +2623,25 @@ void Heap::collectGarbageForTerminatingThread(ThreadState* state)
     enterGC();
     state->preGC();
-    // 1. trace the thread local persistent roots. For thread local GCs we
-    // don't trace the stack (ie. no conservative scanning) since this is
-    // only called during thread shutdown where there should be no objects
-    // on the stack.
-    // We also assume that orphaned pages have no objects reachable from
-    // persistent handles on other threads or CrossThreadPersistents. The
-    // only cases where this could happen is if a subsequent conservative
-    // global GC finds a "pointer" on the stack or due to a programming
-    // error where an object has a dangling cross-thread pointer to an
-    // object on this heap.
-    state->visitPersistents(s_markingVisitor);
-
-    // 2. trace objects reachable from the thread's persistent roots
-    // including ephemerons.
-    processMarkingStack<ThreadLocalMarking>();
+    {
+        ThreadLocalMarkingScope markingScope(static_cast<MarkingVisitor*>(s_markingVisitor));
+
+        // 1. trace the thread local persistent roots. For thread local GCs we
+        // don't trace the stack (ie. no conservative scanning) since this is
+        // only called during thread shutdown where there should be no objects
+        // on the stack.
+        // We also assume that orphaned pages have no objects reachable from
+        // persistent handles on other threads or CrossThreadPersistents. The
+        // only cases where this could happen is if a subsequent conservative
+        // global GC finds a "pointer" on the stack or due to a programming
+        // error where an object has a dangling cross-thread pointer to an
+        // object on this heap.
+        state->visitPersistents(s_markingVisitor);
+
+        // 2. trace objects reachable from the thread's persistent roots
+        // including ephemerons.
+        processMarkingStack();
+    }
     postMarkingProcessing();
     globalWeakProcessing();
@@ -2574,18 +2652,15 @@ void Heap::collectGarbageForTerminatingThread(ThreadState* state)
     state->performPendingSweep();
 }
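With the mode checks folded into MarkingVisitor, popAndInvokeTraceCallback and processMarkingStack (below) lose their CallbackInvocationMode template parameter: objects on another thread's heap are filtered out when they are marked, not when their callback is popped. A toy sketch of that shift in responsibility, using illustrative types only:

#include <vector>

struct ToyObject {
    bool onTerminatingThreadHeap = true;
};

struct ToyMarker {
    bool checkIfNeedsTracing = false;
    std::vector<ToyObject*> markingStack;

    // Filtering at mark time: during a thread-local GC, an object on another
    // thread's heap is never pushed, so the pop loop stays mode-agnostic.
    void mark(ToyObject* object)
    {
        if (checkIfNeedsTracing && !object->onTerminatingThreadHeap)
            return;
        markingStack.push_back(object);
    }

    // The pop loop no longer needs to know whether this is a global or a
    // thread-local GC.
    bool popAndTraceOne()
    {
        if (markingStack.empty())
            return false;
        markingStack.pop_back();
        // ... trace the popped object here ...
        return true;
    }
};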
-template<CallbackInvocationMode Mode>
 void Heap::processMarkingStack()
 {
     // Ephemeron fixed point loop.
     do {
         {
             // Iteratively mark all objects that are reachable from the objects
-            // currently pushed onto the marking stack. If Mode is ThreadLocalMarking
-            // don't continue tracing if the trace hits an object on another thread's
-            // heap.
+            // currently pushed onto the marking stack.
             TRACE_EVENT0("blink_gc", "Heap::processMarkingStackSingleThreaded");
-            while (popAndInvokeTraceCallback<Mode>(s_markingStack, s_markingVisitor)) { }
+            while (popAndInvokeTraceCallback(s_markingStack, s_markingVisitor)) { }
         }
         {