| Index: Source/platform/heap/Visitor.h
|
| diff --git a/Source/platform/heap/Visitor.h b/Source/platform/heap/Visitor.h
|
| index f0b9ed70b31598dee872e40a39293c0055631ad8..274312bb1707c15c156e47a122c01bda19c67504 100644
|
| --- a/Source/platform/heap/Visitor.h
|
| +++ b/Source/platform/heap/Visitor.h
|
| @@ -155,8 +155,12 @@ public:
|
|
|
| template <typename T> const bool NeedsAdjustAndMark<T, false>::value;
|
|
|
| +template <typename T, bool = WTF::HasTraceNewMethod<T>::value> struct TraceGenericCompatibilityAdaptor;
|
| +
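|
| +// HasTraceNewMethod<T> is assumed to be a member-detection trait that reports
|
| +// whether T declares the new-style trace(InlinedGlobalMarkingVisitor) overload.
|
| +// A minimal sketch of such a trait (hypothetical, shown only to illustrate how
|
| +// the default template argument above could be computed):
|
| +//
|
| +//   template <typename T>
|
| +//   struct HasTraceNewMethod {
|
| +//       typedef char YesType;
|
| +//       struct NoType { char padding[8]; };
|
| +//       template <typename U, void (U::*)(InlinedGlobalMarkingVisitor)> struct Helper;
|
| +//       template <typename U> static YesType check(Helper<U, &U::trace>*);
|
| +//       template <typename U> static NoType check(...);
|
| +//       static const bool value = sizeof(check<T>(0)) == sizeof(YesType);
|
| +//   };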
|
| template<typename T, bool = NeedsAdjustAndMark<T>::value> class DefaultTraceTrait;
|
|
|
| +class InlinedGlobalMarkingVisitor;
|
| +
|
| // The TraceTrait is used to specify how to mark an object pointer and
|
| // how to trace all of the pointers in the object.
|
| //
|
| @@ -173,15 +177,13 @@ class TraceTrait {
|
| public:
|
| // Default implementation of TraceTrait<T>::trace just statically
|
| // dispatches to the trace method of the class T.
|
| - static void trace(Visitor* visitor, void* self)
|
| + template<typename VisitorDispatcher>
|
| + static void trace(VisitorDispatcher visitor, void* self)
|
| {
|
| - static_cast<T*>(self)->trace(visitor);
|
| + TraceGenericCompatibilityAdaptor<T>::trace(visitor, static_cast<T*>(self));
|
| }
|
|
|
| - static void mark(Visitor* visitor, const T* t)
|
| - {
|
| - DefaultTraceTrait<T>::mark(visitor, t);
|
| - }
|
| + template<typename VisitorDispatcher> static void mark(VisitorDispatcher visitor, const T*);
|
|
|
| #if ENABLE(ASSERT)
|
| static void checkGCInfo(Visitor* visitor, const T* t)
|
| @@ -527,13 +529,24 @@ public:
|
| }
|
| #endif
|
|
|
| + virtual bool isGlobalMarkingVisitor() { return false; }
|
| +
|
| +#if 0
|
| inline bool canTraceEagerly() const { return m_traceDepth < kMaxEagerTraceDepth; }
|
| inline void incrementTraceDepth() { m_traceDepth++; }
|
| inline void decrementTraceDepth() { ASSERT(m_traceDepth > 0); m_traceDepth--; }
|
| +#else
|
| + inline bool canTraceEagerly() const { return true; }
|
| + inline void incrementTraceDepth() { }
|
| + inline void decrementTraceDepth() { }
|
| +#endif
|
| +
|
| +    // This should only be used from InlinedGlobalMarkingVisitor.
|
| + virtual void pushTraceCallback(void*, TraceCallback) { ASSERT_NOT_REACHED(); }
|
|
|
| protected:
|
| Visitor()
|
| - : m_traceDepth(0)
|
| + // : m_traceDepth(0)
|
| {
|
| }
|
|
|
| @@ -554,11 +567,298 @@ private:
|
|
|
| // The maximum depth of eager, unrolled trace() calls that is
|
| // considered safe and allowed.
|
| - const int kMaxEagerTraceDepth = 100;
|
| + // const int kMaxEagerTraceDepth = 100;
|
|
|
| - int m_traceDepth;
|
| + // int m_traceDepth;
|
| };
|
|
|
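| +// Local stand-in for std::forward (assumption: avoids depending on C++11
|
| +// <utility> being available on all supported toolchains).
|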
| +template<typename T>
|
| +inline T&& forward(typename WTF::Identity<T>::type&& x) { return static_cast<T&&>(x); }
|
| +
|
| +class InlinedGlobalMarkingVisitor {
|
| +public:
|
| + InlinedGlobalMarkingVisitor(Visitor* visitor)
|
| + : m_visitor(visitor)
|
| + {
|
| + ASSERT(visitor->isGlobalMarkingVisitor());
|
| + }
|
| +
|
| + // Hack to allow visitor->trace()
|
| + InlinedGlobalMarkingVisitor* operator->() { return this; }
|
| +
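|
| +    // The operator-> above lets macro-generated trace bodies use the same
|
| +    // visitor->trace(field) syntax whether VisitorDispatcher is a Visitor*
|
| +    // or a by-value InlinedGlobalMarkingVisitor. A sketch, with hypothetical
|
| +    // names:
|
| +    //
|
| +    //   template<typename VisitorDispatcher>
|
| +    //   void Node::traceImpl(VisitorDispatcher visitor)
|
| +    //   {
|
| +    //       visitor->trace(m_next); // compiles for both dispatcher types
|
| +    //   }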
|
| + // One-argument templated mark method. This uses the static type of
|
| + // the argument to get the TraceTrait. By default, the mark method
|
| +    // of the TraceTrait just calls the two-argument mark method on this
|
| + // visitor, where the second argument is the static trace method of the trait.
|
| + template<typename T> void mark(T* t);
|
| +
|
| + // Member version of the one-argument templated trace method.
|
| + template<typename T>
|
| + void trace(const Member<T>& t)
|
| + {
|
| + mark(t.get());
|
| + }
|
| +
|
| + // Fallback method used only when we need to trace raw pointers of T.
|
| +    // This is the case when a member is a union, where Members are not supported.
|
| + template<typename T>
|
| + void trace(const T* t)
|
| + {
|
| + mark(const_cast<T*>(t));
|
| + }
|
| +
|
| + template<typename T>
|
| + void trace(T* t)
|
| + {
|
| + mark(t);
|
| + }
|
| +
|
| + // WeakMember version of the templated trace method. It doesn't keep
|
| + // the traced thing alive, but will write null to the WeakMember later
|
| +    // if the pointed-to object is dead. Declaring the parameter const is a
|
| +    // lie, but overload resolution weighs constness too heavily when
|
| +    // picking the correct overload, so all these trace methods have to take
|
| +    // their argument with the same constness to let the argument type decide.
|
| + template<typename T>
|
| + void trace(const WeakMember<T>& t)
|
| + {
|
| + // Check that we actually know the definition of T when tracing.
|
| + COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing);
|
| +        m_visitor->registerWeakCell(const_cast<WeakMember<T>&>(t).cell());
|
| + COMPILE_ASSERT_IS_GARBAGE_COLLECTED(T, AttemptedToWeakTraceNonGarbageCollectedObject);
|
| + }
|
| +
|
| + template<typename T>
|
| + void traceInCollection(T& t, WTF::ShouldWeakPointersBeMarkedStrongly strongify)
|
| + {
|
| + HashTraits<T>::traceInCollection(m_visitor, t, strongify);
|
| + }
|
| +
|
| + // Fallback trace method for part objects to allow individual trace methods
|
| + // to trace through a part object with visitor->trace(m_partObject). This
|
| + // takes a const argument, because otherwise it will match too eagerly: a
|
| + // non-const argument would match a non-const Vector<T>& argument better
|
| + // than the specialization that takes const Vector<T>&. For a similar reason,
|
| + // the other specializations take a const argument even though they are
|
| + // usually used with non-const arguments, otherwise this function would match
|
| + // too well.
|
| + template<typename T>
|
| + void trace(const T& t)
|
| + {
|
| + if (WTF::IsPolymorphic<T>::value) {
|
| + intptr_t vtable = *reinterpret_cast<const intptr_t*>(&t);
|
| + if (!vtable)
|
| + return;
|
| + }
|
| + const_cast<T&>(t).trace(*this);
|
| + }
|
| +
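|
| +    // For example, a class with an embedded, non-pointer part object would
|
| +    // write visitor->trace(m_partObject) (hypothetical member name), which
|
| +    // lands on the overload above and recurses into the part object's own
|
| +    // trace method. The vtable null check appears to guard against tracing
|
| +    // a polymorphic object whose memory is still zeroed, e.g. an unused
|
| +    // slot in a backing store; that rationale is an assumption.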
|
| + // The following trace methods are for off-heap collections.
|
| + template<typename T, size_t inlineCapacity>
|
| + void trace(const Vector<T, inlineCapacity>& vector)
|
| + {
|
| + OffHeapCollectionTraceTrait<Vector<T, inlineCapacity, WTF::DefaultAllocator> >::trace(m_visitor, vector);
|
| + }
|
| +
|
| + template<typename T, size_t N>
|
| + void trace(const Deque<T, N>& deque)
|
| + {
|
| + OffHeapCollectionTraceTrait<Deque<T, N> >::trace(m_visitor, deque);
|
| + }
|
| +
|
| +#if !ENABLE(OILPAN)
|
| + // These trace methods are needed to allow compiling and calling trace on
|
| + // transition types. We need to support calls in the non-oilpan build
|
| + // because a fully transitioned type (which will have its trace method
|
| + // called) might trace a field that is in transition. Once transition types
|
| + // are removed these can be removed.
|
| + template<typename T> void trace(const OwnPtr<T>&) { }
|
| + template<typename T> void trace(const RefPtr<T>&) { }
|
| + template<typename T> void trace(const RawPtr<T>&) { }
|
| + template<typename T> void trace(const WeakPtr<T>&) { }
|
| +#endif
|
| +
|
| + // This method marks an object and adds it to the set of objects
|
| + // that should have their trace method called. Since not all
|
| + // objects have vtables we have to have the callback as an
|
| + // explicit argument, but we can use the templated one-argument
|
| + // mark method above to automatically provide the callback
|
| + // function.
|
| + void mark(const void* o, TraceCallback callback);
|
| +
|
| + template<typename T> void markNoTracing(const T* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); }
|
| + void markNoTracing(const void* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); }
|
| + void markNoTracing(HeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); }
|
| + void markNoTracing(GeneralHeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); }
|
| +
|
| + // If the object calls this during the regular trace callback, then the
|
| + // WeakPointerCallback argument may be called later, when the strong roots
|
| + // have all been found. The WeakPointerCallback will normally use isAlive
|
| + // to find out whether some pointers are pointing to dying objects. When
|
| + // the WeakPointerCallback is done the object must have purged all pointers
|
| + // to objects where isAlive returned false. In the weak callback it is not
|
| + // allowed to touch other objects (except using isAlive) or to allocate on
|
| + // the GC heap. Note that even removing things from HeapHashSet or
|
| + // HeapHashMap can cause an allocation if the backing store resizes, but
|
| + // these collections know to remove WeakMember elements safely.
|
| + //
|
| + // The weak pointer callbacks are run on the thread that owns the
|
| + // object and other threads are not stopped during the
|
| + // callbacks. Since isAlive is used in the callback to determine
|
| + // if objects pointed to are alive it is crucial that the object
|
| + // pointed to belong to the same thread as the object receiving
|
| + // the weak callback. Since other threads have been resumed the
|
| + // mark bits are not valid for objects from other threads.
|
| + void registerWeakMembers(const void* object, WeakPointerCallback callback) { m_visitor->registerWeakMembers(object, object, callback); }
|
| +    void registerWeakMembers(const void* a, const void* b, WeakPointerCallback callback) { m_visitor->registerWeakMembers(a, b, callback); }
|
| +
|
| + template<typename T, void (T::*method)(Visitor*)>
|
| + void registerWeakMembers(const T* obj)
|
| + {
|
| + registerWeakMembers(obj, &TraceMethodDelegate<T, method>::trampoline);
|
| + }
|
| +
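|
| +    // Typical use from within a trace method (hypothetical names):
|
| +    //
|
| +    //   visitor->registerWeakMembers<Observer, &Observer::clearWeakMembers>(this);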
|
| +#if 0
|
| + // For simple cases where you just want to zero out a cell when the thing
|
| + // it is pointing at is garbage, you can use this. This will register a
|
| + // callback for each cell that needs to be zeroed, so if you have a lot of
|
| + // weak cells in your object you should still consider using
|
| + // registerWeakMembers above.
|
| + //
|
| + // In contrast to registerWeakMembers, the weak cell callbacks are
|
| + // run on the thread performing garbage collection. Therefore, all
|
| + // threads are stopped during weak cell callbacks.
|
| + template<typename T>
|
| + void registerWeakCell(T** cell)
|
| + {
|
| + registerWeakCell(reinterpret_cast<void**>(cell), &handleWeakCell<T>);
|
| + }
|
| +
|
| + virtual void registerWeakTable(const void*, EphemeronCallback, EphemeronCallback) = 0;
|
| +#if ENABLE(ASSERT)
|
| + virtual bool weakTableRegistered(const void*) = 0;
|
| +#endif
|
| +#endif
|
| +
|
| + bool isMarked(const void* obj);
|
| + bool ensureMarked(const void* obj);
|
| +
|
| + template<typename T> inline bool isAlive(T* obj)
|
| + {
|
| + // Check that we actually know the definition of T when tracing.
|
| + COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing);
|
| + // The strongification of collections relies on the fact that once a
|
| + // collection has been strongified, there is no way that it can contain
|
| + // non-live entries, so no entries will be removed. Since you can't set
|
| + // the mark bit on a null pointer, that means that null pointers are
|
| + // always 'alive'.
|
| + if (!obj)
|
| + return true;
|
| + return ObjectAliveTrait<T>::isHeapObjectAlive(this, obj);
|
| + }
|
| + template<typename T> inline bool isAlive(const Member<T>& member)
|
| + {
|
| + return isAlive(member.get());
|
| + }
|
| + template<typename T> inline bool isAlive(RawPtr<T> ptr)
|
| + {
|
| + return isAlive(ptr.get());
|
| + }
|
| +
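|
| +    // A weak callback would typically use isAlive along these lines
|
| +    // (hypothetical class and member names):
|
| +    //
|
| +    //   void Observer::clearWeakMembers(Visitor* visitor)
|
| +    //   {
|
| +    //       if (!visitor->isAlive(m_object))
|
| +    //           m_object = nullptr;
|
| +    //   }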
|
| +#if ENABLE(ASSERT)
|
| + void checkGCInfo(const void*, const GCInfo*);
|
| +#endif
|
| +
|
| + // Macro to declare methods needed for each typed heap.
|
| +#define DECLARE_VISITOR_METHODS(Type) \
|
| + DEBUG_ONLY(void checkGCInfo(const Type*, const GCInfo*);) \
|
| + void mark(const Type* t, TraceCallback callback) { m_visitor->mark(t, callback); } \
|
| +    bool isMarked(const Type* t) { return m_visitor->isMarked(t); } \
|
| + bool ensureMarked(const Type* t) { return m_visitor->ensureMarked(t); }
|
| +
|
| + FOR_EACH_TYPED_HEAP(DECLARE_VISITOR_METHODS)
|
| +#undef DECLARE_VISITOR_METHODS
|
| +
|
| +#if ENABLE(GC_PROFILE_MARKING)
|
| + void setHostInfo(void* object, const String& name)
|
| + {
|
| + m_visitor->setHostInfo(object, name);
|
| + }
|
| +#endif
|
| +
|
| + inline bool canTraceEagerly() const { return true; }
|
| + inline void incrementTraceDepth() { }
|
| + inline void decrementTraceDepth() { }
|
| +
|
| + Visitor* getUninlined() { return m_visitor; }
|
| +
|
| +private:
|
| + void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback);
|
| +
|
| + Visitor* m_visitor;
|
| +};
|
| +
|
| +template<typename T>
|
| +void InlinedGlobalMarkingVisitor::mark(T* t)
|
| +{
|
| + if (!t)
|
| + return;
|
| +#if ENABLE(ASSERT)
|
| + TraceTrait<T>::checkGCInfo(m_visitor, t);
|
| +#endif
|
| +    TraceTrait<T>::mark(*this, t);
|
| +
|
| + COMPILE_ASSERT_IS_GARBAGE_COLLECTED(T, AttemptedToMarkNonGarbageCollectedObject);
|
| +}
|
| +
|
| +template <typename T>
|
| +struct TraceGenericCompatibilityAdaptor<T, true> {
|
| + static inline void trace(Visitor* visitor, T* t)
|
| + {
|
| + t->trace(visitor);
|
| + }
|
| +
|
| + static inline void trace(InlinedGlobalMarkingVisitor visitor, T* t)
|
| + {
|
| +        t->trace(visitor);
|
| + }
|
| +};
|
| +
|
| +template <typename T>
|
| +struct TraceGenericCompatibilityAdaptor<T, false> {
|
| + static inline void trace(Visitor* visitor, T* t)
|
| + {
|
| +        // Old-style class: dispatch directly to its trace(Visitor*).
|
| + t->trace(visitor);
|
| + }
|
| +
|
| + static inline void trace(InlinedGlobalMarkingVisitor visitor, T* t)
|
| + {
|
| +        // Marking was inlined up to this point, but the object being traced
|
| +        // does not support an inlined trace method, so fall back to the
|
| +        // uninlined Visitor.
|
| + t->trace(visitor.getUninlined());
|
| + }
|
| +};
|
| +
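|
| +// Given the two specializations above, TraceTrait<T>::trace statically picks
|
| +// the best dispatch that T supports. For example (hypothetical types):
|
| +//
|
| +//   // NewStyleNode declares trace(Visitor*) and trace(InlinedGlobalMarkingVisitor);
|
| +//   // OldStyleNode declares only trace(Visitor*).
|
| +//   TraceGenericCompatibilityAdaptor<NewStyleNode>::trace(inlined, n); // stays inlined
|
| +//   TraceGenericCompatibilityAdaptor<OldStyleNode>::trace(inlined, o); // drops to Visitor*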
|
| +#define DECLARE_TRACE(virt, ovr) \
|
| + public: \
|
| + virt void trace(Visitor*) ovr; \
|
| + virt void trace(InlinedGlobalMarkingVisitor) ovr; \
|
| + private: \
|
| + template<typename VisitorDispatcher> void traceImpl(VisitorDispatcher); \
|
| + public:
|
| +
|
| +#define DEFINE_TRACE(CLASS) \
|
| + void CLASS::trace(Visitor* visitor) { traceImpl(visitor); } \
|
| + void CLASS::trace(InlinedGlobalMarkingVisitor visitor) { traceImpl(visitor); } \
|
| + template<typename VisitorDispatcher> \
|
| +    ALWAYS_INLINE void CLASS::traceImpl(VisitorDispatcher visitor)
|
| +
|
| +#define DEFINE_INLINE_TRACE(virt, ovr) \
|
| + virt void trace(Visitor* visitor) ovr { traceImpl(visitor); } \
|
| + virt void trace(InlinedGlobalMarkingVisitor visitor) ovr { traceImpl(visitor); } \
|
| + template<typename VisitorDispatcher> \
|
| + inline void traceImpl(VisitorDispatcher visitor)
|
| +
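|
| +// Intended usage of the macros above, as a sketch (hypothetical class):
|
| +//
|
| +//   class Node : public GarbageCollected<Node> {
|
| +//   public:
|
| +//       DECLARE_TRACE(, )
|
| +//   private:
|
| +//       Member<Node> m_next;
|
| +//   };
|
| +//
|
| +//   DEFINE_TRACE(Node)
|
| +//   {
|
| +//       visitor->trace(m_next);
|
| +//   }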
|
| // We trace vectors by using the trace trait on each element, which means you
|
| // can have vectors of general objects (not just pointers to objects) that can
|
| // be traced.
|
| @@ -598,6 +898,13 @@ public:
|
| };
|
|
|
| template<typename T>
|
| +template<typename VisitorDispatcher>
|
| +inline void TraceTrait<T>::mark(VisitorDispatcher visitor, const T* t)
|
| +{
|
| + DefaultTraceTrait<T>::mark(visitor, t);
|
| +}
|
| +
|
| +template<typename T>
|
| class DefaultTraceTrait<T, false> {
|
| public:
|
| static void mark(Visitor* visitor, const T* t)
|
| @@ -620,7 +927,10 @@ public:
|
| if (LIKELY(visitor->canTraceEagerly())) {
|
| if (visitor->ensureMarked(t)) {
|
| visitor->incrementTraceDepth();
|
| - TraceTrait<T>::trace(visitor, const_cast<T*>(t));
|
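| +            // When the active visitor is the global marking visitor, switch
|
| +            // to the inlined dispatcher so the remainder of this trace chain
|
| +            // avoids virtual dispatch.
|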
| + if (LIKELY(visitor->isGlobalMarkingVisitor()))
|
| + TraceTrait<T>::trace(InlinedGlobalMarkingVisitor(visitor), const_cast<T*>(t));
|
| + else
|
| + TraceTrait<T>::trace(visitor, const_cast<T*>(t));
|
| visitor->decrementTraceDepth();
|
| }
|
| return;
|
| @@ -629,6 +939,28 @@ public:
|
| visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace);
|
| }
|
|
|
| + static void mark(InlinedGlobalMarkingVisitor visitor, const T* t)
|
| + {
|
| + if (TraceEagerlyTrait<T>::value) {
|
| + // Protect against too deep trace call chains, and the
|
| + // unbounded system stack usage they can bring about.
|
| + //
|
| + // Assert against deep stacks so as to flush them out,
|
| + // but test and appropriately handle them should they occur
|
| + // in release builds.
|
| + ASSERT(visitor.canTraceEagerly());
|
| + if (LIKELY(visitor.canTraceEagerly())) {
|
| + if (visitor.ensureMarked(t)) {
|
| + visitor.incrementTraceDepth();
|
| + TraceTrait<T>::trace(visitor, const_cast<T*>(t));
|
| + visitor.decrementTraceDepth();
|
| + }
|
| + return;
|
| + }
|
| + }
|
| + visitor.mark(const_cast<T*>(t), &TraceTrait<T>::trace);
|
| + }
|
| +
|
| #if ENABLE(ASSERT)
|
| static void checkGCInfo(Visitor* visitor, const T* t)
|
| {
|
|
|