OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2013 Google Inc. All rights reserved. | 2 * Copyright (C) 2013 Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions are | 5 * modification, are permitted provided that the following conditions are |
6 * met: | 6 * met: |
7 * | 7 * |
8 * * Redistributions of source code must retain the above copyright | 8 * * Redistributions of source code must retain the above copyright |
9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
10 * * Redistributions in binary form must reproduce the above | 10 * * Redistributions in binary form must reproduce the above |
(...skipping 137 matching lines...)
148 template <typename T> const bool NeedsAdjustAndMark<T, true>::value; | 148 template <typename T> const bool NeedsAdjustAndMark<T, true>::value; |
149 | 149 |
150 template<typename T> | 150 template<typename T> |
151 class NeedsAdjustAndMark<T, false> { | 151 class NeedsAdjustAndMark<T, false> { |
152 public: | 152 public: |
153 static const bool value = WTF::IsSubclass<typename WTF::RemoveConst<T>::Type, GarbageCollectedMixin>::value; | 153 static const bool value = WTF::IsSubclass<typename WTF::RemoveConst<T>::Type, GarbageCollectedMixin>::value; |
154 }; | 154 }; |
155 | 155 |
156 template <typename T> const bool NeedsAdjustAndMark<T, false>::value; | 156 template <typename T> const bool NeedsAdjustAndMark<T, false>::value; |
157 | 157 |
| 158 template <typename T, bool = WTF::HasTraceNewMethod<T>::value> struct TraceGenericCompatibilityAdaptor; |
| 159 |
158 template<typename T, bool = NeedsAdjustAndMark<T>::value> class DefaultTraceTrait; | 160 template<typename T, bool = NeedsAdjustAndMark<T>::value> class DefaultTraceTrait; |
159 | 161 |
| 162 class InlinedGlobalMarkingVisitor; |
| 163 |
160 // The TraceTrait is used to specify how to mark an object pointer and | 164 // The TraceTrait is used to specify how to mark an object pointer and |
161 // how to trace all of the pointers in the object. | 165 // how to trace all of the pointers in the object. |
162 // | 166 // |
163 // By default, the 'trace' method implemented on an object itself is | 167 // By default, the 'trace' method implemented on an object itself is |
164 // used to trace the pointers to other heap objects inside the object. | 168 // used to trace the pointers to other heap objects inside the object. |
165 // | 169 // |
166 // However, the TraceTrait can be specialized to use a different | 170 // However, the TraceTrait can be specialized to use a different |
167 // implementation. A common case where a TraceTrait specialization is | 171 // implementation. A common case where a TraceTrait specialization is |
168 // needed is when multiple inheritance leads to pointers that are not | 172 // needed is when multiple inheritance leads to pointers that are not |
169 // to the start of the object in the Blink garbage-collected heap. In | 173 // to the start of the object in the Blink garbage-collected heap. In |
170 // that case the pointer has to be adjusted before marking. | 174 // that case the pointer has to be adjusted before marking. |
171 template<typename T> | 175 template<typename T> |
172 class TraceTrait { | 176 class TraceTrait { |
173 public: | 177 public: |
174 // Default implementation of TraceTrait<T>::trace just statically | 178 // Default implementation of TraceTrait<T>::trace just statically |
175 // dispatches to the trace method of the class T. | 179 // dispatches to the trace method of the class T. |
176 static void trace(Visitor* visitor, void* self) | 180 template<typename VisitorDispatcher> |
| 181 static void trace(VisitorDispatcher visitor, void* self) |
177 { | 182 { |
178 static_cast<T*>(self)->trace(visitor); | 183 TraceGenericCompatibilityAdaptor<T>::trace(visitor, static_cast<T*>(self)); |
179 } | 184 } |
180 | 185 |
181 static void mark(Visitor* visitor, const T* t) | 186 template<typename VisitorDispatcher> static void mark(VisitorDispatcher visitor, const T*); |
182 { | |
183 DefaultTraceTrait<T>::mark(visitor, t); | |
184 } | |
185 | 187 |
186 #if ENABLE(ASSERT) | 188 #if ENABLE(ASSERT) |
187 static void checkGCInfo(Visitor* visitor, const T* t) | 189 static void checkGCInfo(Visitor* visitor, const T* t) |
188 { | 190 { |
189 DefaultTraceTrait<T>::checkGCInfo(visitor, t); | 191 DefaultTraceTrait<T>::checkGCInfo(visitor, t); |
190 } | 192 } |
191 #endif | 193 #endif |
192 }; | 194 }; |
193 | 195 |
194 template<typename T> class TraceTrait<const T> : public TraceTrait<T> { }; | 196 template<typename T> class TraceTrait<const T> : public TraceTrait<T> { }; |
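The adjust-and-mark machinery this comment block describes follows from C++ object layout: a pointer to a secondary base class does not point at the start of the most-derived object, so a collector that locates mark bits from the allocation start has to adjust such pointers first. A minimal standalone sketch of the layout issue (illustrative types, not Blink API):

    #include <cstdio>

    struct BaseA { virtual ~BaseA() { } int a; };
    struct BaseB { virtual ~BaseB() { } int b; };
    struct Derived : public BaseA, public BaseB { int c; };

    int main()
    {
        Derived d;
        // The implicit conversion adjusts the pointer past the BaseA
        // subobject, so 'inner' no longer equals the allocation start
        // that the heap uses to find the object header and mark bit.
        BaseB* inner = &d;
        std::printf("start=%p inner=%p\n",
            static_cast<void*>(&d), static_cast<void*>(inner));
        return 0;
    }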
(...skipping 325 matching lines...)
520 #undef DECLARE_VISITOR_METHODS | 522 #undef DECLARE_VISITOR_METHODS |
521 | 523 |
522 #if ENABLE(GC_PROFILE_MARKING) | 524 #if ENABLE(GC_PROFILE_MARKING) |
523 void setHostInfo(void* object, const String& name) | 525 void setHostInfo(void* object, const String& name) |
524 { | 526 { |
525 m_hostObject = object; | 527 m_hostObject = object; |
526 m_hostName = name; | 528 m_hostName = name; |
527 } | 529 } |
528 #endif | 530 #endif |
529 | 531 |
| 532 virtual bool isGlobalMarkingVisitor() { return false; } |
| 533 |
| 534 #if 0 |
530 inline bool canTraceEagerly() const { return m_traceDepth < kMaxEagerTraceDepth; } | 535 inline bool canTraceEagerly() const { return m_traceDepth < kMaxEagerTraceDepth; } |
531 inline void incrementTraceDepth() { m_traceDepth++; } | 536 inline void incrementTraceDepth() { m_traceDepth++; } |
532 inline void decrementTraceDepth() { ASSERT(m_traceDepth > 0); m_traceDepth--; } | 537 inline void decrementTraceDepth() { ASSERT(m_traceDepth > 0); m_traceDepth--; } |
| 538 #else |
| 539 inline bool canTraceEagerly() const { return true; } |
| 540 inline void incrementTraceDepth() { } |
| 541 inline void decrementTraceDepth() { } |
| 542 #endif |
| 543 |
| 544 // This should only be used from InlinedGlobalMarkingVisitor. |
| 545 virtual void pushTraceCallback(void*, TraceCallback) { ASSERT_NOT_REACHED(); } |
533 | 546 |
534 protected: | 547 protected: |
535 Visitor() | 548 Visitor() |
536 : m_traceDepth(0) | 549 // : m_traceDepth(0) |
537 { | 550 { |
538 } | 551 } |
539 | 552 |
540 virtual void registerWeakCell(void**, WeakPointerCallback) = 0; | 553 virtual void registerWeakCell(void**, WeakPointerCallback) = 0; |
541 #if ENABLE(GC_PROFILE_MARKING) | 554 #if ENABLE(GC_PROFILE_MARKING) |
542 void* m_hostObject; | 555 void* m_hostObject; |
543 String m_hostName; | 556 String m_hostName; |
544 #endif | 557 #endif |
545 | 558 |
546 private: | 559 private: |
547 template<typename T> | 560 template<typename T> |
548 static void handleWeakCell(Visitor* self, void* obj) | 561 static void handleWeakCell(Visitor* self, void* obj) |
549 { | 562 { |
550 T** cell = reinterpret_cast<T**>(obj); | 563 T** cell = reinterpret_cast<T**>(obj); |
551 if (*cell && !self->isAlive(*cell)) | 564 if (*cell && !self->isAlive(*cell)) |
552 *cell = 0; | 565 *cell = 0; |
553 } | 566 } |
554 | 567 |
555 // The maximum depth of eager, unrolled trace() calls that is | 568 // The maximum depth of eager, unrolled trace() calls that is |
556 // considered safe and allowed. | 569 // considered safe and allowed. |
557 const int kMaxEagerTraceDepth = 100; | 570 // const int kMaxEagerTraceDepth = 100; |
558 | 571 |
559 int m_traceDepth; | 572 // int m_traceDepth; |
560 }; | 573 }; |
| 574 |
| 575 template<typename T> |
| 576 inline T&& forward(typename WTF::Identity<T>::type&& x) { return x; } |
| 577 |
| 578 class InlinedGlobalMarkingVisitor { |
| 579 public: |
| 580 InlinedGlobalMarkingVisitor(Visitor* visitor) |
| 581 : m_visitor(visitor) |
| 582 { |
| 583 ASSERT(visitor->isGlobalMarkingVisitor()); |
| 584 } |
| 585 |
| 586 // Hack to allow visitor->trace() |
| 587 InlinedGlobalMarkingVisitor* operator->() { return this; } |
| 588 |
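The operator-> overload above is what keeps trace bodies dispatcher-agnostic: `visitor->trace(...)` compiles whether `visitor` is a plain Visitor* or an InlinedGlobalMarkingVisitor held by value. A self-contained miniature of the idiom (hypothetical names):

    #include <cstdio>

    struct Dispatcher {
        // Returning 'this' lets a by-value object be used with the same
        // arrow syntax as a raw pointer.
        Dispatcher* operator->() { return this; }
        void trace(int x) { std::printf("traced %d\n", x); }
    };

    template<typename VisitorDispatcher>
    void traceImpl(VisitorDispatcher visitor)
    {
        visitor->trace(42); // works for Dispatcher and Dispatcher* alike
    }

    int main()
    {
        Dispatcher d;
        traceImpl(d);  // by value: the operator-> overload kicks in
        traceImpl(&d); // raw pointer: the built-in arrow applies
        return 0;
    }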
| 589 // One-argument templated mark method. This uses the static type of |
| 590 // the argument to get the TraceTrait. By default, the mark method |
| 591 // of the TraceTrait just calls the virtual two-argument mark method on this |
| 592 // visitor, where the second argument is the static trace method of the trait. |
| 593 template<typename T> void mark(T* t); |
| 594 |
| 595 // Member version of the one-argument templated trace method. |
| 596 template<typename T> |
| 597 void trace(const Member<T>& t) |
| 598 { |
| 599 mark(t.get()); |
| 600 } |
| 601 |
| 602 // Fallback method used only when we need to trace raw pointers of T. |
| 603 // This is the case when a member is a union, where Member<T> fields are not supported. |
| 604 template<typename T> |
| 605 void trace(const T* t) |
| 606 { |
| 607 mark(const_cast<T*>(t)); |
| 608 } |
| 609 |
| 610 template<typename T> |
| 611 void trace(T* t) |
| 612 { |
| 613 mark(t); |
| 614 } |
| 615 |
| 616 // WeakMember version of the templated trace method. It doesn't keep |
| 617 // the traced thing alive, but will write null to the WeakMember later |
| 618 // if the pointed-to object is dead. It's lying for this to be const, |
| 619 // but the overloading resolver prioritizes constness too high when |
| 620 // picking the correct overload, so all these trace methods have to have |
| 621 // the same constness on their argument to allow the type to decide. |
| 622 template<typename T> |
| 623 void trace(const WeakMember<T>& t) |
| 624 { |
| 625 // Check that we actually know the definition of T when tracing. |
| 626 COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing); |
| 627 registerWeakCell(const_cast<WeakMember<T>&>(t).cell()); |
| 628 COMPILE_ASSERT_IS_GARBAGE_COLLECTED(T, AttemptedToWeakTraceNonGarbageCollectedObject); |
| 629 } |
| 630 |
| 631 template<typename T> |
| 632 void traceInCollection(T& t, WTF::ShouldWeakPointersBeMarkedStrongly strongify) |
| 633 { |
| 634 HashTraits<T>::traceInCollection(m_visitor, t, strongify); |
| 635 } |
| 636 |
| 637 // Fallback trace method for part objects to allow individual trace methods |
| 638 // to trace through a part object with visitor->trace(m_partObject). This |
| 639 // takes a const argument, because otherwise it will match too eagerly: a |
| 640 // non-const argument would match a non-const Vector<T>& argument better |
| 641 // than the specialization that takes const Vector<T>&. For a similar reason, |
| 642 // the other specializations take a const argument even though they are |
| 643 // usually used with non-const arguments, otherwise this function would match |
| 644 // too well. |
| 645 template<typename T> |
| 646 void trace(const T& t) |
| 647 { |
| 648 if (WTF::IsPolymorphic<T>::value) { |
| 649 intptr_t vtable = *reinterpret_cast<const intptr_t*>(&t); |
| 650 if (!vtable) |
| 651 return; |
| 652 } |
| 653 const_cast<T&>(t).trace(*this); |
| 654 } |
| 655 |
| 656 // The following trace methods are for off-heap collections. |
| 657 template<typename T, size_t inlineCapacity> |
| 658 void trace(const Vector<T, inlineCapacity>& vector) |
| 659 { |
| 660 OffHeapCollectionTraceTrait<Vector<T, inlineCapacity, WTF::DefaultAllocator> >::trace(m_visitor, vector); |
| 661 } |
| 662 |
| 663 template<typename T, size_t N> |
| 664 void trace(const Deque<T, N>& deque) |
| 665 { |
| 666 OffHeapCollectionTraceTrait<Deque<T, N> >::trace(m_visitor, deque); |
| 667 } |
| 668 |
| 669 #if !ENABLE(OILPAN) |
| 670 // These trace methods are needed to allow compiling and calling trace on |
| 671 // transition types. We need to support calls in the non-oilpan build |
| 672 // because a fully transitioned type (which will have its trace method |
| 673 // called) might trace a field that is in transition. Once transition types |
| 674 // are removed these can be removed. |
| 675 template<typename T> void trace(const OwnPtr<T>&) { } |
| 676 template<typename T> void trace(const RefPtr<T>&) { } |
| 677 template<typename T> void trace(const RawPtr<T>&) { } |
| 678 template<typename T> void trace(const WeakPtr<T>&) { } |
| 679 #endif |
| 680 |
| 681 // This method marks an object and adds it to the set of objects |
| 682 // that should have their trace method called. Since not all |
| 683 // objects have vtables we have to have the callback as an |
| 684 // explicit argument, but we can use the templated one-argument |
| 685 // mark method above to automatically provide the callback |
| 686 // function. |
| 687 void mark(const void* o, TraceCallback callback); |
| 688 |
| 689 template<typename T> void markNoTracing(const T* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); } |
| 690 void markNoTracing(const void* pointer) { mark(pointer, reinterpret_cast<TraceCallback>(0)); } |
| 691 void markNoTracing(HeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); } |
| 692 void markNoTracing(GeneralHeapObjectHeader* header) { mark(header, reinterpret_cast<TraceCallback>(0)); } |
| 693 |
| 694 // If the object calls this during the regular trace callback, then the |
| 695 // WeakPointerCallback argument may be called later, when the strong roots |
| 696 // have all been found. The WeakPointerCallback will normally use isAlive |
| 697 // to find out whether some pointers are pointing to dying objects. When |
| 698 // the WeakPointerCallback is done the object must have purged all pointers |
| 699 // to objects where isAlive returned false. In the weak callback it is not |
| 700 // allowed to touch other objects (except using isAlive) or to allocate on |
| 701 // the GC heap. Note that even removing things from HeapHashSet or |
| 702 // HeapHashMap can cause an allocation if the backing store resizes, but |
| 703 // these collections know to remove WeakMember elements safely. |
| 704 // |
| 705 // The weak pointer callbacks are run on the thread that owns the |
| 706 // object and other threads are not stopped during the |
| 707 // callbacks. Since isAlive is used in the callback to determine |
| 708 // if objects pointed to are alive it is crucial that the object |
| 709 // pointed to belong to the same thread as the object receiving |
| 710 // the weak callback. Since other threads have been resumed the |
| 711 // mark bits are not valid for objects from other threads. |
| 712 void registerWeakMembers(const void* object, WeakPointerCallback callback) { m_visitor->registerWeakMembers(object, object, callback); } |
| 713 void registerWeakMembers(const void* a, const void* b, WeakPointerCallback callback) { m_visitor->registerWeakMembers(a, b, callback); } |
| 714 |
| 715 template<typename T, void (T::*method)(Visitor*)> |
| 716 void registerWeakMembers(const T* obj) |
| 717 { |
| 718 registerWeakMembers(obj, &TraceMethodDelegate<T, method>::trampoline); |
| 719 } |
| 720 |
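As a usage sketch for the weak-member registration above (hypothetical class; the trampoline form and isAlive come from the declarations in this file):

    class Observer : public GarbageCollected<Observer> {
    public:
        void trace(Visitor* visitor)
        {
            // Registers a callback that runs after all strong roots have
            // been found; isAlive() may only be consulted inside that
            // callback, and no GC-heap allocation is allowed there.
            visitor->registerWeakMembers<Observer, &Observer::clearWeakMembers>(this);
        }

        void clearWeakMembers(Visitor* visitor)
        {
            if (!visitor->isAlive(m_peer))
                m_peer = nullptr;
        }

    private:
        WeakMember<Observer> m_peer;
    };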
| 721 #if 0 |
| 722 // For simple cases where you just want to zero out a cell when the thing |
| 723 // it is pointing at is garbage, you can use this. This will register a |
| 724 // callback for each cell that needs to be zeroed, so if you have a lot of |
| 725 // weak cells in your object you should still consider using |
| 726 // registerWeakMembers above. |
| 727 // |
| 728 // In contrast to registerWeakMembers, the weak cell callbacks are |
| 729 // run on the thread performing garbage collection. Therefore, all |
| 730 // threads are stopped during weak cell callbacks. |
| 731 template<typename T> |
| 732 void registerWeakCell(T** cell) |
| 733 { |
| 734 registerWeakCell(reinterpret_cast<void**>(cell), &handleWeakCell<T>); |
| 735 } |
| 736 |
| 737 virtual void registerWeakTable(const void*, EphemeronCallback, EphemeronCallback) = 0; |
| 738 #if ENABLE(ASSERT) |
| 739 virtual bool weakTableRegistered(const void*) = 0; |
| 740 #endif |
| 741 #endif |
| 742 |
| 743 bool isMarked(const void* obj); |
| 744 bool ensureMarked(const void* obj); |
| 745 |
| 746 template<typename T> inline bool isAlive(T* obj) |
| 747 { |
| 748 // Check that we actually know the definition of T when tracing. |
| 749 COMPILE_ASSERT(sizeof(T), WeNeedToKnowTheDefinitionOfTheTypeWeAreTracing); |
| 750 // The strongification of collections relies on the fact that once a |
| 751 // collection has been strongified, there is no way that it can contain |
| 752 // non-live entries, so no entries will be removed. Since you can't set |
| 753 // the mark bit on a null pointer, that means that null pointers are |
| 754 // always 'alive'. |
| 755 if (!obj) |
| 756 return true; |
| 757 return ObjectAliveTrait<T>::isHeapObjectAlive(this, obj); |
| 758 } |
| 759 template<typename T> inline bool isAlive(const Member<T>& member) |
| 760 { |
| 761 return isAlive(member.get()); |
| 762 } |
| 763 template<typename T> inline bool isAlive(RawPtr<T> ptr) |
| 764 { |
| 765 return isAlive(ptr.get()); |
| 766 } |
| 767 |
| 768 #if ENABLE(ASSERT) |
| 769 void checkGCInfo(const void*, const GCInfo*); |
| 770 #endif |
| 771 |
| 772 // Macro to declare methods needed for each typed heap. |
| 773 #define DECLARE_VISITOR_METHODS(Type) \ |
| 774 DEBUG_ONLY(void checkGCInfo(const Type*, const GCInfo*);) \ |
| 775 void mark(const Type* t, TraceCallback callback) { m_visitor->mark(t, callback); } \ |
| 776 bool isMarked(const Type* t) { return m_visitor->isMarked(t); }\ |
| 777 bool ensureMarked(const Type* t) { return m_visitor->ensureMarked(t); } |
| 778 |
| 779 FOR_EACH_TYPED_HEAP(DECLARE_VISITOR_METHODS) |
| 780 #undef DECLARE_VISITOR_METHODS |
| 781 |
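For one hypothetical entry Node of FOR_EACH_TYPED_HEAP, the macro above expands (modulo DEBUG_ONLY) to plain forwarding overloads:

    void mark(const Node* t, TraceCallback callback) { m_visitor->mark(t, callback); }
    bool isMarked(const Node* t) { return m_visitor->isMarked(t); }
    bool ensureMarked(const Node* t) { return m_visitor->ensureMarked(t); }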
| 782 #if ENABLE(GC_PROFILE_MARKING) |
| 783 void setHostInfo(void* object, const String& name) |
| 784 { |
| 785 m_visitor->setHostInfo(object, name); |
| 786 } |
| 787 #endif |
| 788 |
| 789 inline bool canTraceEagerly() const { return true; } |
| 790 inline void incrementTraceDepth() { } |
| 791 inline void decrementTraceDepth() { } |
| 792 |
| 793 Visitor* getUninlined() { return m_visitor; } |
| 794 |
| 795 private: |
| 796 void visitHeader(HeapObjectHeader* header, const void* objectPointer, TraceCallback callback); |
| 797 |
| 798 Visitor* m_visitor; |
| 799 }; |
| 800 |
| 801 template<typename T> |
| 802 void InlinedGlobalMarkingVisitor::mark(T* t) |
| 803 { |
| 804 if (!t) |
| 805 return; |
| 806 #if ENABLE(ASSERT) |
| 807 TraceTrait<T>::checkGCInfo(m_visitor, t); |
| 808 #endif |
| 809 TraceTrait<T>::mark(InlinedGlobalMarkingVisitor(*this), t); |
| 810 |
| 811 COMPILE_ASSERT_IS_GARBAGE_COLLECTED(T, AttemptedToMarkNonGarbageCollectedObject); |
| 812 } |
| 813 |
| 814 template <typename T> |
| 815 struct TraceGenericCompatibilityAdaptor<T, true> { |
| 816 static inline void trace(Visitor* visitor, T* t) |
| 817 { |
| 818 t->trace(visitor); |
| 819 } |
| 820 |
| 821 static inline void trace(InlinedGlobalMarkingVisitor visitor, T* t) |
| 822 { |
| 823 t->trace(InlinedGlobalMarkingVisitor(visitor)); |
| 824 } |
| 825 }; |
| 826 |
| 827 template <typename T> |
| 828 struct TraceGenericCompatibilityAdaptor<T, false> { |
| 829 static inline void trace(Visitor* visitor, T* t) |
| 830 { |
| 831 // Fall back to the old-style trace(Visitor*). |
| 832 t->trace(visitor); |
| 833 } |
| 834 |
| 835 static inline void trace(InlinedGlobalMarkingVisitor visitor, T* t) |
| 836 { |
| 837 // Visiting was inlined to here, but the object we are going to trace doesn't support inlined tracing. |
| 838 t->trace(visitor.getUninlined()); |
| 839 } |
| 840 }; |
| 841 |
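WTF::HasTraceNewMethod itself is not part of this diff; the bool parameter that selects between the two adaptor specializations above would be driven by a member-detection trait along these lines (a sketch of the usual sizeof-based SFINAE idiom, not the actual WTF implementation):

    template<typename T>
    struct HasTraceNewMethodSketch {
        typedef char YesType;
        struct NoType { char padding[8]; };

        // Matches only when U declares void trace(InlinedGlobalMarkingVisitor).
        template<typename U, void (U::*)(InlinedGlobalMarkingVisitor)> struct SFINAECheck { };
        template<typename U> static YesType check(SFINAECheck<U, &U::trace>*);
        template<typename U> static NoType check(...);

        static const bool value = sizeof(check<T>(0)) == sizeof(YesType);
    };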
| 842 #define DECLARE_TRACE(virt, ovr) \ |
| 843 public: \ |
| 844 virt void trace(Visitor*) ovr; \ |
| 845 virt void trace(InlinedGlobalMarkingVisitor) ovr; \ |
| 846 private: \ |
| 847 template<typename VisitorDispatcher> void traceImpl(VisitorDispatcher); \ |
| 848 public: |
| 849 |
| 850 #define DEFINE_TRACE(CLASS) \ |
| 851 void CLASS::trace(Visitor* visitor) { traceImpl(visitor); } \ |
| 852 void CLASS::trace(InlinedGlobalMarkingVisitor visitor) { traceImpl(visitor); } \ |
| 853 template<typename VisitorDispatcher> \ |
| 854 ALWAYS_INLINE void CLASS::traceImpl(VisitorDispatcher visitor) \ |
| 855 |
| 856 #define DEFINE_INLINE_TRACE(virt, ovr) \ |
| 857 virt void trace(Visitor* visitor) ovr { traceImpl(visitor); } \ |
| 858 virt void trace(InlinedGlobalMarkingVisitor visitor) ovr { traceImpl(visitor); } \ |
| 859 template<typename VisitorDispatcher> \ |
| 860 inline void traceImpl(VisitorDispatcher visitor) |
561 | 861 |
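To make the macro contract above concrete, a client class would use the pair roughly like this (illustrative class; the empty macro arguments stand in for the non-virtual, non-override case):

    // In the header:
    class Node : public GarbageCollected<Node> {
    public:
        DECLARE_TRACE(, ) // declares both trace() overloads plus traceImpl
    private:
        Member<Node> m_next;
    };

    // In the .cpp file; the braced body becomes the shared traceImpl template:
    DEFINE_TRACE(Node)
    {
        visitor->trace(m_next); // compiles against either dispatcher type
    }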
562 // We trace vectors by using the trace trait on each element, which means you | 862 // We trace vectors by using the trace trait on each element, which means you |
563 // can have vectors of general objects (not just pointers to objects) that can | 863 // can have vectors of general objects (not just pointers to objects) that can |
564 // be traced. | 864 // be traced. |
565 template<typename T, size_t N> | 865 template<typename T, size_t N> |
566 struct OffHeapCollectionTraceTrait<WTF::Vector<T, N, WTF::DefaultAllocator> > { | 866 struct OffHeapCollectionTraceTrait<WTF::Vector<T, N, WTF::DefaultAllocator> > { |
567 typedef WTF::Vector<T, N, WTF::DefaultAllocator> Vector; | 867 typedef WTF::Vector<T, N, WTF::DefaultAllocator> Vector; |
568 | 868 |
569 static void trace(Visitor* visitor, const Vector& vector) | 869 static void trace(Visitor* visitor, const Vector& vector) |
570 { | 870 { |
(...skipping 20 matching lines...)
591 template<typename T, typename Traits = WTF::VectorTraits<T> > | 891 template<typename T, typename Traits = WTF::VectorTraits<T> > |
592 class HeapVectorBacking; | 892 class HeapVectorBacking; |
593 | 893 |
594 template<typename Table> | 894 template<typename Table> |
595 class HeapHashTableBacking { | 895 class HeapHashTableBacking { |
596 public: | 896 public: |
597 static void finalize(void* pointer); | 897 static void finalize(void* pointer); |
598 }; | 898 }; |
599 | 899 |
600 template<typename T> | 900 template<typename T> |
| 901 template<typename VisitorDispatcher> |
| 902 inline void TraceTrait<T>::mark(VisitorDispatcher visitor, const T* t) |
| 903 { |
| 904 DefaultTraceTrait<T>::mark(visitor, t); |
| 905 } |
| 906 |
| 907 template<typename T> |
601 class DefaultTraceTrait<T, false> { | 908 class DefaultTraceTrait<T, false> { |
602 public: | 909 public: |
603 static void mark(Visitor* visitor, const T* t) | 910 static void mark(Visitor* visitor, const T* t) |
604 { | 911 { |
605 // Default mark method of the trait just calls the two-argument mark | 912 // Default mark method of the trait just calls the two-argument mark |
606 // method on the visitor. The second argument is the static trace method | 913 // method on the visitor. The second argument is the static trace method |
607 // of the trait, which by default calls the instance method | 914 // of the trait, which by default calls the instance method |
608 // trace(Visitor*) on the object. | 915 // trace(Visitor*) on the object. |
609 // | 916 // |
610 // If the trait allows it, invoke the trace callback right here on the | 917 // If the trait allows it, invoke the trace callback right here on the |
611 // not-yet-marked object. | 918 // not-yet-marked object. |
612 if (TraceEagerlyTrait<T>::value) { | 919 if (TraceEagerlyTrait<T>::value) { |
613 // Protect against too deep trace call chains, and the | 920 // Protect against too deep trace call chains, and the |
614 // unbounded system stack usage they can bring about. | 921 // unbounded system stack usage they can bring about. |
615 // | 922 // |
616 // Assert against deep stacks so as to flush them out, | 923 // Assert against deep stacks so as to flush them out, |
617 // but test and appropriately handle them should they occur | 924 // but test and appropriately handle them should they occur |
618 // in release builds. | 925 // in release builds. |
619 ASSERT(visitor->canTraceEagerly()); | 926 ASSERT(visitor->canTraceEagerly()); |
620 if (LIKELY(visitor->canTraceEagerly())) { | 927 if (LIKELY(visitor->canTraceEagerly())) { |
621 if (visitor->ensureMarked(t)) { | 928 if (visitor->ensureMarked(t)) { |
622 visitor->incrementTraceDepth(); | 929 visitor->incrementTraceDepth(); |
623 TraceTrait<T>::trace(visitor, const_cast<T*>(t)); | 930 if (LIKELY(visitor->isGlobalMarkingVisitor())) |
| 931 TraceTrait<T>::trace(InlinedGlobalMarkingVisitor(visitor), const_cast<T*>(t)); |
| 932 else |
| 933 TraceTrait<T>::trace(visitor, const_cast<T*>(t)); |
624 visitor->decrementTraceDepth(); | 934 visitor->decrementTraceDepth(); |
625 } | 935 } |
626 return; | 936 return; |
627 } | 937 } |
628 } | 938 } |
629 visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace); | 939 visitor->mark(const_cast<T*>(t), &TraceTrait<T>::trace); |
630 } | 940 } |
631 | 941 |
| 942 static void mark(InlinedGlobalMarkingVisitor visitor, const T* t) |
| 943 { |
| 944 if (TraceEagerlyTrait<T>::value) { |
| 945 // Protect against too deep trace call chains, and the |
| 946 // unbounded system stack usage they can bring about. |
| 947 // |
| 948 // Assert against deep stacks so as to flush them out, |
| 949 // but test and appropriately handle them should they occur |
| 950 // in release builds. |
| 951 ASSERT(visitor.canTraceEagerly()); |
| 952 if (LIKELY(visitor.canTraceEagerly())) { |
| 953 if (visitor.ensureMarked(t)) { |
| 954 visitor.incrementTraceDepth(); |
| 955 TraceTrait<T>::trace(visitor, const_cast<T*>(t)); |
| 956 visitor.decrementTraceDepth(); |
| 957 } |
| 958 return; |
| 959 } |
| 960 } |
| 961 visitor.mark(const_cast<T*>(t), &TraceTrait<T>::trace); |
| 962 } |
| 963 |
632 #if ENABLE(ASSERT) | 964 #if ENABLE(ASSERT) |
633 static void checkGCInfo(Visitor* visitor, const T* t) | 965 static void checkGCInfo(Visitor* visitor, const T* t) |
634 { | 966 { |
635 visitor->checkGCInfo(const_cast<T*>(t), GCInfoTrait<T>::get()); | 967 visitor->checkGCInfo(const_cast<T*>(t), GCInfoTrait<T>::get()); |
636 } | 968 } |
637 #endif | 969 #endif |
638 }; | 970 }; |
639 | 971 |
640 template<typename T> | 972 template<typename T> |
641 class DefaultTraceTrait<T, true> { | 973 class DefaultTraceTrait<T, true> { |
(...skipping 141 matching lines...)
783 struct GCInfoTrait { | 1115 struct GCInfoTrait { |
784 static const GCInfo* get() | 1116 static const GCInfo* get() |
785 { | 1117 { |
786 return GCInfoAtBase<typename GetGarbageCollectedBase<T>::type>::get(); | 1118 return GCInfoAtBase<typename GetGarbageCollectedBase<T>::type>::get(); |
787 } | 1119 } |
788 }; | 1120 }; |
789 | 1121 |
790 } | 1122 } |
791 | 1123 |
792 #endif | 1124 #endif |