| OLD | NEW | 
|     1 /* |     1 /* | 
|     2  * Copyright (C) 2013 Google Inc. All rights reserved. |     2  * Copyright (C) 2013 Google Inc. All rights reserved. | 
|     3  * |     3  * | 
|     4  * Redistribution and use in source and binary forms, with or without |     4  * Redistribution and use in source and binary forms, with or without | 
|     5  * modification, are permitted provided that the following conditions are |     5  * modification, are permitted provided that the following conditions are | 
|     6  * met: |     6  * met: | 
|     7  * |     7  * | 
|     8  *     * Redistributions of source code must retain the above copyright |     8  *     * Redistributions of source code must retain the above copyright | 
|     9  * notice, this list of conditions and the following disclaimer. |     9  * notice, this list of conditions and the following disclaimer. | 
|    10  *     * Redistributions in binary form must reproduce the above |    10  *     * Redistributions in binary form must reproduce the above | 
| (...skipping 162 matching lines...) | 
|   173     explicit HeapObjectHeader(size_t encodedSize) |   173     explicit HeapObjectHeader(size_t encodedSize) | 
|   174         : m_size(encodedSize) |   174         : m_size(encodedSize) | 
|   175 #if ENABLE(ASSERT) |   175 #if ENABLE(ASSERT) | 
|   176         , m_magic(magic) |   176         , m_magic(magic) | 
|   177 #endif |   177 #endif | 
|   178     { |   178     { | 
|   179         // sizeof(HeapObjectHeader) must be equal to or smaller than |   179         // sizeof(HeapObjectHeader) must be equal to or smaller than | 
|   180         // allocationGranularity, because HeapObjectHeader is used as a header |   180         // allocationGranularity, because HeapObjectHeader is used as a header | 
|   181         // for a freed entry.  Given that the smallest entry size is |   181         // for a freed entry.  Given that the smallest entry size is | 
|   182         // allocationGranularity, HeapObjectHeader must fit into the size. |   182         // allocationGranularity, HeapObjectHeader must fit into the size. | 
|   183         COMPILE_ASSERT(sizeof(HeapObjectHeader) <= allocationGranularity, SizeOfHeapObjectHeaderMustBeSmallerThanAllocationGranularity); |   183         static_assert(sizeof(HeapObjectHeader) <= allocationGranularity, "size of HeapObjectHeader must be smaller than allocationGranularity"); | 
|   184     } |   184     } | 
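The substance of this change is swapping WTF's COMPILE_ASSERT macro, whose failure "message" had to be a legal C++ identifier, for C++11 static_assert with a readable string literal. A minimal, self-contained before/after sketch, using a simplified stand-in for the old macro (the real WTF definition differs in detail):

    #include <cstddef>

    // Simplified stand-in for the old COMPILE_ASSERT: a negative array bound forces
    // a compile error, and the "message" is abused as the typedef name.
    #define COMPILE_ASSERT_SKETCH(expr, name) typedef int name[(expr) ? 1 : -1]

    struct HeaderSketch { std::size_t m_size; };   // stand-in for HeapObjectHeader
    const std::size_t allocationGranularity = 8;   // assumed value for illustration

    // Old style: the failure "message" must be spelled as an identifier.
    COMPILE_ASSERT_SKETCH(sizeof(HeaderSketch) <= allocationGranularity,
                          SizeOfHeaderMustFitAllocationGranularity);

    // New style: static_assert takes a human-readable string literal.
    static_assert(sizeof(HeaderSketch) <= allocationGranularity,
                  "size of HeapObjectHeader must be smaller than allocationGranularity");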
|   185  |   185  | 
|   186     NO_SANITIZE_ADDRESS |   186     NO_SANITIZE_ADDRESS | 
|   187     HeapObjectHeader(size_t encodedSize, const GCInfo*) |   187     HeapObjectHeader(size_t encodedSize, const GCInfo*) | 
|   188         : m_size(encodedSize) |   188         : m_size(encodedSize) | 
|   189 #if ENABLE(ASSERT) |   189 #if ENABLE(ASSERT) | 
|   190         , m_magic(magic) |   190         , m_magic(magic) | 
|   191 #endif |   191 #endif | 
|   192     { |   192     { | 
|   193         COMPILE_ASSERT(sizeof(HeapObjectHeader) <= allocationGranularity, SizeOfHeapObjectHeaderMustBeSmallerThanAllocationGranularity); |   193         static_assert(sizeof(HeapObjectHeader) <= allocationGranularity, "size of HeapObjectHeader must be smaller than allocationGranularity"); | 
|   194     } |   194     } | 
|   195  |   195  | 
|   196     static size_t freeListEncodedSize(size_t size) { return size | freeListMask; } |   196     static size_t freeListEncodedSize(size_t size) { return size | freeListMask; } | 
|   197  |   197  | 
|   198     NO_SANITIZE_ADDRESS |   198     NO_SANITIZE_ADDRESS | 
|   199     bool isFree() { return m_size & freeListMask; } |   199     bool isFree() { return m_size & freeListMask; } | 
|   200     NO_SANITIZE_ADDRESS |   200     NO_SANITIZE_ADDRESS | 
|   201     bool isPromptlyFreed() { return (m_size & promptlyFreedMask) == promptlyFreedMask; } |   201     bool isPromptlyFreed() { return (m_size & promptlyFreedMask) == promptlyFreedMask; } | 
|   202     NO_SANITIZE_ADDRESS |   202     NO_SANITIZE_ADDRESS | 
|   203     void markPromptlyFreed() { m_size |= promptlyFreedMask; } |   203     void markPromptlyFreed() { m_size |= promptlyFreedMask; } | 
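The accessors above pack free-list state into the size word: freeListMask and promptlyFreedMask select low bits that granularity-aligned sizes never use. A hedged sketch of that encoding with assumed mask values (the real constants are defined elsewhere in Heap.h):

    #include <cassert>
    #include <cstddef>

    // Assumed values for illustration only; object sizes are allocation-granularity
    // aligned, so the low bits of m_size are free to carry these flags.
    const std::size_t freeListMask = 1;
    const std::size_t promptlyFreedMask = 3;

    struct HeaderSketch {
        std::size_t m_size;
        bool isFree() const { return m_size & freeListMask; }
        bool isPromptlyFreed() const { return (m_size & promptlyFreedMask) == promptlyFreedMask; }
        void markPromptlyFreed() { m_size |= promptlyFreedMask; }
    };

    int main()
    {
        HeaderSketch h = { 64 | freeListMask };  // a 64-byte entry placed on the free list
        assert(h.isFree() && !h.isPromptlyFreed());
        h.markPromptlyFreed();                   // under these assumed masks, this also keeps the free bit
        assert(h.isPromptlyFreed());
        return 0;
    }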
| (...skipping 330 matching lines...) | 
|   534 // object. |   534 // object. | 
|   535 // |   535 // | 
|   536 // The layout of a large heap object is as follows: |   536 // The layout of a large heap object is as follows: | 
|   537 // |   537 // | 
|   538 // | BaseHeapPage | next pointer | GeneralHeapObjectHeader or HeapObjectHeader | payload | |   538 // | BaseHeapPage | next pointer | GeneralHeapObjectHeader or HeapObjectHeader | payload | | 
|   539 template<typename Header> |   539 template<typename Header> | 
|   540 class LargeObject final : public BaseHeapPage { |   540 class LargeObject final : public BaseHeapPage { | 
|   541 public: |   541 public: | 
|   542     LargeObject(PageMemory* storage, const GCInfo* gcInfo, ThreadState* state) : BaseHeapPage(storage, gcInfo, state) |   542     LargeObject(PageMemory* storage, const GCInfo* gcInfo, ThreadState* state) : BaseHeapPage(storage, gcInfo, state) | 
|   543     { |   543     { | 
|   544         COMPILE_ASSERT(!(sizeof(LargeObject<Header>) & allocationMask), large_heap_object_header_misaligned); |   544         static_assert(!(sizeof(LargeObject<Header>) & allocationMask), "LargeObject<Header> misaligned"); | 
|   545     } |   545     } | 
|   546  |   546  | 
|   547     virtual void checkAndMarkPointer(Visitor*, Address) override; |   547     virtual void checkAndMarkPointer(Visitor*, Address) override; | 
|   548     virtual bool isLargeObject() override { return true; } |   548     virtual bool isLargeObject() override { return true; } | 
|   549  |   549  | 
|   550 #if ENABLE(GC_PROFILE_MARKING) |   550 #if ENABLE(GC_PROFILE_MARKING) | 
|   551     virtual const GCInfo* findGCInfo(Address address) |   551     virtual const GCInfo* findGCInfo(Address address) | 
|   552     { |   552     { | 
|   553         if (!objectContains(address)) |   553         if (!objectContains(address)) | 
|   554             return nullptr; |   554             return nullptr; | 
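The layout comment above can be made concrete. A hedged sketch of how the payload address follows from that layout; the struct names and the helper are hypothetical stand-ins, and alignment padding is ignored:

    #include <cstddef>
    #include <cstdint>

    // Hypothetical stand-ins, purely to illustrate the layout comment:
    // | BaseHeapPage | next pointer | header | payload |
    struct BaseHeapPageSketch { void* storage; const void* gcInfo; void* threadState; };
    struct HeaderSketch { std::size_t m_size; };

    std::uint8_t* payloadOfLargeObjectSketch(std::uint8_t* objectStart)
    {
        return objectStart
            + sizeof(BaseHeapPageSketch)  // page bookkeeping at the front
            + sizeof(void*)               // the "next" pointer chaining large objects
            + sizeof(HeaderSketch);       // the per-object header, then the payload
    }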
| (...skipping 1085 matching lines...) | 
|  1640         void swap(AllocatorProvider& other) { } |  1640         void swap(AllocatorProvider& other) { } | 
|  1641     }; |  1641     }; | 
|  1642  |  1642  | 
|  1643     void deallocate(void* dummy) { } |  1643     void deallocate(void* dummy) { } | 
|  1644  |  1644  | 
|  1645     // This is not a static method even though it could be, because it needs to |  1645     // This is not a static method even though it could be, because it needs to | 
|  1646     // match the one that the (off-heap) ListHashSetAllocator has.  The 'this' |  1646     // match the one that the (off-heap) ListHashSetAllocator has.  The 'this' | 
|  1647     // pointer will always be null. |  1647     // pointer will always be null. | 
|  1648     void* allocateNode() |  1648     void* allocateNode() | 
|  1649     { |  1649     { | 
|  1650         COMPILE_ASSERT(!WTF::IsWeak<ValueArg>::value, WeakPointersInAListHashSetWillJustResultInNullEntriesInTheSetThatsNotWhatYouWantConsiderUsingLinkedHashSetInstead); |  1650         // Consider using a LinkedHashSet instead if this compile-time assert fails: | 
 |  1651         static_assert(!WTF::IsWeak<ValueArg>::value, "weak pointers in a ListHashSet will result in null entries in the set"); | 
 |  1652  | 
|  1651         return malloc<void*, Node>(sizeof(Node)); |  1653         return malloc<void*, Node>(sizeof(Node)); | 
|  1652     } |  1654     } | 
|  1653  |  1655  | 
|  1654     static void traceValue(Visitor* visitor, Node* node) |  1656     static void traceValue(Visitor* visitor, Node* node) | 
|  1655     { |  1657     { | 
|  1656         traceListHashSetValue(visitor, node->m_value); |  1658         traceListHashSetValue(visitor, node->m_value); | 
|  1657     } |  1659     } | 
|  1658 }; |  1660 }; | 
|  1659  |  1661  | 
|  1660 // FIXME: These should just be template aliases: |  1662 // FIXME: These should just be template aliases: | 
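A minimal, self-contained illustration of the alias-template pattern the FIXME above asks for, with std::vector and the default allocator standing in for WTF::Vector and HeapAllocator (the real definitions follow in the skipped lines):

    #include <vector>

    // The "Heap" flavour becomes a thin alias over the underlying collection with
    // the heap allocator baked in, instead of a separately defined class.
    template<typename T, typename Allocator = std::allocator<T>>
    using HeapVectorSketch = std::vector<T, Allocator>;

    HeapVectorSketch<int> exampleVector;  // behaves exactly like the underlying vector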
| (...skipping 468 matching lines...) | 
|  2129 // Vector backing that needs marking. We don't support weak members in vectors. |  2131 // Vector backing that needs marking. We don't support weak members in vectors. | 
|  2130 template<ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits> |  2132 template<ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits> | 
|  2131 struct TraceInCollectionTrait<NoWeakHandlingInCollections, strongify, blink::HeapVectorBacking<T, Traits>, void> { |  2133 struct TraceInCollectionTrait<NoWeakHandlingInCollections, strongify, blink::HeapVectorBacking<T, Traits>, void> { | 
|  2132     static bool trace(blink::Visitor* visitor, void* self) |  2134     static bool trace(blink::Visitor* visitor, void* self) | 
|  2133     { |  2135     { | 
|  2134         // The allocator can oversize the allocation a little, according to |  2136         // The allocator can oversize the allocation a little, according to | 
|  2135         // the allocation granularity.  The extra size is included in the |  2137         // the allocation granularity.  The extra size is included in the | 
|  2136         // payloadSize call below, since there is nowhere to store the |  2138         // payloadSize call below, since there is nowhere to store the | 
|  2137         // originally allocated memory.  This assert ensures that visiting the |  2139         // originally allocated memory.  This assert ensures that visiting the | 
|  2138         // last bit of memory can't cause trouble. |  2140         // last bit of memory can't cause trouble. | 
|  2139         COMPILE_ASSERT(!ShouldBeTraced<Traits>::value || sizeof(T) > blink::allocationGranularity || Traits::canInitializeWithMemset, HeapOverallocationCanCauseSpuriousVisits); |  2141         static_assert(!ShouldBeTraced<Traits>::value || sizeof(T) > blink::allocationGranularity || Traits::canInitializeWithMemset, "heap overallocation can cause spurious visits"); | 
|  2140  |  2142  | 
|  2141         T* array = reinterpret_cast<T*>(self); |  2143         T* array = reinterpret_cast<T*>(self); | 
|  2142         blink::GeneralHeapObjectHeader* header = blink::GeneralHeapObjectHeader::fromPayload(self); |  2144         blink::GeneralHeapObjectHeader* header = blink::GeneralHeapObjectHeader::fromPayload(self); | 
|  2143         // Use the payload size as recorded by the heap to determine how many |  2145         // Use the payload size as recorded by the heap to determine how many | 
|  2144         // elements to mark. |  2146         // elements to mark. | 
|  2145         size_t length = header->payloadSize() / sizeof(T); |  2147         size_t length = header->payloadSize() / sizeof(T); | 
|  2146         for (size_t i = 0; i < length; ++i) |  2148         for (size_t i = 0; i < length; ++i) | 
|  2147             blink::CollectionBackingTraceTrait<ShouldBeTraced<Traits>::value, Traits::weakHandlingFlag, WeakPointersActStrong, T, Traits>::trace(visitor, array[i]); |  2149             blink::CollectionBackingTraceTrait<ShouldBeTraced<Traits>::value, Traits::weakHandlingFlag, WeakPointersActStrong, T, Traits>::trace(visitor, array[i]); | 
|  2148         return false; |  2150         return false; | 
|  2149     } |  2151     } | 
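The assert above is easier to see with numbers. A small worked example, under assumed sizes, of how recomputing the element count from a granularity-rounded payloadSize can over-count and visit a slot that was never constructed:

    #include <cstddef>
    #include <iostream>

    int main()
    {
        // Assumed numbers for illustration only.
        const std::size_t allocationGranularity = 8;
        const std::size_t sizeOfT = 4;                // a small element type that needs tracing
        const std::size_t requestedElements = 3;

        // The heap may round the payload up to the allocation granularity...
        std::size_t requestedBytes = requestedElements * sizeOfT;                       // 12
        std::size_t payloadSize =
            ((requestedBytes + allocationGranularity - 1) / allocationGranularity)
            * allocationGranularity;                                                     // 16

        // ...so length = payloadSize / sizeof(T) over-counts by one element, which is
        // only safe if the extra slot was zero-filled (canInitializeWithMemset).
        std::size_t tracedElements = payloadSize / sizeOfT;                              // 4
        std::cout << (tracedElements - requestedElements) << " extra slot(s) visited\n";
        return 0;
    }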
| (...skipping 75 matching lines...) | 
|  2225         // side is alive, which causes the strong pointer on the key side to be |  2227         // side is alive, which causes the strong pointer on the key side to be | 
|  2226         // marked.  If that then results in the object pointed to by the weak |  2228         // marked.  If that then results in the object pointed to by the weak | 
|  2227         // pointer on the value side being marked live, then the whole |  2229         // pointer on the value side being marked live, then the whole | 
|  2228         // key-value entry is leaked.  To avoid unexpected leaking, we disallow |  2230         // key-value entry is leaked.  To avoid unexpected leaking, we disallow | 
|  2229         // this case, but if you run into this assert, please reach out to Blink |  2231         // this case, but if you run into this assert, please reach out to Blink | 
|  2230         // reviewers, and we may relax it. |  2232         // reviewers, and we may relax it. | 
|  2231         const bool keyIsWeak = Traits::KeyTraits::weakHandlingFlag == WeakHandlingInCollections; |  2233         const bool keyIsWeak = Traits::KeyTraits::weakHandlingFlag == WeakHandlingInCollections; | 
|  2232         const bool valueIsWeak = Traits::ValueTraits::weakHandlingFlag == WeakHandlingInCollections; |  2234         const bool valueIsWeak = Traits::ValueTraits::weakHandlingFlag == WeakHandlingInCollections; | 
|  2233         const bool keyHasStrongRefs = ShouldBeTraced<typename Traits::KeyTraits>::value; |  2235         const bool keyHasStrongRefs = ShouldBeTraced<typename Traits::KeyTraits>::value; | 
|  2234         const bool valueHasStrongRefs = ShouldBeTraced<typename Traits::ValueTraits>::value; |  2236         const bool valueHasStrongRefs = ShouldBeTraced<typename Traits::ValueTraits>::value; | 
|  2235         COMPILE_ASSERT(!keyIsWeak || !valueIsWeak || !keyHasStrongRefs || !valueHasStrongRefs, ThisConfigurationWasDisallowedToAvoidUnexpectedLeaks); |  2237         static_assert(!keyIsWeak || !valueIsWeak || !keyHasStrongRefs || !valueHasStrongRefs, "this configuration is disallowed to avoid unexpected leaks"); | 
|  2236         if ((valueIsWeak && !keyIsWeak) || (valueIsWeak && keyIsWeak && !valueHasStrongRefs)) { |  2238         if ((valueIsWeak && !keyIsWeak) || (valueIsWeak && keyIsWeak && !valueHasStrongRefs)) { | 
|  2237             // Check value first. |  2239             // Check value first. | 
|  2238             bool deadWeakObjectsFoundOnValueSide = blink::CollectionBackingTraceTrait<ShouldBeTraced<typename Traits::ValueTraits>::value, Traits::ValueTraits::weakHandlingFlag, strongify, Value, typename Traits::ValueTraits>::trace(visitor, self.value); |  2240             bool deadWeakObjectsFoundOnValueSide = blink::CollectionBackingTraceTrait<ShouldBeTraced<typename Traits::ValueTraits>::value, Traits::ValueTraits::weakHandlingFlag, strongify, Value, typename Traits::ValueTraits>::trace(visitor, self.value); | 
|  2239             if (deadWeakObjectsFoundOnValueSide) |  2241             if (deadWeakObjectsFoundOnValueSide) | 
|  2240                 return true; |  2242                 return true; | 
|  2241             return blink::CollectionBackingTraceTrait<ShouldBeTraced<typename Traits::KeyTraits>::value, Traits::KeyTraits::weakHandlingFlag, strongify, Key, typename Traits::KeyTraits>::trace(visitor, self.key); |  2243             return blink::CollectionBackingTraceTrait<ShouldBeTraced<typename Traits::KeyTraits>::value, Traits::KeyTraits::weakHandlingFlag, strongify, Key, typename Traits::KeyTraits>::trace(visitor, self.key); | 
|  2242         } |  2244         } | 
|  2243         // Check key first. |  2245         // Check key first. | 
|  2244         bool deadWeakObjectsFoundOnKeySide = blink::CollectionBackingTraceTrait<ShouldBeTraced<typename Traits::KeyTraits>::value, Traits::KeyTraits::weakHandlingFlag, strongify, Key, typename Traits::KeyTraits>::trace(visitor, self.key); |  2246         bool deadWeakObjectsFoundOnKeySide = blink::CollectionBackingTraceTrait<ShouldBeTraced<typename Traits::KeyTraits>::value, Traits::KeyTraits::weakHandlingFlag, strongify, Key, typename Traits::KeyTraits>::trace(visitor, self.key); | 
|  2245         if (deadWeakObjectsFoundOnKeySide) |  2247         if (deadWeakObjectsFoundOnKeySide) | 
| (...skipping 53 matching lines...) | 
|  2299 }; |  2301 }; | 
|  2300  |  2302  | 
|  2301 template<typename T> |  2303 template<typename T> | 
|  2302 static void verifyGarbageCollectedIfMember(T*) |  2304 static void verifyGarbageCollectedIfMember(T*) | 
|  2303 { |  2305 { | 
|  2304 } |  2306 } | 
|  2305  |  2307  | 
|  2306 template<typename T> |  2308 template<typename T> | 
|  2307 static void verifyGarbageCollectedIfMember(Member<T>* t) |  2309 static void verifyGarbageCollectedIfMember(Member<T>* t) | 
|  2308 { |  2310 { | 
|  2309     COMPILE_ASSERT_IS_GARBAGE_COLLECTED(T, NonGarbageCollectedObjectInMember); |  2311     STATIC_ASSERT_IS_GARBAGE_COLLECTED(T, "non garbage collected object in member"); | 
|  2310 } |  2312 } | 
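verifyGarbageCollectedIfMember relies on overload resolution: the generic overload checks nothing, while the Member<T>* overload fires a compile-time check. A hedged sketch of the pattern with stand-in names (the real STATIC_ASSERT_IS_GARBAGE_COLLECTED macro and Member live elsewhere):

    // Stand-ins for Member<T> and the is-garbage-collected trait.
    template<typename T> struct MemberSketch { T* raw; };
    template<typename T> struct IsGarbageCollectedSketch { static const bool value = false; };

    // Non-Member pointers: nothing to verify.
    template<typename T>
    void verifyGarbageCollectedIfMemberSketch(T*) { }

    // Member<T> pointers: instantiating this overload checks T at compile time.
    template<typename T>
    void verifyGarbageCollectedIfMemberSketch(MemberSketch<T>*)
    {
        static_assert(IsGarbageCollectedSketch<T>::value,
                      "non garbage collected object in member");
    }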
|  2311  |  2313  | 
|  2312 // Specialization for things that either need marking or have weak pointers or |  2314 // Specialization for things that either need marking or have weak pointers or | 
|  2313 // both. |  2315 // both. | 
|  2314 template<bool needsTracing, WTF::WeakHandlingFlag weakHandlingFlag, WTF::ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits> |  2316 template<bool needsTracing, WTF::WeakHandlingFlag weakHandlingFlag, WTF::ShouldWeakPointersBeMarkedStrongly strongify, typename T, typename Traits> | 
|  2315 struct CollectionBackingTraceTrait { |  2317 struct CollectionBackingTraceTrait { | 
|  2316     static bool trace(Visitor* visitor, T&t) |  2318     static bool trace(Visitor* visitor, T&t) | 
|  2317     { |  2319     { | 
|  2318         verifyGarbageCollectedIfMember(reinterpret_cast<T*>(0)); |  2320         verifyGarbageCollectedIfMember(reinterpret_cast<T*>(0)); | 
|  2319         return WTF::TraceInCollectionTrait<weakHandlingFlag, strongify, T, Traits>::trace(visitor, t); |  2321         return WTF::TraceInCollectionTrait<weakHandlingFlag, strongify, T, Traits>::trace(visitor, t); | 
| (...skipping 24 matching lines...) | 
|  2344     { |  2346     { | 
|  2345         return t.traceInCollection(visitor, strongify); |  2347         return t.traceInCollection(visitor, strongify); | 
|  2346     } |  2348     } | 
|  2347 }; |  2349 }; | 
|  2348  |  2350  | 
|  2349 template<typename T, typename Traits> |  2351 template<typename T, typename Traits> | 
|  2350 struct TraceTrait<HeapVectorBacking<T, Traits>> { |  2352 struct TraceTrait<HeapVectorBacking<T, Traits>> { | 
|  2351     using Backing = HeapVectorBacking<T, Traits>; |  2353     using Backing = HeapVectorBacking<T, Traits>; | 
|  2352     static void trace(Visitor* visitor, void* self) |  2354     static void trace(Visitor* visitor, void* self) | 
|  2353     { |  2355     { | 
|  2354         COMPILE_ASSERT(!WTF::IsWeak<T>::value, WeDontSupportWeaknessInHeapVectorsOrDeques); |  2356         static_assert(!WTF::IsWeak<T>::value, "weakness in HeapVectors and Deques is not supported"); | 
|  2355         if (WTF::ShouldBeTraced<Traits>::value) |  2357         if (WTF::ShouldBeTraced<Traits>::value) | 
|  2356             WTF::TraceInCollectionTrait<WTF::NoWeakHandlingInCollections, WTF::WeakPointersActWeak, HeapVectorBacking<T, Traits>, void>::trace(visitor, self); |  2358             WTF::TraceInCollectionTrait<WTF::NoWeakHandlingInCollections, WTF::WeakPointersActWeak, HeapVectorBacking<T, Traits>, void>::trace(visitor, self); | 
|  2357     } |  2359     } | 
|  2358     static void mark(Visitor* visitor, const Backing* backing) |  2360     static void mark(Visitor* visitor, const Backing* backing) | 
|  2359     { |  2361     { | 
|  2360         visitor->mark(backing, &trace); |  2362         visitor->mark(backing, &trace); | 
|  2361     } |  2363     } | 
|  2362     static void checkGCInfo(Visitor* visitor, const Backing* backing) |  2364     static void checkGCInfo(Visitor* visitor, const Backing* backing) | 
|  2363     { |  2365     { | 
|  2364 #if ENABLE(ASSERT) |  2366 #if ENABLE(ASSERT) | 
| (...skipping 59 matching lines...) | 
|  2424 template<typename T, size_t inlineCapacity> |  2426 template<typename T, size_t inlineCapacity> | 
|  2425 struct GCInfoTrait<HeapVector<T, inlineCapacity>> : public GCInfoTrait<Vector<T, inlineCapacity, HeapAllocator>> { }; |  2427 struct GCInfoTrait<HeapVector<T, inlineCapacity>> : public GCInfoTrait<Vector<T, inlineCapacity, HeapAllocator>> { }; | 
|  2426 template<typename T, size_t inlineCapacity> |  2428 template<typename T, size_t inlineCapacity> | 
|  2427 struct GCInfoTrait<HeapDeque<T, inlineCapacity>> : public GCInfoTrait<Deque<T, inlineCapacity, HeapAllocator>> { }; |  2429 struct GCInfoTrait<HeapDeque<T, inlineCapacity>> : public GCInfoTrait<Deque<T, inlineCapacity, HeapAllocator>> { }; | 
|  2428 template<typename T, typename U, typename V> |  2430 template<typename T, typename U, typename V> | 
|  2429 struct GCInfoTrait<HeapHashCountedSet<T, U, V>> : public GCInfoTrait<HashCountedSet<T, U, V, HeapAllocator>> { }; |  2431 struct GCInfoTrait<HeapHashCountedSet<T, U, V>> : public GCInfoTrait<HashCountedSet<T, U, V, HeapAllocator>> { }; | 
|  2430  |  2432  | 
|  2431 } // namespace blink |  2433 } // namespace blink | 
|  2432  |  2434  | 
|  2433 #endif // Heap_h |  2435 #endif // Heap_h | 