| OLD | NEW |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef HeapAllocator_h | 5 #ifndef HeapAllocator_h |
| 6 #define HeapAllocator_h | 6 #define HeapAllocator_h |
| 7 | 7 |
| 8 #include "platform/heap/Heap.h" | 8 #include "platform/heap/Heap.h" |
| 9 #include "platform/heap/TraceTraits.h" | 9 #include "platform/heap/TraceTraits.h" |
| 10 #include "wtf/Allocator.h" | 10 #include "wtf/Allocator.h" |
| (...skipping 15 matching lines...) |
| 26 class PLATFORM_EXPORT HeapAllocator { | 26 class PLATFORM_EXPORT HeapAllocator { |
| 27 STATIC_ONLY(HeapAllocator); | 27 STATIC_ONLY(HeapAllocator); |
| 28 public: | 28 public: |
| 29 using Visitor = blink::Visitor; | 29 using Visitor = blink::Visitor; |
| 30 static const bool isGarbageCollected = true; | 30 static const bool isGarbageCollected = true; |
| 31 | 31 |
| 32 template<typename T> | 32 template<typename T> |
| 33 static size_t quantizedSize(size_t count) | 33 static size_t quantizedSize(size_t count) |
| 34 { | 34 { |
| 35 RELEASE_ASSERT(count <= maxHeapObjectSize / sizeof(T)); | 35 RELEASE_ASSERT(count <= maxHeapObjectSize / sizeof(T)); |
| 36 return Heap::allocationSizeFromSize(count * sizeof(T)) - sizeof(HeapObjectHeader); | 36 return ThreadHeap::allocationSizeFromSize(count * sizeof(T)) - sizeof(HeapObjectHeader); |
| 37 } | 37 } |
| 38 template <typename T> | 38 template <typename T> |
| 39 static T* allocateVectorBacking(size_t size) | 39 static T* allocateVectorBacking(size_t size) |
| 40 { | 40 { |
| 41 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 41 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
| 42 ASSERT(state->isAllocationAllowed()); | 42 ASSERT(state->isAllocationAllowed()); |
| 43 size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index(); | 43 size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index(); |
| 44 NormalPageArena* arena = static_cast<NormalPageArena*>(state->vectorBackingArena(gcInfoIndex)); | 44 NormalPageArena* arena = static_cast<NormalPageArena*>(state->vectorBackingArena(gcInfoIndex)); |
| 45 return reinterpret_cast<T*>(arena->allocateObject(Heap::allocationSizeFromSize(size), gcInfoIndex)); | 45 return reinterpret_cast<T*>(arena->allocateObject(ThreadHeap::allocationSizeFromSize(size), gcInfoIndex)); |
| 46 } | 46 } |
| 47 template <typename T> | 47 template <typename T> |
| 48 static T* allocateExpandedVectorBacking(size_t size) | 48 static T* allocateExpandedVectorBacking(size_t size) |
| 49 { | 49 { |
| 50 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 50 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
| 51 ASSERT(state->isAllocationAllowed()); | 51 ASSERT(state->isAllocationAllowed()); |
| 52 size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index(); | 52 size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index(); |
| 53 NormalPageArena* arena = static_cast<NormalPageArena*>(state->expandedVectorBackingArena(gcInfoIndex)); | 53 NormalPageArena* arena = static_cast<NormalPageArena*>(state->expandedVectorBackingArena(gcInfoIndex)); |
| 54 return reinterpret_cast<T*>(arena->allocateObject(Heap::allocationSizeFromSize(size), gcInfoIndex)); | 54 return reinterpret_cast<T*>(arena->allocateObject(ThreadHeap::allocationSizeFromSize(size), gcInfoIndex)); |
| 55 } | 55 } |
| 56 static void freeVectorBacking(void*); | 56 static void freeVectorBacking(void*); |
| 57 static bool expandVectorBacking(void*, size_t); | 57 static bool expandVectorBacking(void*, size_t); |
| 58 static bool shrinkVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize); | 58 static bool shrinkVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize); |
| 59 template <typename T> | 59 template <typename T> |
| 60 static T* allocateInlineVectorBacking(size_t size) | 60 static T* allocateInlineVectorBacking(size_t size) |
| 61 { | 61 { |
| 62 size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index(); | 62 size_t gcInfoIndex = GCInfoTrait<HeapVectorBacking<T, VectorTraits<T>>>::index(); |
| 63 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 63 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
| 64 #define COMMA , | 64 #define COMMA , |
| 65 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(HeapVectorBacking<T COMMA VectorTraits<T>>); | 65 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(HeapVectorBacking<T COMMA VectorTraits<T>>); |
| 66 #undef COMMA | 66 #undef COMMA |
| 67 return reinterpret_cast<T*>(Heap::allocateOnArenaIndex(state, size, BlinkGC::InlineVectorArenaIndex, gcInfoIndex, typeName)); | 67 return reinterpret_cast<T*>(ThreadHeap::allocateOnArenaIndex(state, size, BlinkGC::InlineVectorArenaIndex, gcInfoIndex, typeName)); |
| 68 } | 68 } |
| 69 static void freeInlineVectorBacking(void*); | 69 static void freeInlineVectorBacking(void*); |
| 70 static bool expandInlineVectorBacking(void*, size_t); | 70 static bool expandInlineVectorBacking(void*, size_t); |
| 71 static bool shrinkInlineVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize); | 71 static bool shrinkInlineVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize); |
| 72 | 72 |
| 73 template <typename T, typename HashTable> | 73 template <typename T, typename HashTable> |
| 74 static T* allocateHashTableBacking(size_t size) | 74 static T* allocateHashTableBacking(size_t size) |
| 75 { | 75 { |
| 76 size_t gcInfoIndex = GCInfoTrait<HeapHashTableBacking<HashTable>>::index(); | 76 size_t gcInfoIndex = GCInfoTrait<HeapHashTableBacking<HashTable>>::index(); |
| 77 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); | 77 ThreadState* state = ThreadStateFor<ThreadingTrait<T>::Affinity>::state(); |
| 78 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(HeapHashTableBacking<HashTable>); | 78 const char* typeName = WTF_HEAP_PROFILER_TYPE_NAME(HeapHashTableBacking<HashTable>); |
| 79 return reinterpret_cast<T*>(Heap::allocateOnArenaIndex(state, size, BlinkGC::HashTableArenaIndex, gcInfoIndex, typeName)); | 79 return reinterpret_cast<T*>(ThreadHeap::allocateOnArenaIndex(state, size, BlinkGC::HashTableArenaIndex, gcInfoIndex, typeName)); |
| 80 } | 80 } |
| 81 template <typename T, typename HashTable> | 81 template <typename T, typename HashTable> |
| 82 static T* allocateZeroedHashTableBacking(size_t size) | 82 static T* allocateZeroedHashTableBacking(size_t size) |
| 83 { | 83 { |
| 84 return allocateHashTableBacking<T, HashTable>(size); | 84 return allocateHashTableBacking<T, HashTable>(size); |
| 85 } | 85 } |
| 86 static void freeHashTableBacking(void* address); | 86 static void freeHashTableBacking(void* address); |
| 87 static bool expandHashTableBacking(void*, size_t); | 87 static bool expandHashTableBacking(void*, size_t); |
| 88 | 88 |
| 89 template <typename Return, typename Metadata> | 89 template <typename Return, typename Metadata> |
| 90 static Return malloc(size_t size, const char* typeName) | 90 static Return malloc(size_t size, const char* typeName) |
| 91 { | 91 { |
| 92 return reinterpret_cast<Return>(Heap::allocate<Metadata>(size, IsEagerlyFinalizedType<Metadata>::value)); | 92 return reinterpret_cast<Return>(ThreadHeap::allocate<Metadata>(size, IsEagerlyFinalizedType<Metadata>::value)); |
| 93 } | 93 } |
| 94 static void free(void* address) { } | 94 static void free(void* address) { } |
| 95 template<typename T> | 95 template<typename T> |
| 96 static void* newArray(size_t bytes) | 96 static void* newArray(size_t bytes) |
| 97 { | 97 { |
| 98 ASSERT_NOT_REACHED(); | 98 ASSERT_NOT_REACHED(); |
| 99 return 0; | 99 return 0; |
| 100 } | 100 } |
| 101 | 101 |
| 102 static void deleteArray(void* ptr) | 102 static void deleteArray(void* ptr) |
| 103 { | 103 { |
| 104 ASSERT_NOT_REACHED(); | 104 ASSERT_NOT_REACHED(); |
| 105 } | 105 } |
| 106 | 106 |
| 107 static bool isAllocationAllowed() | 107 static bool isAllocationAllowed() |
| 108 { | 108 { |
| 109 return ThreadState::current()->isAllocationAllowed(); | 109 return ThreadState::current()->isAllocationAllowed(); |
| 110 } | 110 } |
| 111 | 111 |
| 112 template<typename T> | 112 template<typename T> |
| 113 static bool isHeapObjectAlive(T* object) | 113 static bool isHeapObjectAlive(T* object) |
| 114 { | 114 { |
| 115 return Heap::isHeapObjectAlive(object); | 115 return ThreadHeap::isHeapObjectAlive(object); |
| 116 } | 116 } |
| 117 | 117 |
| 118 template<typename VisitorDispatcher> | 118 template<typename VisitorDispatcher> |
| 119 static void markNoTracing(VisitorDispatcher visitor, const void* t) { visitor->markNoTracing(t); } | 119 static void markNoTracing(VisitorDispatcher visitor, const void* t) { visitor->markNoTracing(t); } |
| 120 | 120 |
| 121 template<typename VisitorDispatcher, typename T, typename Traits> | 121 template<typename VisitorDispatcher, typename T, typename Traits> |
| 122 static void trace(VisitorDispatcher visitor, T& t) | 122 static void trace(VisitorDispatcher visitor, T& t) |
| 123 { | 123 { |
| 124 TraceCollectionIfEnabled<WTF::NeedsTracingTrait<Traits>::value, Traits::weakHandlingFlag, WTF::WeakPointersActWeak, T, Traits>::trace(visitor, t); | 124 TraceCollectionIfEnabled<WTF::NeedsTracingTrait<Traits>::value, Traits::weakHandlingFlag, WTF::WeakPointersActWeak, T, Traits>::trace(visitor, t); |
| 125 } | 125 } |
| (...skipping 387 matching lines...) |
| 513 static PeekOutType peek(const blink::WeakMember<T>& value) { return value; } | 513 static PeekOutType peek(const blink::WeakMember<T>& value) { return value; } |
| 514 static PassOutType passOut(const blink::WeakMember<T>& value) { return value; } | 514 static PassOutType passOut(const blink::WeakMember<T>& value) { return value; } |
| 515 | 515 |
| 516 template<typename VisitorDispatcher> | 516 template<typename VisitorDispatcher> |
| 517 static bool traceInCollection(VisitorDispatcher visitor, blink::WeakMember<T>& weakMember, ShouldWeakPointersBeMarkedStrongly strongify) | 517 static bool traceInCollection(VisitorDispatcher visitor, blink::WeakMember<T>& weakMember, ShouldWeakPointersBeMarkedStrongly strongify) |
| 518 { | 518 { |
| 519 if (strongify == WeakPointersActStrong) { | 519 if (strongify == WeakPointersActStrong) { |
| 520 visitor->trace(weakMember.get()); // Strongified visit. | 520 visitor->trace(weakMember.get()); // Strongified visit. |
| 521 return false; | 521 return false; |
| 522 } | 522 } |
| 523 return !blink::Heap::isHeapObjectAlive(weakMember); | 523 return !blink::ThreadHeap::isHeapObjectAlive(weakMember); |
| 524 } | 524 } |
| 525 }; | 525 }; |
| 526 | 526 |
| 527 template<typename T> struct HashTraits<blink::UntracedMember<T>> : SimpleClassHashTraits<blink::UntracedMember<T>> { | 527 template<typename T> struct HashTraits<blink::UntracedMember<T>> : SimpleClassHashTraits<blink::UntracedMember<T>> { |
| 528 STATIC_ONLY(HashTraits); | 528 STATIC_ONLY(HashTraits); |
| 529 static const bool needsDestruction = false; | 529 static const bool needsDestruction = false; |
| 530 // FIXME: The distinction between PeekInType and PassInType is there for | 530 // FIXME: The distinction between PeekInType and PassInType is there for |
| 531 // the sake of the reference counting handles. When they are gone the two | 531 // the sake of the reference counting handles. When they are gone the two |
| 532 // types can be merged into PassInType. | 532 // types can be merged into PassInType. |
| 533 // FIXME: Implement proper const'ness for iterator types. | 533 // FIXME: Implement proper const'ness for iterator types. |
| (...skipping 23 matching lines...) |
| 557 static_assert(sizeof(T), "T must be fully defined"); | 557 static_assert(sizeof(T), "T must be fully defined"); |
| 558 // All heap allocated node pointers need visiting to keep the nodes alive, | 558 // All heap allocated node pointers need visiting to keep the nodes alive, |
| 559 // regardless of whether they contain pointers to other heap allocated | 559 // regardless of whether they contain pointers to other heap allocated |
| 560 // objects. | 560 // objects. |
| 561 static const bool value = true; | 561 static const bool value = true; |
| 562 }; | 562 }; |
| 563 | 563 |
| 564 } // namespace WTF | 564 } // namespace WTF |
| 565 | 565 |
| 566 #endif | 566 #endif |
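For context on how the allocator hooks in this header get exercised, here is a minimal, illustrative sketch, not part of this change: a garbage-collected class (the class and member names below are hypothetical) holding a HeapVector<Member<...>> whose backing store is obtained through HeapAllocator::allocateVectorBacking / expandVectorBacking and kept alive via the trace() path shown above.

// Illustrative sketch only -- hypothetical class, not part of this CL.
#include "platform/heap/Handle.h"

namespace blink {

class ExampleNode : public GarbageCollected<ExampleNode> {
public:
    void addChild(ExampleNode* child)
    {
        // Appending may allocate or grow the vector backing store through
        // HeapAllocator::allocateVectorBacking / expandVectorBacking.
        m_children.append(child);
    }

    DEFINE_INLINE_TRACE()
    {
        // Marks the backing store and its Member<> slots during GC.
        visitor->trace(m_children);
    }

private:
    HeapVector<Member<ExampleNode>> m_children;
};

} // namespace blink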