Index: Source/wtf/DefaultAllocator.h
diff --git a/Source/wtf/DefaultAllocator.h b/Source/wtf/DefaultAllocator.h
index 48160e93b6778b05f91662effde83ccbd8acbdfb..7ff1254149398bbb3d6b3306f5b455ec90d0581f 100644
--- a/Source/wtf/DefaultAllocator.h
+++ b/Source/wtf/DefaultAllocator.h
@@ -42,6 +42,12 @@
 
 #include <string.h>
 
+#define BUFFER_ALLOCATOR_DEBUG 0
+
+namespace blink {
+class BufferAllocator;
+}
+
 namespace WTF {
 
 class DefaultAllocatorDummyVisitor;
@@ -51,63 +57,229 @@ public:
     typedef DefaultAllocatorDummyVisitor Visitor;
     static const bool isGarbageCollected = false;
 
-    template<typename T>
-    static size_t quantizedSize(size_t count)
+    static size_t (*s_quantizedSize)(size_t, size_t);
+    static void* (*s_allocateVectorBacking)(size_t, void*);
+    static void* (*s_allocateExpandedVectorBacking)(size_t, void*);
+    static void (*s_freeVectorBacking)(void*);
+    static bool (*s_expandVectorBacking)(void*, size_t);
+    static bool (*s_shrinkVectorBacking)(void*, size_t, size_t);
+    static void* (*s_allocateInlineVectorBacking)(size_t, void*);
+    static void (*s_freeInlineVectorBacking)(void*);
+    static bool (*s_expandInlineVectorBacking)(void*, size_t);
+    static bool (*s_shrinkInlineVectorBacking)(void*, size_t, size_t);
+    static void* (*s_allocateHashTableBacking)(size_t, void*);
+    static void* (*s_allocateZeroedHashTableBacking)(size_t, void*);
+    static void* (*s_allocateExpandedHashTableBacking)(size_t, void*);
+    static void* (*s_allocateZeroedExpandedHashTableBacking)(size_t, void*);
+    static void (*s_freeHashTableBacking)(void*);
+    static bool (*s_expandHashTableBacking)(void*, size_t);
+    static void* (*s_allocateBufferStringBacking)(size_t, void*);
+    static void (*s_freeBufferStringBacking)(void*);
+    static bool (*s_expandBufferStringBacking)(void*, size_t);
+    static bool (*s_shrinkBufferStringBacking)(void*, size_t, size_t);
+
+    static void initialize(size_t (*quantizedSize)(size_t, size_t)
+        , void* (*allocateVectorBacking)(size_t, void*)
+        , void* (*allocateExpandedVectorBacking)(size_t, void*)
+        , void (*freeVectorBacking)(void*)
+        , bool (*expandVectorBacking)(void*, size_t)
+        , bool (*shrinkVectorBacking)(void*, size_t, size_t)
+        , void* (*allocateInlineVectorBacking)(size_t, void*)
+        , void (*freeInlineVectorBacking)(void*)
+        , bool (*expandInlineVectorBacking)(void*, size_t)
+        , bool (*shrinkInlineVectorBacking)(void*, size_t, size_t)
+        , void* (*allocateHashTableBacking)(size_t, void*)
+        , void* (*allocateZeroedHashTableBacking)(size_t, void*)
+        , void* (*allocateExpandedHashTableBacking)(size_t, void*)
+        , void* (*allocateZeroedExpandedHashTableBacking)(size_t, void*)
+        , void (*freeHashTableBacking)(void*)
+        , bool (*expandHashTableBacking)(void*, size_t)
+        , void* (*allocateBufferStringBacking)(size_t, void*)
+        , void (*freeBufferStringBacking)(void*)
+        , bool (*expandBufferStringBacking)(void*, size_t)
+        , bool (*shrinkBufferStringBacking)(void*, size_t, size_t));
+
+    static size_t quantizedSize(size_t count, size_t elementSize)
     {
-        RELEASE_ASSERT(count <= kGenericMaxDirectMapped / sizeof(T));
-        return partitionAllocActualSize(Partitions::bufferPartition(), count * sizeof(T));
+        return s_quantizedSize(count, elementSize);
     }
+
     template <typename T>
-    static T* allocateVectorBacking(size_t size)
+    static T* allocateVectorBacking(size_t size, void* holder)
     {
-        return reinterpret_cast<T*>(allocateBacking(size));
+        return reinterpret_cast<T*>(s_allocateVectorBacking(size, holder));
     }
     template <typename T>
-    static T* allocateExpandedVectorBacking(size_t size)
+    static T* allocateExpandedVectorBacking(size_t size, void* holder)
     {
-        return reinterpret_cast<T*>(allocateBacking(size));
+        return reinterpret_cast<T*>(s_allocateExpandedVectorBacking(size, holder));
     }
-    static void freeVectorBacking(void* address);
-    static inline bool expandVectorBacking(void*, size_t)
+    static void freeVectorBacking(void* address)
     {
-        return false;
+        s_freeVectorBacking(address);
+    }
+    static inline bool expandVectorBacking(void* address, size_t size)
+    {
+        return s_expandVectorBacking(address, size);
     }
     static inline bool shrinkVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
     {
-        // Optimization: if we're downsizing inside the same allocator bucket,
-        // we can skip reallocation.
-        return quantizedCurrentSize == quantizedShrunkSize;
+        return s_shrinkVectorBacking(address, quantizedCurrentSize, quantizedShrunkSize);
     }
     template <typename T>
-    static T* allocateInlineVectorBacking(size_t size) { return allocateVectorBacking<T>(size); }
-    static inline void freeInlineVectorBacking(void* address) { freeVectorBacking(address); }
-    static inline bool expandInlineVectorBacking(void*, size_t) { return false; }
-    static inline bool shrinkInlineVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize) { return shrinkVectorBacking(address, quantizedCurrentSize, quantizedShrunkSize); }
-
+    static T* allocateInlineVectorBacking(size_t size, void* holder)
+    {
+        return reinterpret_cast<T*>(s_allocateInlineVectorBacking(size, holder));
+    }
+    static inline void freeInlineVectorBacking(void* address)
+    {
+        s_freeInlineVectorBacking(address);
+    }
+    static inline bool expandInlineVectorBacking(void* address, size_t size)
+    {
+        return s_expandInlineVectorBacking(address, size);
+    }
+    static inline bool shrinkInlineVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
+    {
+        return s_shrinkInlineVectorBacking(address, quantizedCurrentSize, quantizedShrunkSize);
+    }
+    template <typename T, typename HashTable>
+    static T* allocateHashTableBacking(size_t size, void* holder)
+    {
+        return reinterpret_cast<T*>(s_allocateHashTableBacking(size, holder));
+    }
+    template <typename T, typename HashTable>
+    static T* allocateZeroedHashTableBacking(size_t size, void* holder)
+    {
+        return reinterpret_cast<T*>(s_allocateZeroedHashTableBacking(size, holder));
+    }
     template <typename T, typename HashTable>
-    static T* allocateHashTableBacking(size_t size)
+    static T* allocateExpandedHashTableBacking(size_t size, void* holder)
     {
-        return reinterpret_cast<T*>(allocateBacking(size));
+        return reinterpret_cast<T*>(s_allocateExpandedHashTableBacking(size, holder));
     }
     template <typename T, typename HashTable>
-    static T* allocateZeroedHashTableBacking(size_t size)
+    static T* allocateZeroedExpandedHashTableBacking(size_t size, void* holder)
+    {
+        return reinterpret_cast<T*>(s_allocateZeroedExpandedHashTableBacking(size, holder));
+    }
+    static void freeHashTableBacking(void* address)
+    {
+        s_freeHashTableBacking(address);
+    }
+    static bool expandHashTableBacking(void* address, size_t size)
+    {
+        return s_expandHashTableBacking(address, size);
+    }
+    static void* allocateBufferStringBacking(size_t size, void* holder)
+    {
+        return s_allocateBufferStringBacking(size, holder);
+    }
+    static inline void freeBufferStringBacking(void* address)
+    {
+        s_freeBufferStringBacking(address);
+    }
+    static inline bool expandBufferStringBacking(void* address, size_t size)
+    {
+        return s_expandBufferStringBacking(address, size);
+    }
+    static inline bool shrinkBufferStringBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
+    {
+        return s_shrinkBufferStringBacking(address, quantizedCurrentSize, quantizedShrunkSize);
+    }
+
+    // Default implementations.
+    static size_t defaultQuantizedSize(size_t count, size_t elementSize)
+    {
+        RELEASE_ASSERT(count <= kGenericMaxDirectMapped / elementSize);
+        return partitionAllocActualSize(Partitions::bufferPartition(), count * elementSize);
+    }
+    static void* defaultAllocateVectorBacking(size_t size, void* holder)
+    {
+        return partitionAllocGeneric(Partitions::bufferPartition(), size);
+    }
+    static void* defaultAllocateExpandedVectorBacking(size_t size, void* holder)
+    {
+        return partitionAllocGeneric(Partitions::bufferPartition(), size);
+    }
+    static void defaultFreeVectorBacking(void* address)
     {
-        void* result = allocateBacking(size);
+        partitionFreeGeneric(Partitions::bufferPartition(), address);
+    }
+    static bool defaultExpandVectorBacking(void* address, size_t)
+    {
+        return false;
+    }
+    static bool defaultShrinkVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
+    {
+        return quantizedCurrentSize == quantizedShrunkSize;
+    }
+    static void* defaultAllocateInlineVectorBacking(size_t size, void* holder)
+    {
+        return partitionAllocGeneric(Partitions::bufferPartition(), size);
+    }
+    static void defaultFreeInlineVectorBacking(void* address)
+    {
+        partitionFreeGeneric(Partitions::bufferPartition(), address);
+    }
+    static bool defaultExpandInlineVectorBacking(void* address, size_t)
+    {
+        return false;
+    }
+    static bool defaultShrinkInlineVectorBacking(void* address, size_t quantizedCurrentSize, size_t quantizedShrunkSize)
+    {
+        return quantizedCurrentSize == quantizedShrunkSize;
+    }
+    static void* defaultAllocateHashTableBacking(size_t size, void* holder)
+    {
+        return partitionAllocGeneric(Partitions::bufferPartition(), size);
+    }
+    static void* defaultAllocateZeroedHashTableBacking(size_t size, void* holder)
+    {
+        void* result = defaultAllocateHashTableBacking(size, holder);
         memset(result, 0, size);
-        return reinterpret_cast<T*>(result);
+        return result;
+    }
+    static void* defaultAllocateExpandedHashTableBacking(size_t size, void* holder)
+    {
+        return partitionAllocGeneric(Partitions::bufferPartition(), size);
+    }
+    static void* defaultAllocateZeroedExpandedHashTableBacking(size_t size, void* holder)
+    {
+        void* result = defaultAllocateExpandedHashTableBacking(size, holder);
+        memset(result, 0, size);
+        return result;
+    }
+    static void defaultFreeHashTableBacking(void* address)
+    {
+        partitionFreeGeneric(Partitions::bufferPartition(), address);
+    }
+    static bool defaultExpandHashTableBacking(void*, size_t)
+    {
+        return false;
+    }
+    static void* defaultAllocateBufferStringBacking(size_t size, void* holder)
+    {
+        return partitionAllocGeneric(Partitions::bufferPartition(), size);
+    }
+    static void defaultFreeBufferStringBacking(void* address)
+    {
+        partitionFreeGeneric(Partitions::bufferPartition(), address);
+    }
+    static bool defaultExpandBufferStringBacking(void* address, size_t)
+    {
+        return false;
+    }
+    static bool defaultShrinkBufferStringBacking(void* address, size_t, size_t)
+    {
+        return false;
     }
-    static void freeHashTableBacking(void* address);
 
     template <typename Return, typename Metadata>
     static Return malloc(size_t size)
     {
         return reinterpret_cast<Return>(fastMalloc(size));
     }
-
-    static inline bool expandHashTableBacking(void*, size_t)
-    {
-        return false;
-    }
     static void free(void* address)
     {
         fastFree(address);
@@ -117,8 +289,7 @@ public:
     {
         return malloc<void*, void>(bytes);
     }
-    static void
-    deleteArray(void* ptr)
+    static void deleteArray(void* ptr)
     {
         free(ptr); // Not the system free, the one from this class.
     }
@@ -180,9 +351,6 @@ public:
     static void leaveNoAllocationScope() { }
     static void enterGCForbiddenScope() { }
    static void leaveGCForbiddenScope() { }
-
-private:
-    static void* allocateBacking(size_t);
 };
 
 // The Windows compiler seems to be very eager to instantiate things it won't
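
Note: after this patch every backing-store hook is routed through a static function pointer, so something must install implementations before the first WTF container allocates. Below is a minimal sketch of that wiring, under stated assumptions: the enclosing class is WTF::DefaultAllocator (the hunk headers only show "public:"), the s_* pointer definitions live in a corresponding DefaultAllocator.cpp that this diff does not touch, initialize() simply assigns its arguments to those pointers in order, and the installDefaultAllocatorHooks() function and its call site are hypothetical.

    // Sketch only: install the partition-alloc-backed defaults at startup.
    // All default* functions below are declared in the header above; the
    // argument order must match initialize()'s twenty-parameter signature.
    #include "wtf/DefaultAllocator.h"

    static void installDefaultAllocatorHooks() // hypothetical bootstrap hook
    {
        typedef WTF::DefaultAllocator A; // assumes this is the class name
        A::initialize(A::defaultQuantizedSize
            , A::defaultAllocateVectorBacking
            , A::defaultAllocateExpandedVectorBacking
            , A::defaultFreeVectorBacking
            , A::defaultExpandVectorBacking
            , A::defaultShrinkVectorBacking
            , A::defaultAllocateInlineVectorBacking
            , A::defaultFreeInlineVectorBacking
            , A::defaultExpandInlineVectorBacking
            , A::defaultShrinkInlineVectorBacking
            , A::defaultAllocateHashTableBacking
            , A::defaultAllocateZeroedHashTableBacking
            , A::defaultAllocateExpandedHashTableBacking
            , A::defaultAllocateZeroedExpandedHashTableBacking
            , A::defaultFreeHashTableBacking
            , A::defaultExpandHashTableBacking
            , A::defaultAllocateBufferStringBacking
            , A::defaultFreeBufferStringBacking
            , A::defaultExpandBufferStringBacking
            , A::defaultShrinkBufferStringBacking);
    }

The blink::BufferAllocator forward declaration suggests Blink later swaps in hooks of its own, presumably ones that use the new holder argument to identify the object owning the backing store. Two caveats follow from the signature as written: nothing enforces that initialize() runs before first use, so an uninstalled pointer is a null dereference; and twenty positional arguments are easy to misorder, since many share the same type.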
|