| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2012 Google Inc. | 2 * Copyright 2012 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrMemoryPool.h" | 8 #include "GrMemoryPool.h" |
| 9 | 9 |
| 10 #ifdef SK_DEBUG | 10 #ifdef SK_DEBUG |
| (...skipping 22 matching lines...) |
| 33 GrMemoryPool::~GrMemoryPool() { | 33 GrMemoryPool::~GrMemoryPool() { |
| 34 VALIDATE; | 34 VALIDATE; |
| 35 SkASSERT(0 == fAllocationCnt); | 35 SkASSERT(0 == fAllocationCnt); |
| 36 SkASSERT(fHead == fTail); | 36 SkASSERT(fHead == fTail); |
| 37 SkASSERT(0 == fHead->fLiveCount); | 37 SkASSERT(0 == fHead->fLiveCount); |
| 38 DeleteBlock(fHead); | 38 DeleteBlock(fHead); |
| 39 }; | 39 }; |
| 40 | 40 |
| 41 void* GrMemoryPool::allocate(size_t size) { | 41 void* GrMemoryPool::allocate(size_t size) { |
| 42 VALIDATE; | 42 VALIDATE; |
| | 43 size += kPerAllocPad; |
| 43 size = GrSizeAlignUp(size, kAlignment); | 44 size = GrSizeAlignUp(size, kAlignment); |
| 44 size += kPerAllocPad; | |
| 45 if (fTail->fFreeSize < size) { | 45 if (fTail->fFreeSize < size) { |
| 46 size_t blockSize = size; | 46 size_t blockSize = size; |
| 47 blockSize = SkTMax<size_t>(blockSize, fMinAllocSize); | 47 blockSize = SkTMax<size_t>(blockSize, fMinAllocSize); |
| 48 BlockHeader* block = CreateBlock(blockSize); | 48 BlockHeader* block = CreateBlock(blockSize); |
| 49 | 49 |
| 50 block->fPrev = fTail; | 50 block->fPrev = fTail; |
| 51 block->fNext = nullptr; | 51 block->fNext = nullptr; |
| 52 SkASSERT(nullptr == fTail->fNext); | 52 SkASSERT(nullptr == fTail->fNext); |
| 53 fTail->fNext = block; | 53 fTail->fNext = block; |
| 54 fTail = block; | 54 fTail = block; |
| 55 fSize += block->fSize; | 55 fSize += block->fSize; |
| 56 SkDEBUGCODE(++fAllocBlockCnt); | 56 SkDEBUGCODE(++fAllocBlockCnt); |
| 57 } | 57 } |
| 58 SkASSERT(fTail->fFreeSize >= size); | 58 SkASSERT(fTail->fFreeSize >= size); |
| 59 intptr_t ptr = fTail->fCurrPtr; | 59 intptr_t ptr = fTail->fCurrPtr; |
| 60 // We stash a pointer to the block header, just before the allocated space, | 60 // We stash a pointer to the block header, just before the allocated space, |
| 61 // so that we can decrement the live count on delete in constant time. | 61 // so that we can decrement the live count on delete in constant time. |
| 62 *reinterpret_cast<BlockHeader**>(ptr) = fTail; | 62 AllocHeader* allocData = reinterpret_cast<AllocHeader*>(ptr); |
| | 63 SkDEBUGCODE(allocData->fSentinal = kAssignedMarker); |
| | 64 allocData->fHeader = fTail; |
| 63 ptr += kPerAllocPad; | 65 ptr += kPerAllocPad; |
| 64 fTail->fPrevPtr = fTail->fCurrPtr; | 66 fTail->fPrevPtr = fTail->fCurrPtr; |
| 65 fTail->fCurrPtr += size; | 67 fTail->fCurrPtr += size; |
| 66 fTail->fFreeSize -= size; | 68 fTail->fFreeSize -= size; |
| 67 fTail->fLiveCount += 1; | 69 fTail->fLiveCount += 1; |
| 68 | 70 |
| 69 SkDEBUGCODE(++fAllocationCnt); | 71 SkDEBUGCODE(++fAllocationCnt); |
| 70 VALIDATE; | 72 VALIDATE; |
| 71 return reinterpret_cast<void*>(ptr); | 73 return reinterpret_cast<void*>(ptr); |
| 72 } | 74 } |
| 73 | 75 |
| 74 void GrMemoryPool::release(void* p) { | 76 void GrMemoryPool::release(void* p) { |
| 75 VALIDATE; | 77 VALIDATE; |
| 76 intptr_t ptr = reinterpret_cast<intptr_t>(p) - kPerAllocPad; | 78 intptr_t ptr = reinterpret_cast<intptr_t>(p) - kPerAllocPad; |
| 77 BlockHeader* block = *reinterpret_cast<BlockHeader**>(ptr); | 79 AllocHeader* allocData = reinterpret_cast<AllocHeader*>(ptr); |
| | 80 SkASSERT(kAssignedMarker == allocData->fSentinal); |
| | 81 SkDEBUGCODE(allocData->fSentinal = kFreedMarker); |
| | 82 BlockHeader* block = allocData->fHeader; |
| 78 if (1 == block->fLiveCount) { | 83 if (1 == block->fLiveCount) { |
| 79 // the head block is special, it is reset rather than deleted | 84 // the head block is special, it is reset rather than deleted |
| 80 if (fHead == block) { | 85 if (fHead == block) { |
| 81 fHead->fCurrPtr = reinterpret_cast<intptr_t>(fHead) + kHeaderSize; | 86 fHead->fCurrPtr = reinterpret_cast<intptr_t>(fHead) + kHeaderSize; |
| 82 fHead->fLiveCount = 0; | 87 fHead->fLiveCount = 0; |
| 83 fHead->fFreeSize = fPreallocSize; | 88 fHead->fFreeSize = fPreallocSize; |
| 84 } else { | 89 } else { |
| 85 BlockHeader* prev = block->fPrev; | 90 BlockHeader* prev = block->fPrev; |
| 86 BlockHeader* next = block->fNext; | 91 BlockHeader* next = block->fNext; |
| 87 SkASSERT(prev); | 92 SkASSERT(prev); |
| (...skipping 64 matching lines...) |
| 152 if (fHead != block) { | 157 if (fHead != block) { |
| 153 SkASSERT(block->fLiveCount); | 158 SkASSERT(block->fLiveCount); |
| 154 SkASSERT(userSize >= fMinAllocSize); | 159 SkASSERT(userSize >= fMinAllocSize); |
| 155 } else { | 160 } else { |
| 156 SkASSERT(userSize == fPreallocSize); | 161 SkASSERT(userSize == fPreallocSize); |
| 157 } | 162 } |
| 158 if (!block->fLiveCount) { | 163 if (!block->fLiveCount) { |
| 159 SkASSERT(ptrOffset == kHeaderSize); | 164 SkASSERT(ptrOffset == kHeaderSize); |
| 160 SkASSERT(userStart == block->fCurrPtr); | 165 SkASSERT(userStart == block->fCurrPtr); |
| 161 } else { | 166 } else { |
| 162 SkASSERT(block == *reinterpret_cast<BlockHeader**>(userStart)); | 167 AllocHeader* allocData = reinterpret_cast<AllocHeader*>(userStart); |
| | 168 SkASSERT(allocData->fSentinal == kAssignedMarker || |
| | 169 allocData->fSentinal == kFreedMarker); |
| | 170 SkASSERT(block == allocData->fHeader); |
| 163 } | 171 } |
| | 172 |
| 164 prev = block; | 173 prev = block; |
| 165 } while ((block = block->fNext)); | 174 } while ((block = block->fNext)); |
| 166 SkASSERT(allocCount == fAllocationCnt); | 175 SkASSERT(allocCount == fAllocationCnt); |
| 167 SkASSERT(prev == fTail); | 176 SkASSERT(prev == fTail); |
| 168 SkASSERT(fAllocBlockCnt != 0 || fSize == 0); | 177 SkASSERT(fAllocBlockCnt != 0 || fSize == 0); |
| 169 #endif | 178 #endif |
| 170 } | 179 } |
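Note for readers of this diff: the new allocate()/release() paths refer to an AllocHeader struct and the kAssignedMarker/kFreedMarker constants, which are declared in GrMemoryPool.h and therefore do not appear above. The sketch below shows roughly what those declarations would need to look like for this .cpp to compile. The names fSentinal, fHeader, kAssignedMarker, and kFreedMarker come from the diff; the field order, the marker values, and the kPerAllocPad comment are assumptions for illustration only, not taken from the real header.

```cpp
// Sketch only -- the real declarations live in GrMemoryPool.h and may differ.
class GrMemoryPool {
    // ...
private:
    struct BlockHeader;  // per-block bookkeeping: fPrev/fNext, fLiveCount,
                         // fCurrPtr/fPrevPtr, fFreeSize, fSize

    // Prefixed to every allocation so release() can find the owning block in
    // constant time and, in debug builds, catch double-frees and stale pointers.
    struct AllocHeader {
#ifdef SK_DEBUG
        uint32_t fSentinal;   // kAssignedMarker while live, kFreedMarker after release()
#endif
        BlockHeader* fHeader; // back-pointer to the block containing this allocation
    };

#ifdef SK_DEBUG
    static const uint32_t kAssignedMarker = 0xCDDCCDDC;  // illustrative value only
    static const uint32_t kFreedMarker    = 0xEFFEEFFE;  // illustrative value only
#endif

    // Presumably sizeof(AllocHeader) rounded up to kAlignment, so the pointer
    // returned to the caller stays aligned after `ptr += kPerAllocPad`.
    static const size_t kPerAllocPad;
    // ...
};
```

The back-pointer to the owning block existed before this change (the old code stored a raw BlockHeader* just ahead of each allocation); what the CL adds is the debug-only fSentinal word, so that release() can assert the pointer it is handed is still live and validate() can check every allocation header it walks.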