| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrPathRange.h" | 8 #include "GrPathRange.h" |
| 9 #include "SkPath.h" | 9 #include "SkPath.h" |
| 10 | 10 |
| 11 enum { | |
| 12 kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading. | |
| 13 }; | |
| 14 | 11 |
| 15 GrPathRange::GrPathRange(GrGpu* gpu, | 12 GrPathRange::GrPathRange(GrGpu* gpu, |
| 16 PathGenerator* pathGenerator) | 13 PathGenerator* pathGenerator) |
| 17 : INHERITED(gpu, kCached_LifeCycle), | 14 : INHERITED(gpu, kCached_LifeCycle), |
| 18 fPathGenerator(SkRef(pathGenerator)), | 15 fPathGenerator(SkRef(pathGenerator)), |
| 19 fNumPaths(fPathGenerator->getNumPaths()) { | 16 fNumPaths(fPathGenerator->getNumPaths()) { |
| 20 const int numGroups = (fNumPaths + kPathsPerGroup - 1) / kPathsPerGroup; | 17 const int numGroups = (fNumPaths + kPathsPerGroup - 1) / kPathsPerGroup; |
| 21 fGeneratedPaths.reset((numGroups + 7) / 8); // 1 bit per path group. | 18 fGeneratedPaths.reset((numGroups + 7) / 8); // 1 bit per path group. |
| 22 memset(&fGeneratedPaths.front(), 0, fGeneratedPaths.count()); | 19 memset(&fGeneratedPaths.front(), 0, fGeneratedPaths.count()); |
| 23 } | 20 } |
| 24 | 21 |
| 25 GrPathRange::GrPathRange(GrGpu* gpu, | 22 GrPathRange::GrPathRange(GrGpu* gpu, |
| 26 int numPaths) | 23 int numPaths) |
| 27 : INHERITED(gpu, kCached_LifeCycle), | 24 : INHERITED(gpu, kCached_LifeCycle), |
| 28 fNumPaths(numPaths) { | 25 fNumPaths(numPaths) { |
| 29 } | 26 } |
| 30 | 27 |
| 31 void GrPathRange::willDrawPaths(const void* indices, PathIndexType indexType, int count) const { | 28 void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const { |
| 32 if (!fPathGenerator) { | |
| 33 return; | |
| 34 } | |
| 35 | |
| 36 switch (indexType) { | 29 switch (indexType) { |
| 37         case kU8_PathIndexType: return this->willDrawPaths<uint8_t>(indices, count); | 30         case kU8_PathIndexType: return this->loadPathsIfNeeded<uint8_t>(indices, count); |
| 38         case kU16_PathIndexType: return this->willDrawPaths<uint16_t>(indices, count); | 31         case kU16_PathIndexType: return this->loadPathsIfNeeded<uint16_t>(indices, count); |
| 39         case kU32_PathIndexType: return this->willDrawPaths<uint32_t>(indices, count); | 32         case kU32_PathIndexType: return this->loadPathsIfNeeded<uint32_t>(indices, count); |
| 40 default: SkFAIL("Unknown path index type"); | 33 default: SkFAIL("Unknown path index type"); |
| 41 } | 34 } |
| 42 } | 35 } |
| 43 | 36 |
| 44 template<typename IndexType> void GrPathRange::willDrawPaths(const void* indices, int count) const { | 37 #ifdef SK_DEBUG |
| 45 SkASSERT(fPathGenerator); | |
| 46 | 38 |
| 47     const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices); | 39 void GrPathRange::assertPathsLoaded(const void* indices, PathIndexType indexType, int count) const { |
| 48 bool didLoadPaths = false; | 40 switch (indexType) { |
| 49 | 41         case kU8_PathIndexType: return this->assertPathsLoaded<uint8_t>(indices, count); |
| 50     for (int i = 0; i < count; ++i) { | 42         case kU16_PathIndexType: return this->assertPathsLoaded<uint16_t>(indices, count); |
| 51         SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths)); | 43         case kU32_PathIndexType: return this->assertPathsLoaded<uint32_t>(indices, count); |
| 52 | 44 default: SkFAIL("Unknown path index type"); |
| 53 const int groupIndex = indexArray[i] / kPathsPerGroup; | |
| 54 const int groupByte = groupIndex / 8; | |
| 55 const uint8_t groupBit = 1 << (groupIndex % 8); | |
| 56 | |
| 57 const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit); | |
| 58 if (!hasPath) { | |
| 59 // We track which paths are loaded in groups of kPathsPerGroup. To | |
| 60 // mark a path as loaded we need to load the entire group. | |
| 61 const int groupFirstPath = groupIndex * kPathsPerGroup; | |
| 62             const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1; | |
| 63 | |
| 64 SkPath path; | |
| 65             for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) { | |
| 66 fPathGenerator->generatePath(pathIdx, &path); | |
| 67 this->onInitPath(pathIdx, path); | |
| 68 } | |
| 69 | |
| 70 fGeneratedPaths[groupByte] |= groupBit; | |
| 71 didLoadPaths = true; | |
| 72 } | |
| 73 } | |
| 74 | |
| 75 if (didLoadPaths) { | |
| 76 this->didChangeGpuMemorySize(); | |
| 77 } | 45 } |
| 78 } | 46 } |
| | 47 |
| | 48 #endif |
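A note on the lazy-loading scheme visible in the diff: paths are generated in groups of kPathsPerGroup (16), and one bit per group records whether that group has been generated yet. The standalone C++ sketch below mirrors that bit-per-group bookkeeping; the GroupBitSet class and its method names are illustrative only and are not part of the Skia sources.

```cpp
#include <cstdint>
#include <vector>

// Illustrative sketch of the bit-per-group tracking used above:
// kPathsPerGroup consecutive paths share a single "generated" bit.
static const int kPathsPerGroup = 16;

class GroupBitSet {
public:
    explicit GroupBitSet(int numPaths) {
        const int numGroups = (numPaths + kPathsPerGroup - 1) / kPathsPerGroup;
        fBits.assign((numGroups + 7) / 8, 0);  // 1 bit per path group.
    }

    // True if the group containing pathIndex has already been generated.
    bool isLoaded(int pathIndex) const {
        const int groupIndex = pathIndex / kPathsPerGroup;
        return 0 != (fBits[groupIndex / 8] & (1 << (groupIndex % 8)));
    }

    // Mark the entire group containing pathIndex as generated.
    void setLoaded(int pathIndex) {
        const int groupIndex = pathIndex / kPathsPerGroup;
        fBits[groupIndex / 8] |= static_cast<uint8_t>(1 << (groupIndex % 8));
    }

private:
    std::vector<uint8_t> fBits;  // Same layout as fGeneratedPaths in the diff.
};
```

Because a single bit covers a whole group, marking path 37 as loaded also covers paths 32..47; that is why the code generates every path from groupFirstPath through groupLastPath before setting the group's bit.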