Index: src/gpu/GrPathRange.cpp
diff --git a/src/gpu/GrPathRange.cpp b/src/gpu/GrPathRange.cpp
index 5e71014ce8b5fd78d091a8090a0579fbacf77113..117051db19df05db121543f56a5d0698634971b8 100644
--- a/src/gpu/GrPathRange.cpp
+++ b/src/gpu/GrPathRange.cpp
@@ -8,9 +8,6 @@
 #include "GrPathRange.h"
 #include "SkPath.h"
 
-enum {
-    kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading.
-};
 
 GrPathRange::GrPathRange(GrGpu* gpu,
                          PathGenerator* pathGenerator)
@@ -28,51 +25,24 @@ GrPathRange::GrPathRange(GrGpu* gpu,
       fNumPaths(numPaths) {
 }
 
-void GrPathRange::willDrawPaths(const void* indices, PathIndexType indexType, int count) const {
-    if (!fPathGenerator) {
-        return;
-    }
-
+void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const {
     switch (indexType) {
-        case kU8_PathIndexType: return this->willDrawPaths<uint8_t>(indices, count);
-        case kU16_PathIndexType: return this->willDrawPaths<uint16_t>(indices, count);
-        case kU32_PathIndexType: return this->willDrawPaths<uint32_t>(indices, count);
+        case kU8_PathIndexType: return this->loadPathsIfNeeded<uint8_t>(indices, count);
+        case kU16_PathIndexType: return this->loadPathsIfNeeded<uint16_t>(indices, count);
+        case kU32_PathIndexType: return this->loadPathsIfNeeded<uint32_t>(indices, count);
         default: SkFAIL("Unknown path index type");
     }
 }
 
-template<typename IndexType> void GrPathRange::willDrawPaths(const void* indices, int count) const {
-    SkASSERT(fPathGenerator);
-
-    const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
-    bool didLoadPaths = false;
-
-    for (int i = 0; i < count; ++i) {
-        SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
-
-        const int groupIndex = indexArray[i] / kPathsPerGroup;
-        const int groupByte = groupIndex / 8;
-        const uint8_t groupBit = 1 << (groupIndex % 8);
-
-        const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
-        if (!hasPath) {
-            // We track which paths are loaded in groups of kPathsPerGroup. To
-            // mark a path as loaded we need to load the entire group.
-            const int groupFirstPath = groupIndex * kPathsPerGroup;
-            const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;
+#ifdef SK_DEBUG
 
-            SkPath path;
-            for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
-                fPathGenerator->generatePath(pathIdx, &path);
-                this->onInitPath(pathIdx, path);
-            }
-
-            fGeneratedPaths[groupByte] |= groupBit;
-            didLoadPaths = true;
-        }
-    }
-
-    if (didLoadPaths) {
-        this->didChangeGpuMemorySize();
+void GrPathRange::assertPathsLoaded(const void* indices, PathIndexType indexType, int count) const {
+    switch (indexType) {
+        case kU8_PathIndexType: return this->assertPathsLoaded<uint8_t>(indices, count);
+        case kU16_PathIndexType: return this->assertPathsLoaded<uint16_t>(indices, count);
+        case kU32_PathIndexType: return this->assertPathsLoaded<uint32_t>(indices, count);
+        default: SkFAIL("Unknown path index type");
     }
 }
+
+#endif
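
Note on the removed code: the dispatch above still calls templated loadPathsIfNeeded<IndexType>() / assertPathsLoaded<IndexType>() overloads, so those bodies presumably move to GrPathRange.h, which is not part of this file's diff. For readers tracing the deleted willDrawPaths() logic, the following is a standalone sketch of the same group-based lazy-loading bookkeeping (paths generated in groups of kPathsPerGroup, one bit per group marking which groups have been generated). It is an illustration only, not Skia code; the class name LazyPathSet and the generate callback are invented for the example.

// Sketch of the lazy-loading scheme seen in the removed willDrawPaths().
// Assumes every index passed in is < numPaths, as the original code asserts.
#include <algorithm>
#include <cstdint>
#include <functional>
#include <vector>

class LazyPathSet {
public:
    static const int kPathsPerGroup = 16;  // paths are tracked in groups of 16

    LazyPathSet(int numPaths, std::function<void(int)> generatePath)
        : fNumPaths(numPaths)
        , fGeneratePath(std::move(generatePath))
          // 1 bit per group, packed into bytes: ceil(ceil(numPaths/16) / 8) bytes.
        , fGeneratedGroups(((numPaths + kPathsPerGroup - 1) / kPathsPerGroup + 7) / 8, 0) {}

    // Ensure every path referenced by indices[0..count) has been generated.
    template <typename IndexType>
    void loadPathsIfNeeded(const IndexType* indices, int count) {
        for (int i = 0; i < count; ++i) {
            const int groupIndex = indices[i] / kPathsPerGroup;
            const int groupByte = groupIndex / 8;
            const uint8_t groupBit = 1 << (groupIndex % 8);
            if (fGeneratedGroups[groupByte] & groupBit) {
                continue;  // this group of paths was already generated
            }
            // Loading is tracked per group, so generate the entire group at once.
            const int first = groupIndex * kPathsPerGroup;
            const int last = std::min(first + kPathsPerGroup, fNumPaths) - 1;
            for (int path = first; path <= last; ++path) {
                fGeneratePath(path);
            }
            fGeneratedGroups[groupByte] |= groupBit;
        }
    }

private:
    int fNumPaths;
    std::function<void(int)> fGeneratePath;
    std::vector<uint8_t> fGeneratedGroups;  // 1 bit per group of kPathsPerGroup paths
};

Generating whole groups keeps the bookkeeping at one bit per 16 paths, at the cost of occasionally generating a few paths that were not explicitly requested.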