| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrPathRange.h" | 8 #include "GrPathRange.h" |
| 9 #include "SkPath.h" | 9 #include "SkPath.h" |
| 10 | 10 |
| (...skipping 14 matching lines...) |
| 25 } | 25 } |
| 26 | 26 |
| 27 GrPathRange::GrPathRange(GrGpu* gpu, | 27 GrPathRange::GrPathRange(GrGpu* gpu, |
| 28 int numPaths, | 28 int numPaths, |
| 29 const SkStrokeRec& stroke) | 29 const SkStrokeRec& stroke) |
| 30 : INHERITED(gpu, kIsWrapped), | 30 : INHERITED(gpu, kIsWrapped), |
| 31 fNumPaths(numPaths), | 31 fNumPaths(numPaths), |
| 32 fStroke(stroke) { | 32 fStroke(stroke) { |
| 33 } | 33 } |
| 34 | 34 |
| 35 void GrPathRange::willDrawPaths(const uint32_t indices[], int count) const { | 35 void GrPathRange::willDrawPaths(const void* indices, PathIndexType indexType, int count) const { |
| 36 if (NULL == fPathGenerator.get()) { | 36 if (!fPathGenerator) { |
| 37 return; | 37 return; |
| 38 } | 38 } |
| 39 | 39 |
| 40 switch (indexType) { |
| 41 case kU8_PathIndexType: return this->willDrawPaths<uint8_t>(indices, count); |
| 42 case kU16_PathIndexType: return this->willDrawPaths<uint16_t>(indices, count); |
| 43 case kU32_PathIndexType: return this->willDrawPaths<uint32_t>(indices, count); |
| 44 default: SkFAIL("Unknown path index type"); |
| 45 } |
| 46 } |
| 47 |
| 48 template<typename IndexType> void GrPathRange::willDrawPaths(const void* indices, int count) const { |
| 49 SkASSERT(fPathGenerator); |
| 50 |
| 51 const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices); |
| 40 bool didLoadPaths = false; | 52 bool didLoadPaths = false; |
| 41 | 53 |
| 42 for (int i = 0; i < count; ++i) { | 54 for (int i = 0; i < count; ++i) { |
| 43 SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths)); | 55 SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths)); |
| 44 | 56 |
| 45 const int groupIndex = indices[i] / kPathsPerGroup; | 57 const int groupIndex = indexArray[i] / kPathsPerGroup; |
| 46 const int groupByte = groupIndex / 8; | 58 const int groupByte = groupIndex / 8; |
| 47 const uint8_t groupBit = 1 << (groupIndex % 8); | 59 const uint8_t groupBit = 1 << (groupIndex % 8); |
| 48 | 60 |
| 49 const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit); | 61 const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit); |
| 50 if (!hasPath) { | 62 if (!hasPath) { |
| 51 // We track which paths are loaded in groups of kPathsPerGroup. To | 63 // We track which paths are loaded in groups of kPathsPerGroup. To |
| 52 // mark a path as loaded we need to load the entire group. | 64 // mark a path as loaded we need to load the entire group. |
| 53 const int groupFirstPath = groupIndex * kPathsPerGroup; | 65 const int groupFirstPath = groupIndex * kPathsPerGroup; |
| 54 const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1; | 66 const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1; |
| 55 | 67 |
| 56 SkPath path; | 68 SkPath path; |
| 57 for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) { | 69 for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) { |
| 58 fPathGenerator->generatePath(pathIdx, &path); | 70 fPathGenerator->generatePath(pathIdx, &path); |
| 59 this->onInitPath(pathIdx, path); | 71 this->onInitPath(pathIdx, path); |
| 60 } | 72 } |
| 61 | 73 |
| 62 fGeneratedPaths[groupByte] |= groupBit; | 74 fGeneratedPaths[groupByte] |= groupBit; |
| 63 didLoadPaths = true; | 75 didLoadPaths = true; |
| 64 } | 76 } |
| 65 } | 77 } |
| 66 | 78 |
| 67 if (didLoadPaths) { | 79 if (didLoadPaths) { |
| 68 this->didChangeGpuMemorySize(); | 80 this->didChangeGpuMemorySize(); |
| 69 } | 81 } |
| 70 } | 82 } |
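
For reference, below is a minimal, self-contained sketch of the dispatch pattern the new willDrawPaths() introduces: a public entry point takes an untyped index buffer plus a runtime index-type enum and forwards to a template instantiated for the concrete index width. The names in the sketch (IndexType, visitIndices) are illustrative placeholders, not Skia API; this is only meant to show the enum-to-template dispatch in isolation.

// Sketch only: mirrors the switch-to-template dispatch in the new code.
// IndexType and visitIndices are hypothetical names, not Skia's.
#include <cstdint>
#include <cstdio>
#include <cassert>

enum class IndexType { kU8, kU16, kU32 };

// Template instantiated per concrete index width; reads the buffer as T.
template <typename T>
static void visitIndices(const void* indices, int count) {
    const T* idx = static_cast<const T*>(indices);
    for (int i = 0; i < count; ++i) {
        std::printf("index %d = %u\n", i, static_cast<unsigned>(idx[i]));
    }
}

// Public entry point: runtime enum -> compile-time type dispatch.
static void visitIndices(const void* indices, IndexType type, int count) {
    switch (type) {
        case IndexType::kU8:  return visitIndices<uint8_t>(indices, count);
        case IndexType::kU16: return visitIndices<uint16_t>(indices, count);
        case IndexType::kU32: return visitIndices<uint32_t>(indices, count);
    }
    assert(false && "unknown index type");
}

int main() {
    const uint16_t indices[] = { 3, 1, 7 };
    visitIndices(indices, IndexType::kU16, 3);
    return 0;
}

The upside of this shape, as in the diff, is that callers can hand over 8-, 16-, or 32-bit index buffers through one void* entry point while the per-element loop stays typed and branch-free inside the template.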