Index: src/gpu/GrPathRange.h
diff --git a/src/gpu/GrPathRange.h b/src/gpu/GrPathRange.h
index 23b8beb226ccf18f493774b22574f2ea1d82a095..00165d198e18e0744189f29ee91e13bc8e732ea9 100644
--- a/src/gpu/GrPathRange.h
+++ b/src/gpu/GrPathRange.h
@@ -72,18 +72,17 @@ public:
 
     void loadPathsIfNeeded(const void* indices, PathIndexType, int count) const;
 
-    template<typename IndexType> void loadPathsIfNeeded(const void* indices, int count) const {
+    template<typename IndexType> void loadPathsIfNeeded(const IndexType* indices, int count) const {
         if (!fPathGenerator) {
             return;
         }
 
-        const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
         bool didLoadPaths = false;
 
         for (int i = 0; i < count; ++i) {
-            SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
+            SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths));
 
-            const int groupIndex = indexArray[i] / kPathsPerGroup;
+            const int groupIndex = indices[i] / kPathsPerGroup;
             const int groupByte = groupIndex / 8;
             const uint8_t groupBit = 1 << (groupIndex % 8);
 
@@ -113,17 +112,15 @@ public:
 #ifdef SK_DEBUG
     void assertPathsLoaded(const void* indices, PathIndexType, int count) const;
 
-    template<typename IndexType> void assertPathsLoaded(const void* indices, int count) const {
+    template<typename IndexType> void assertPathsLoaded(const IndexType* indices, int count) const {
         if (!fPathGenerator) {
             return;
         }
 
-        const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
-
        for (int i = 0; i < count; ++i) {
-            SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
+            SkASSERT(indices[i] < static_cast<uint32_t>(fNumPaths));
 
-            const int groupIndex = indexArray[i] / kPathsPerGroup;
+            const int groupIndex = indices[i] / kPathsPerGroup;
             const int groupByte = groupIndex / 8;
             const uint8_t groupBit = 1 << (groupIndex % 8);
 
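Why this shape: the templated helpers previously accepted `const void*` and each re-derived the element type with its own `reinterpret_cast`, even though every caller of the template already knows the index type statically. Typing the parameter as `const IndexType*` deletes the redundant casts and confines type erasure to the one non-template entry point that genuinely needs it. Below is a minimal sketch of how that dispatcher could forward to the typed template after this change; the enum value names (kU8_/kU16_/kU32_PathIndexType) and SkFAIL are assumptions for illustration and are not part of this hunk:

void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const {
    switch (indexType) {
        // The single remaining reinterpret_cast lives here, next to the
        // runtime tag (indexType) that justifies it.
        case kU8_PathIndexType:
            return this->loadPathsIfNeeded<uint8_t>(reinterpret_cast<const uint8_t*>(indices), count);
        case kU16_PathIndexType:
            return this->loadPathsIfNeeded<uint16_t>(reinterpret_cast<const uint16_t*>(indices), count);
        case kU32_PathIndexType:
            return this->loadPathsIfNeeded<uint32_t>(reinterpret_cast<const uint32_t*>(indices), count);
        default:
            SkFAIL("Unknown path index type");  // hypothetical failure macro for the sketch
    }
}

A side benefit: a caller that already holds a typed array, e.g. a const uint16_t* indices, can now invoke the template directly as range->loadPathsIfNeeded<uint16_t>(indices, count) with no cast, and passing a mistyped pointer becomes a compile-time error rather than silent misindexing.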