| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrPathRange.h" | 8 #include "GrPathRange.h" |
| 9 #include "SkPath.h" | 9 #include "SkPath.h" |
| 10 | 10 |
| 11 GrPathRange::GrPathRange(GrGpu* gpu, | 11 GrPathRange::GrPathRange(GrGpu* gpu, |
| 12 PathGenerator* pathGenerator) | 12 PathGenerator* pathGenerator) |
| 13 : INHERITED(gpu, kCached_LifeCycle), | 13 : INHERITED(gpu), |
| 14 fPathGenerator(SkRef(pathGenerator)), | 14 fPathGenerator(SkRef(pathGenerator)), |
| 15 fNumPaths(fPathGenerator->getNumPaths()) { | 15 fNumPaths(fPathGenerator->getNumPaths()) { |
| 16 const int numGroups = (fNumPaths + kPathsPerGroup - 1) / kPathsPerGroup; | 16 const int numGroups = (fNumPaths + kPathsPerGroup - 1) / kPathsPerGroup; |
| 17 fGeneratedPaths.reset((numGroups + 7) / 8); // 1 bit per path group. | 17 fGeneratedPaths.reset((numGroups + 7) / 8); // 1 bit per path group. |
| 18 memset(&fGeneratedPaths.front(), 0, fGeneratedPaths.count()); | 18 memset(&fGeneratedPaths.front(), 0, fGeneratedPaths.count()); |
| 19 } | 19 } |
| 20 | 20 |
| 21 GrPathRange::GrPathRange(GrGpu* gpu, | 21 GrPathRange::GrPathRange(GrGpu* gpu, |
| 22 int numPaths) | 22 int numPaths) |
| 23 : INHERITED(gpu, kCached_LifeCycle), | 23 : INHERITED(gpu), |
| 24 fNumPaths(numPaths) { | 24 fNumPaths(numPaths) { |
| 25 } | 25 } |
| 26 | 26 |
| 27 void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const { | 27 void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const { |
| 28 switch (indexType) { | 28 switch (indexType) { |
| 29 case kU8_PathIndexType: | 29 case kU8_PathIndexType: |
| 30 return this->loadPathsIfNeeded(reinterpret_cast<const uint8_t*>(indices), count); | 30 return this->loadPathsIfNeeded(reinterpret_cast<const uint8_t*>(indices), count); |
| 31 case kU16_PathIndexType: | 31 case kU16_PathIndexType: |
| 32 return this->loadPathsIfNeeded(reinterpret_cast<const uint16_t*>(indices), count); | 32 return this->loadPathsIfNeeded(reinterpret_cast<const uint16_t*>(indices), count); |
| 33 case kU32_PathIndexType: | 33 case kU32_PathIndexType: |
| (...skipping 12 matching lines...) |
| 46 case kU16_PathIndexType: | 46 case kU16_PathIndexType: |
| 47 return this->assertPathsLoaded(reinterpret_cast<const uint16_t*>(indices), count); | 47 return this->assertPathsLoaded(reinterpret_cast<const uint16_t*>(indices), count); |
| 48 case kU32_PathIndexType: | 48 case kU32_PathIndexType: |
| 49 return this->assertPathsLoaded(reinterpret_cast<const uint32_t*>(indices), count); | 49 return this->assertPathsLoaded(reinterpret_cast<const uint32_t*>(indices), count); |
| 50 default: | 50 default: |
| 51 SkFAIL("Unknown path index type"); | 51 SkFAIL("Unknown path index type"); |
| 52 } | 52 } |
| 53 } | 53 } |
| 54 | 54 |
| 55 #endif | 55 #endif |
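
Side note on the constructor rows above (unchanged by this CL): fGeneratedPaths is sized to one bit per group of kPathsPerGroup paths, rounded up to whole bytes. The following is a minimal standalone sketch of that bookkeeping, assuming kPathsPerGroup = 16; PathGroupBits, markLoaded, and isLoaded are illustrative names, not part of the CL.

    #include <cstdint>
    #include <vector>

    // Illustrative sketch only: one "generated" bit per group of paths,
    // mirroring the rounding-up arithmetic in the constructor above.
    constexpr int kPathsPerGroup = 16;  // assumed value, for illustration

    struct PathGroupBits {
        std::vector<uint8_t> bits;

        explicit PathGroupBits(int numPaths) {
            const int numGroups = (numPaths + kPathsPerGroup - 1) / kPathsPerGroup;
            bits.assign((numGroups + 7) / 8, 0);  // 1 bit per path group
        }

        // Mark the group containing pathIndex as generated.
        void markLoaded(int pathIndex) {
            const int group = pathIndex / kPathsPerGroup;
            bits[group / 8] |= static_cast<uint8_t>(1 << (group % 8));
        }

        // Check whether the group containing pathIndex has been generated.
        bool isLoaded(int pathIndex) const {
            const int group = pathIndex / kPathsPerGroup;
            return (bits[group / 8] & (1 << (group % 8))) != 0;
        }
    };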