| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #ifndef GrPathRange_DEFINED | 8 #ifndef GrPathRange_DEFINED |
| 9 #define GrPathRange_DEFINED | 9 #define GrPathRange_DEFINED |
| 10 | 10 |
| 11 #include "GrGpuResource.h" | 11 #include "GrGpuResource.h" |
| 12 #include "SkRefCnt.h" | 12 #include "SkRefCnt.h" |
| 13 #include "SkStrokeRec.h" | |
| 14 #include "SkTArray.h" | 13 #include "SkTArray.h" |
| 15 | 14 |
| 16 class SkPath; | 15 class SkPath; |
| 17 class SkDescriptor; | 16 class SkDescriptor; |
| 18 | 17 |
| 19 /** | 18 /** |
| 20 * Represents a contiguous range of GPU path objects, all with a common stroke. | 19 * Represents a contiguous range of GPU path objects. |
| 21 * This object is immutable with the exception that individual paths may be | 20 * This object is immutable with the exception that individual paths may be |
| 22 * initialized lazily. | 21 * initialized lazily. |
| 23 */ | 22 */ |
| 24 | 23 |
| 25 class GrPathRange : public GrGpuResource { | 24 class GrPathRange : public GrGpuResource { |
| 26 public: | 25 public: |
| 27 SK_DECLARE_INST_COUNT(GrPathRange); | 26 SK_DECLARE_INST_COUNT(GrPathRange); |
| 28 | 27 |
| 29 enum PathIndexType { | 28 enum PathIndexType { |
| 30 kU8_PathIndexType, //!< uint8_t | 29 kU8_PathIndexType, //!< uint8_t |
| (...skipping 12 matching lines...) |
| 43 return 1 << type; | 42 return 1 << type; |
| 44 } | 43 } |
| 45 | 44 |
| 46 /** | 45 /** |
| 47 * Class that generates the paths for a specific range. | 46 * Class that generates the paths for a specific range. |
| 48 */ | 47 */ |
| 49 class PathGenerator : public SkRefCnt { | 48 class PathGenerator : public SkRefCnt { |
| 50 public: | 49 public: |
| 51 virtual int getNumPaths() = 0; | 50 virtual int getNumPaths() = 0; |
| 52 virtual void generatePath(int index, SkPath* out) = 0; | 51 virtual void generatePath(int index, SkPath* out) = 0; |
| 52 #ifdef SK_DEBUG |
| 53 virtual bool isEqualTo(const SkDescriptor&) const { return false; } | 53 virtual bool isEqualTo(const SkDescriptor&) const { return false; } |
| 54 #endif |
| 54 virtual ~PathGenerator() {} | 55 virtual ~PathGenerator() {} |
| 55 }; | 56 }; |
| 56 | 57 |
| 57 /** | 58 /** |
| 58 * Initialize a lazy-loaded path range. This class will generate an SkPath and call | 59 * Initialize a lazy-loaded path range. This class will generate an SkPath and call |
| 59 * onInitPath() for each path within the range before it is drawn for the first time. | 60 * onInitPath() for each path within the range before it is drawn for the first time. |
| 60 */ | 61 */ |
| 61 GrPathRange(GrGpu*, PathGenerator*, const SkStrokeRec& stroke); | 62 GrPathRange(GrGpu*, PathGenerator*); |
| 62 | 63 |
| 63 /** | 64 /** |
| 64 * Initialize an eager-loaded path range. The subclass is responsible for ensuring all | 65 * Initialize an eager-loaded path range. The subclass is responsible for ensuring all |
| 65 * the paths are initialized up front. | 66 * the paths are initialized up front. |
| 66 */ | 67 */ |
| 67 GrPathRange(GrGpu*, int numPaths, const SkStrokeRec& stroke); | 68 GrPathRange(GrGpu*, int numPaths); |
| 68 | 69 |
| 70 int getNumPaths() const { return fNumPaths; } |
| 71 const PathGenerator* getPathGenerator() const { return fPathGenerator.get(); } |
| 72 |
| 73 #ifdef SK_DEBUG |
| 69 virtual bool isEqualTo(const SkDescriptor& desc) const { | 74 virtual bool isEqualTo(const SkDescriptor& desc) const { |
| 70 return NULL != fPathGenerator.get() && fPathGenerator->isEqualTo(desc); | 75 return NULL != fPathGenerator.get() && fPathGenerator->isEqualTo(desc); |
| 71 } | 76 } |
| 72 | 77 #endif |
| 73 int getNumPaths() const { return fNumPaths; } | |
| 74 const SkStrokeRec& getStroke() const { return fStroke; } | |
| 75 const PathGenerator* getPathGenerator() const { return fPathGenerator.get(); } | |
| 76 | |
| 77 protected: | 78 protected: |
| 78 // Initialize a path in the range before drawing. This is only called when | 79 // Initialize a path in the range before drawing. This is only called when |
| 79 // fPathGenerator is non-null. The child class need not call didChangeGpuMemorySize(), | 80 // fPathGenerator is non-null. The child class need not call didChangeGpuMemorySize(), |
| 80 // GrPathRange will take care of that after the call is complete. | 81 // GrPathRange will take care of that after the call is complete. |
| 81 virtual void onInitPath(int index, const SkPath&) const = 0; | 82 virtual void onInitPath(int index, const SkPath&) const = 0; |
| 82 | 83 |
| 83 private: | 84 private: |
| 84 // Notify when paths will be drawn in case this is a lazy-loaded path range. | 85 // Notify when paths will be drawn in case this is a lazy-loaded path range. |
| 85 friend class GrGpu; | 86 friend class GrGpu; |
| 86 void willDrawPaths(const void* indices, PathIndexType, int count) const; | 87 void willDrawPaths(const void* indices, PathIndexType, int count) const; |
| 87 template<typename IndexType> void willDrawPaths(const void* indices, int count) const; | 88 template<typename IndexType> void willDrawPaths(const void* indices, int count) const; |
| 88 | 89 |
| 89 mutable SkAutoTUnref<PathGenerator> fPathGenerator; | 90 mutable SkAutoTUnref<PathGenerator> fPathGenerator; |
| 90 mutable SkTArray<uint8_t, true /*MEM_COPY*/> fGeneratedPaths; | 91 mutable SkTArray<uint8_t, true /*MEM_COPY*/> fGeneratedPaths; |
| 91 const int fNumPaths; | 92 const int fNumPaths; |
| 92 const SkStrokeRec fStroke; | |
| 93 | 93 |
| 94 typedef GrGpuResource INHERITED; | 94 typedef GrGpuResource INHERITED; |
| 95 }; | 95 }; |
| 96 | 96 |
| 97 #endif | 97 #endif |
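
Note on the lazy-loading contract in the patched header: GrPathRange holds a PathGenerator, and before a path in the range is drawn for the first time it asks the generator for an SkPath and hands the result to the subclass via onInitPath(). The sketch below is a minimal illustration, not part of this CL, of what a PathGenerator subclass could look like against the new API; the name CirclePathGenerator, the circle geometry, and the specific SkPath calls are illustrative assumptions.

#include "GrPathRange.h"
#include "SkPath.h"

// Hypothetical example: path i in the range is a circle of radius i+1.
class CirclePathGenerator : public GrPathRange::PathGenerator {
public:
    explicit CirclePathGenerator(int count) : fCount(count) {}

    virtual int getNumPaths() { return fCount; }

    virtual void generatePath(int index, SkPath* out) {
        // Called by the range (see willDrawPaths()) before the path at this
        // index is first drawn; the result is then passed to onInitPath().
        out->reset();
        out->addCircle(0, 0, SkIntToScalar(index + 1));
    }

private:
    const int fCount;
};

A backend-specific GrPathRange subclass (which implements the pure virtual onInitPath()) would take ownership of such a generator through the new two-argument lazy-loading constructor, GrPathRange(GrGpu*, PathGenerator*); per this CL, no SkStrokeRec is passed any more.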