| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2012 Google Inc. | 2 * Copyright 2012 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "GrPath.h" | 8 #include "GrPath.h" |
| 9 | 9 |
| 10 namespace { | 10 namespace { |
| 11 // Verb count limit for generating path key from content of a volatile path. | 11 // Verb count limit for generating path key from content of a volatile path. |
| 12 // The value should accommodate at least simple rects and rrects. | 12 // The value should accommodate at least simple rects and rrects. |
| 13 static const int kSimpleVolatilePathVerbLimit = 10; | 13 static const int kSimpleVolatilePathVerbLimit = 10; |
| 14 | 14 |
| 15 inline static bool compute_key_for_line_path(const SkPath& path, const GrStrokeInfo& stroke, | 15 inline static bool compute_key_for_line_path(const SkPath& path, const GrStrokeInfo& stroke, |
| 16 GrUniqueKey* key) { | 16 GrUniqueKey* key) { |
| 17 SkPoint pts[2]; | 17 SkPoint pts[2]; |
| 18 if (!path.isLine(pts)) { | 18 if (!path.isLine(pts)) { |
| 19 return false; | 19 return false; |
| 20 } | 20 } |
| 21 SK_COMPILE_ASSERT((sizeof(pts) % sizeof(uint32_t)) == 0 && sizeof(pts) > sizeof(uint32_t), | 21 static_assert((sizeof(pts) % sizeof(uint32_t)) == 0 && sizeof(pts) > sizeof(uint32_t), |
| 22 pts_needs_padding); | 22 "pts_needs_padding"); |
| 23 | 23 |
| 24 const int kBaseData32Cnt = 1 + sizeof(pts) / sizeof(uint32_t); | 24 const int kBaseData32Cnt = 1 + sizeof(pts) / sizeof(uint32_t); |
| 25 int strokeDataCnt = stroke.computeUniqueKeyFragmentData32Cnt(); | 25 int strokeDataCnt = stroke.computeUniqueKeyFragmentData32Cnt(); |
| 26 static const GrUniqueKey::Domain kOvalPathDomain = GrUniqueKey::GenerateDomain(); | 26 static const GrUniqueKey::Domain kOvalPathDomain = GrUniqueKey::GenerateDomain(); |
| 27 GrUniqueKey::Builder builder(key, kOvalPathDomain, kBaseData32Cnt + strokeDataCnt); | 27 GrUniqueKey::Builder builder(key, kOvalPathDomain, kBaseData32Cnt + strokeDataCnt); |
| 28 builder[0] = path.getFillType(); | 28 builder[0] = path.getFillType(); |
| 29 memcpy(&builder[1], &pts, sizeof(pts)); | 29 memcpy(&builder[1], &pts, sizeof(pts)); |
| 30 if (strokeDataCnt > 0) { | 30 if (strokeDataCnt > 0) { |
| 31 stroke.asUniqueKeyFragment(&builder[kBaseData32Cnt]); | 31 stroke.asUniqueKeyFragment(&builder[kBaseData32Cnt]); |
| 32 } | 32 } |
| 33 return true; | 33 return true; |
| 34 } | 34 } |
| 35 | 35 |
| 36 inline static bool compute_key_for_oval_path(const SkPath& path, const GrStrokeInfo& stroke, | 36 inline static bool compute_key_for_oval_path(const SkPath& path, const GrStrokeInfo& stroke, |
| 37 GrUniqueKey* key) { | 37 GrUniqueKey* key) { |
| 38 SkRect rect; | 38 SkRect rect; |
| 39 if (!path.isOval(&rect)) { | 39 if (!path.isOval(&rect)) { |
| 40 return false; | 40 return false; |
| 41 } | 41 } |
| 42 SK_COMPILE_ASSERT((sizeof(rect) % sizeof(uint32_t)) == 0 && sizeof(rect) > sizeof(uint32_t), | 42 static_assert((sizeof(rect) % sizeof(uint32_t)) == 0 && sizeof(rect) > sizeof(uint32_t), |
| 43 rect_needs_padding); | 43 "rect_needs_padding"); |
| 44 | 44 |
| 45 const int kBaseData32Cnt = 1 + sizeof(rect) / sizeof(uint32_t); | 45 const int kBaseData32Cnt = 1 + sizeof(rect) / sizeof(uint32_t); |
| 46 int strokeDataCnt = stroke.computeUniqueKeyFragmentData32Cnt(); | 46 int strokeDataCnt = stroke.computeUniqueKeyFragmentData32Cnt(); |
| 47 static const GrUniqueKey::Domain kOvalPathDomain = GrUniqueKey::GenerateDomain(); | 47 static const GrUniqueKey::Domain kOvalPathDomain = GrUniqueKey::GenerateDomain(); |
| 48 GrUniqueKey::Builder builder(key, kOvalPathDomain, kBaseData32Cnt + strokeDataCnt); | 48 GrUniqueKey::Builder builder(key, kOvalPathDomain, kBaseData32Cnt + strokeDataCnt); |
| 49 builder[0] = path.getFillType(); | 49 builder[0] = path.getFillType(); |
| 50 memcpy(&builder[1], &rect, sizeof(rect)); | 50 memcpy(&builder[1], &rect, sizeof(rect)); |
| 51 if (strokeDataCnt > 0) { | 51 if (strokeDataCnt > 0) { |
| 52 stroke.asUniqueKeyFragment(&builder[kBaseData32Cnt]); | 52 stroke.asUniqueKeyFragment(&builder[kBaseData32Cnt]); |
| 53 } | 53 } |
| 54 return true; | 54 return true; |
| 55 } | 55 } |
| 56 | 56 |
| 57 // Encodes the full path data to the unique key for very small, volatile paths. This is typically | 57 // Encodes the full path data to the unique key for very small, volatile paths. This is typically |
| 58 // hit when clipping stencils the clip stack. Intention is that this handles rects too, since | 58 // hit when clipping stencils the clip stack. Intention is that this handles rects too, since |
| 59 // SkPath::isRect seems to do non-trivial amount of work. | 59 // SkPath::isRect seems to do non-trivial amount of work. |
| 60 inline static bool compute_key_for_simple_path(const SkPath& path, const GrStrokeInfo& stroke, | 60 inline static bool compute_key_for_simple_path(const SkPath& path, const GrStrokeInfo& stroke, |
| 61 GrUniqueKey* key) { | 61 GrUniqueKey* key) { |
| 62 if (!path.isVolatile()) { | 62 if (!path.isVolatile()) { |
| 63 return false; | 63 return false; |
| 64 } | 64 } |
| 65 // The check below should take care of negative values casted positive. | 65 // The check below should take care of negative values casted positive. |
| 66 const int verbCnt = path.countVerbs(); | 66 const int verbCnt = path.countVerbs(); |
| 67 if (verbCnt > kSimpleVolatilePathVerbLimit) { | 67 if (verbCnt > kSimpleVolatilePathVerbLimit) { |
| 68 return false; | 68 return false; |
| 69 } | 69 } |
| 70 | 70 |
| 71 // If somebody goes wild with the constant, it might cause an overflow. | 71 // If somebody goes wild with the constant, it might cause an overflow. |
| 72 SK_COMPILE_ASSERT(kSimpleVolatilePathVerbLimit <= 100, | 72 static_assert(kSimpleVolatilePathVerbLimit <= 100, |
| 73 big_simple_volatile_path_verb_limit_may_cause_overflow); | 73 "big_simple_volatile_path_verb_limit_may_cause_overflow"); |
| 74 | 74 |
| 75 const int pointCnt = path.countPoints(); | 75 const int pointCnt = path.countPoints(); |
| 76 if (pointCnt < 0) { | 76 if (pointCnt < 0) { |
| 77 SkASSERT(false); | 77 SkASSERT(false); |
| 78 return false; | 78 return false; |
| 79 } | 79 } |
| 80 | 80 |
| 81 // Construct counts that align as uint32_t counts. | 81 // Construct counts that align as uint32_t counts. |
| 82 #define ARRAY_DATA32_COUNT(array_type, count) \ | 82 #define ARRAY_DATA32_COUNT(array_type, count) \ |
| 83 static_cast<int>((((count) * sizeof(array_type) + sizeof(uint32_t) - 1) / sizeof(uint32_t))) | 83 static_cast<int>((((count) * sizeof(array_type) + sizeof(uint32_t) - 1) / sizeof(uint32_t))) |
| (...skipping 30 matching lines...) |
| 114 | 114 |
| 115 // Fill the last uint32_t with 0 first, since the last uint8_ts of the uint32_t may be | 115 // Fill the last uint32_t with 0 first, since the last uint8_ts of the uint32_t may be |
| 116 // uninitialized. This does not produce ambiguous verb data, since we have serialized the exact | 116 // uninitialized. This does not produce ambiguous verb data, since we have serialized the exact |
| 117 // verb count. | 117 // verb count. |
| 118 if (verbData32Cnt != static_cast<int>((verbCnt * sizeof(uint8_t) / sizeof(uint32_t)))) { | 118 if (verbData32Cnt != static_cast<int>((verbCnt * sizeof(uint8_t) / sizeof(uint32_t)))) { |
| 119 builder[i + verbData32Cnt - 1] = 0; | 119 builder[i + verbData32Cnt - 1] = 0; |
| 120 } | 120 } |
| 121 path.getVerbs(reinterpret_cast<uint8_t*>(&builder[i]), verbCnt); | 121 path.getVerbs(reinterpret_cast<uint8_t*>(&builder[i]), verbCnt); |
| 122 i += verbData32Cnt; | 122 i += verbData32Cnt; |
| 123 | 123 |
| 124 SK_COMPILE_ASSERT(((sizeof(SkPoint) % sizeof(uint32_t)) == 0) && | 124 static_assert(((sizeof(SkPoint) % sizeof(uint32_t)) == 0) && sizeof(SkPoint) > sizeof(uint32_t), |
| 125 sizeof(SkPoint) > sizeof(uint32_t), skpoint_array_needs_padding); | 125 "skpoint_array_needs_padding"); |
| 126 | 126 |
| 127 // Here we assume getPoints does a memcpy, so that we do not need to worry about the alignment. | 127 // Here we assume getPoints does a memcpy, so that we do not need to worry about the alignment. |
| 128 path.getPoints(reinterpret_cast<SkPoint*>(&builder[i]), pointCnt); | 128 path.getPoints(reinterpret_cast<SkPoint*>(&builder[i]), pointCnt); |
| 129 SkDEBUGCODE(i += pointData32Cnt); | 129 SkDEBUGCODE(i += pointData32Cnt); |
| 130 | 130 |
| 131 SkASSERT(i == baseData32Cnt); | 131 SkASSERT(i == baseData32Cnt); |
| 132 if (strokeDataCnt > 0) { | 132 if (strokeDataCnt > 0) { |
| 133 stroke.asUniqueKeyFragment(&builder[baseData32Cnt]); | 133 stroke.asUniqueKeyFragment(&builder[baseData32Cnt]); |
| 134 } | 134 } |
| 135 return true; | 135 return true; |
| (...skipping 28 matching lines...) |
| 164 | 164 |
| 165 if (compute_key_for_simple_path(path, stroke, key)) { | 165 if (compute_key_for_simple_path(path, stroke, key)) { |
| 166 *outIsVolatile = false; | 166 *outIsVolatile = false; |
| 167 return; | 167 return; |
| 168 } | 168 } |
| 169 | 169 |
| 170 compute_key_for_general_path(path, stroke, key); | 170 compute_key_for_general_path(path, stroke, key); |
| 171 *outIsVolatile = path.isVolatile(); | 171 *outIsVolatile = path.isVolatile(); |
| 172 } | 172 } |
| 173 | 173 |
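The substantive change in this diff is mechanical: every SK_COMPILE_ASSERT(expr, identifier_message) becomes a C++11 static_assert(expr, "string message"). A minimal sketch of the before/after pattern; the SK_COMPILE_ASSERT definition below is an illustrative assumption for contrast, not Skia's actual macro:

```cpp
#include <cstdint>

// Illustrative pre-C++11 emulation: the "message" must be a valid identifier,
// and a failing condition produces an invalid negative-size array type.
#define SK_COMPILE_ASSERT(expr, msg) typedef char msg[(expr) ? 1 : -1]

struct Pts { float x0, y0, x1, y1; };  // stand-in for SkPoint pts[2]

// Old form: the message is an identifier baked into a typedef name.
SK_COMPILE_ASSERT((sizeof(Pts) % sizeof(uint32_t)) == 0 && sizeof(Pts) > sizeof(uint32_t),
                  pts_needs_padding);

// New form: the language-level assert takes a string literal and needs no macro.
static_assert((sizeof(Pts) % sizeof(uint32_t)) == 0 && sizeof(Pts) > sizeof(uint32_t),
              "pts_needs_padding");
```

Both fire at compile time; the static_assert form reports the string directly in the diagnostic instead of a cryptic invalid-array error.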
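compute_key_for_simple_path addresses the key in uint32_t words, so the byte-sized verb array is rounded up to whole words and the last word is zeroed before the verbs are copied in; otherwise uninitialized tail bytes could make two identical paths produce different keys. A standalone sketch of that rounding and padding, using a std::vector<uint32_t> in place of GrUniqueKey::Builder (the helper names here are hypothetical, not Skia API):

```cpp
#include <cstdint>
#include <cstring>
#include <vector>

// Round `count` elements of a byte-sized type up to whole uint32_t words,
// mirroring the ARRAY_DATA32_COUNT macro in the patch.
template <typename T>
static int array_data32_count(int count) {
    return static_cast<int>((count * sizeof(T) + sizeof(uint32_t) - 1) / sizeof(uint32_t));
}

// Hypothetical helper: pack `verbCnt` one-byte verbs into a word-aligned key buffer.
static void append_verbs(std::vector<uint32_t>* key, const uint8_t* verbs, int verbCnt) {
    const int verbData32Cnt = array_data32_count<uint8_t>(verbCnt);
    const size_t start = key->size();
    key->resize(start + verbData32Cnt);
    // Zero the last word before copying: when verbCnt is not a multiple of 4,
    // its tail bytes would otherwise be uninitialized and poison key comparisons.
    // (Done unconditionally here for simplicity; the patch skips it when the
    // verb bytes already fill the last word exactly.)
    (*key)[start + verbData32Cnt - 1] = 0;
    std::memcpy(&(*key)[start], verbs, verbCnt * sizeof(uint8_t));
}
```

For example, 10 one-byte verbs occupy three words; the top two bytes of the third word are the zero padding.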
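All three specialized key builders use the same layout: one uint32_t for the fill type, then the raw geometry (two points for a line, an SkRect for an oval, verbs plus points for a small volatile path), then the stroke's key fragment when it is non-empty. A plain-C++ sketch of that layout for the line case, with std::vector<uint32_t> standing in for GrUniqueKey::Builder (an assumption about its word-array semantics, not its real API):

```cpp
#include <cstdint>
#include <cstring>
#include <vector>

struct Point { float x, y; };  // stand-in for SkPoint

// Hypothetical stand-in for the stroke's key fragment; empty for a plain fill.
struct StrokeKeyFragment {
    std::vector<uint32_t> words;
};

// Key layout mirroring compute_key_for_line_path:
// [fillType][pts as raw words][optional stroke fragment].
static std::vector<uint32_t> make_line_key(uint32_t fillType, const Point pts[2],
                                           const StrokeKeyFragment& stroke) {
    static_assert((sizeof(Point[2]) % sizeof(uint32_t)) == 0, "pts_needs_padding");
    const int baseData32Cnt = 1 + sizeof(Point[2]) / sizeof(uint32_t);

    std::vector<uint32_t> key(baseData32Cnt + stroke.words.size());
    key[0] = fillType;
    std::memcpy(&key[1], pts, sizeof(Point[2]));
    if (!stroke.words.empty()) {
        std::memcpy(&key[baseData32Cnt], stroke.words.data(),
                    stroke.words.size() * sizeof(uint32_t));
    }
    return key;
}
```

Putting the fill type in the first word keeps paths with identical geometry but different fill rules from colliding, and appending the stroke fragment last means a fill (presumably a zero-length fragment) and a stroke of the same geometry produce keys of different lengths.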