OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2015 Google Inc. | 2 * Copyright 2015 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
// Broadcast one premultiplied 8888 pixel into all four 32-bit lanes.
// vreinterpretq_u8_u32 is the NEON-idiomatic (and greppable) form of the
// old C-style (uint8x16_t) cast; it is a pure bit reinterpretation.
inline Sk4px::Sk4px(SkPMColor px) : INHERITED(vreinterpretq_u8_u32(vdupq_n_u32(px))) {}
9 | 9 |
10 inline Sk4px Sk4px::Load4(const SkPMColor px[4]) { | 10 inline Sk4px Sk4px::Load4(const SkPMColor px[4]) { |
(...skipping 60 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
71 | 71 |
// Load two 8-bit alphas and splat each across one 32-bit pixel lane:
// lane 0 ends up with a[0] in all four of its bytes, lane 1 with a[1];
// lanes 2 and 3 stay zero (callers only use the low two pixels).
inline Sk4px Sk4px::Load2Alphas(const SkAlpha a[2]) {
    uint8x16_t a8 = vdupq_n_u8(0);                 // ____ ____ ____ ____
    a8 = vld1q_lane_u8(a+0, a8, 0);                // ____ ____ ____ ___0
    a8 = vld1q_lane_u8(a+1, a8, 4);                // ____ ____ ___1 ___0
    auto a32 = vreinterpretq_u32_u8(a8);           // view the same bits as 4x u32
    a32 = vorrq_u32(a32, vshlq_n_u32(a32,  8));    // ____ ____ __11 __00
    a32 = vorrq_u32(a32, vshlq_n_u32(a32, 16));    // ____ ____ 1111 0000
    return Sk16b(vreinterpretq_u8_u32(a32));
}
| 81 |
// Keep only the alpha channel of each pixel, zeroing R, G, and B.
inline Sk4px Sk4px::zeroColors() const {
    // 0xFFu: with SK_A32_SHIFT == 24, (int)0xFF << 24 overflows int, which is
    // undefined behavior before C++20; the unsigned literal makes it well-defined.
    return Sk16b(vandq_u8(this->fVec, vreinterpretq_u8_u32(vdupq_n_u32(0xFFu << SK_A32_SHIFT))));
}
| 85 |
// Zero the alpha channel of each pixel, keeping R, G, and B.
inline Sk4px Sk4px::zeroAlphas() const {
    // vbic(a,b) == a & ~b, so this clears exactly the alpha byte of each lane.
    // 0xFFu: with SK_A32_SHIFT == 24, (int)0xFF << 24 overflows int, which is
    // undefined behavior before C++20; the unsigned literal makes it well-defined.
    return Sk16b(vbicq_u8(this->fVec, vreinterpretq_u8_u32(vdupq_n_u32(0xFFu << SK_A32_SHIFT))));
}
| 90 |
OLD | NEW |