/*
 *  Copyright 2012 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/basic_types.h"
#include "libyuv/row.h"

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

#if !defined(LIBYUV_DISABLE_X86) && (defined(__x86_64__) || defined(__i386__))

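// Sums the squared differences between src_a and src_b over count bytes,
// consuming 16 bytes of each buffer per loop iteration.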
uint32 SumSquareError_SSE2(const uint8* src_a, const uint8* src_b, int count) {
  uint32 sse;
  asm volatile (  // NOLINT
    "pxor      %%xmm0,%%xmm0                   \n"
    "pxor      %%xmm5,%%xmm5                   \n"
    LABELALIGN
    "1:                                        \n"
    "movdqu    " MEMACCESS(0) ",%%xmm1         \n"
    "lea       " MEMLEA(0x10, 0) ",%0          \n"
    "movdqu    " MEMACCESS(1) ",%%xmm2         \n"
    "lea       " MEMLEA(0x10, 1) ",%1          \n"
    "movdqa    %%xmm1,%%xmm3                   \n"
    "psubusb   %%xmm2,%%xmm1                   \n"
    "psubusb   %%xmm3,%%xmm2                   \n"
    "por       %%xmm2,%%xmm1                   \n"
    "movdqa    %%xmm1,%%xmm2                   \n"
    "punpcklbw %%xmm5,%%xmm1                   \n"
    "punpckhbw %%xmm5,%%xmm2                   \n"
    "pmaddwd   %%xmm1,%%xmm1                   \n"
    "pmaddwd   %%xmm2,%%xmm2                   \n"
    "paddd     %%xmm1,%%xmm0                   \n"
    "paddd     %%xmm2,%%xmm0                   \n"
    "sub       $0x10,%2                        \n"
    "jg        1b                              \n"

    "pshufd    $0xee,%%xmm0,%%xmm1             \n"
    "paddd     %%xmm1,%%xmm0                   \n"
    "pshufd    $0x1,%%xmm0,%%xmm1              \n"
    "paddd     %%xmm1,%%xmm0                   \n"
    "movd      %%xmm0,%3                       \n"

  : "+r"(src_a),      // %0
    "+r"(src_b),      // %1
    "+r"(count),      // %2
    "=g"(sse)         // %3
  :: "memory", "cc", "xmm0", "xmm1", "xmm2", "xmm3", "xmm5"
  );  // NOLINT
  return sse;
}
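// Illustrative scalar sketch (not part of the original LibYuv source, and the
// name is hypothetical): what the SSE2 loop above computes per byte. The asm
// path reads full 16-byte blocks, so this sketch likewise assumes count is a
// multiple of 16 handled by the caller.
static uint32 SumSquareError_ScalarSketch(const uint8* src_a,
                                          const uint8* src_b, int count) {
  uint32 sse = 0u;
  for (int i = 0; i < count; ++i) {
    int diff = src_a[i] - src_b[i];       // absolute difference, squared below
    sse += (uint32)(diff * diff);
  }
  return sse;
}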

#endif  // defined(__x86_64__) || defined(__i386__)

#if !defined(LIBYUV_DISABLE_X86) && \
    (defined(__x86_64__) || (defined(__i386__) && !defined(__pic__)))
#define HAS_HASHDJB2_SSE41
static uvec32 kHash16x33 = { 0x92d9e201, 0, 0, 0 };  // 33 ^ 16
static uvec32 kHashMul0 = {
  0x0c3525e1,  // 33 ^ 15
  0xa3476dc1,  // 33 ^ 14
  0x3b4039a1,  // 33 ^ 13
  0x4f5f0981,  // 33 ^ 12
};
static uvec32 kHashMul1 = {
  0x30f35d61,  // 33 ^ 11
  0x855cb541,  // 33 ^ 10
  0x040a9121,  // 33 ^ 9
  0x747c7101,  // 33 ^ 8
};
static uvec32 kHashMul2 = {
  0xec41d4e1,  // 33 ^ 7
  0x4cfa3cc1,  // 33 ^ 6
  0x025528a1,  // 33 ^ 5
  0x00121881,  // 33 ^ 4
};
static uvec32 kHashMul3 = {
  0x00008c61,  // 33 ^ 3
  0x00000441,  // 33 ^ 2
  0x00000021,  // 33 ^ 1
  0x00000001,  // 33 ^ 0
};

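// Computes the djb2 hash (hash = hash * 33 + byte) of count bytes, starting
// from seed and folding 16 bytes per loop iteration using the precomputed
// powers of 33 above.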
uint32 HashDjb2_SSE41(const uint8* src, int count, uint32 seed) {
  uint32 hash;
  asm volatile (  // NOLINT
    "movd      %2,%%xmm0                       \n"
    "pxor      %%xmm7,%%xmm7                   \n"
    "movdqa    %4,%%xmm6                       \n"
    LABELALIGN
    "1:                                        \n"
    "movdqu    " MEMACCESS(0) ",%%xmm1         \n"
    "lea       " MEMLEA(0x10, 0) ",%0          \n"
    "pmulld    %%xmm6,%%xmm0                   \n"
    "movdqa    %5,%%xmm5                       \n"
    "movdqa    %%xmm1,%%xmm2                   \n"
    "punpcklbw %%xmm7,%%xmm2                   \n"
    "movdqa    %%xmm2,%%xmm3                   \n"
    "punpcklwd %%xmm7,%%xmm3                   \n"
    "pmulld    %%xmm5,%%xmm3                   \n"
    "movdqa    %6,%%xmm5                       \n"
    "movdqa    %%xmm2,%%xmm4                   \n"
    "punpckhwd %%xmm7,%%xmm4                   \n"
    "pmulld    %%xmm5,%%xmm4                   \n"
    "movdqa    %7,%%xmm5                       \n"
    "punpckhbw %%xmm7,%%xmm1                   \n"
    "movdqa    %%xmm1,%%xmm2                   \n"
    "punpcklwd %%xmm7,%%xmm2                   \n"
    "pmulld    %%xmm5,%%xmm2                   \n"
    "movdqa    %8,%%xmm5                       \n"
    "punpckhwd %%xmm7,%%xmm1                   \n"
    "pmulld    %%xmm5,%%xmm1                   \n"
    "paddd     %%xmm4,%%xmm3                   \n"
    "paddd     %%xmm2,%%xmm1                   \n"
    "paddd     %%xmm3,%%xmm1                   \n"
    "pshufd    $0xe,%%xmm1,%%xmm2              \n"
    "paddd     %%xmm2,%%xmm1                   \n"
    "pshufd    $0x1,%%xmm1,%%xmm2              \n"
    "paddd     %%xmm2,%%xmm1                   \n"
    "paddd     %%xmm1,%%xmm0                   \n"
    "sub       $0x10,%1                        \n"
    "jg        1b                              \n"
    "movd      %%xmm0,%3                       \n"
  : "+r"(src),          // %0
    "+r"(count),        // %1
    "+rm"(seed),        // %2
    "=g"(hash)          // %3
  : "m"(kHash16x33),    // %4
    "m"(kHashMul0),     // %5
    "m"(kHashMul1),     // %6
    "m"(kHashMul2),     // %7
    "m"(kHashMul3)      // %8
  : "memory", "cc"
    , "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7"
  );  // NOLINT
  return hash;
}
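// Illustrative scalar sketch (not part of the original LibYuv source, and the
// name is hypothetical): the hash computed by HashDjb2_SSE41 above. The SSE4.1
// path applies 16 djb2 steps per iteration by multiplying the running hash by
// 33^16 (kHash16x33) and the 16 bytes by the powers of 33 in kHashMul0..3.
static uint32 HashDjb2_ScalarSketch(const uint8* src, int count, uint32 seed) {
  uint32 hash = seed;
  for (int i = 0; i < count; ++i) {
    hash = hash * 33u + src[i];  // classic djb2 recurrence
  }
  return hash;
}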
#endif  // defined(__x86_64__) || (defined(__i386__) && !defined(__pic__))

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif
