| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. | 2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 81 #define cpuid(func,a,b,c,d)\ | 81 #define cpuid(func,a,b,c,d)\ |
| 82 __asm mov eax, func\ | 82 __asm mov eax, func\ |
| 83 __asm cpuid\ | 83 __asm cpuid\ |
| 84 __asm mov a, eax\ | 84 __asm mov a, eax\ |
| 85 __asm mov b, ebx\ | 85 __asm mov b, ebx\ |
| 86 __asm mov c, ecx\ | 86 __asm mov c, ecx\ |
| 87 __asm mov d, edx | 87 __asm mov d, edx |
| 88 #endif | 88 #endif |
| 89 #endif /* end others */ | 89 #endif /* end others */ |
| 90 | 90 |
| 91 #define HAS_MMX 0x01 | 91 #define HAS_MMX 0x01 |
| 92 #define HAS_SSE 0x02 | 92 #define HAS_SSE 0x02 |
| 93 #define HAS_SSE2 0x04 | 93 #define HAS_SSE2 0x04 |
| 94 #define HAS_SSE3 0x08 | 94 #define HAS_SSE3 0x08 |
| 95 #define HAS_SSSE3 0x10 | 95 #define HAS_SSSE3 0x10 |
| 96 #define HAS_SSE4_1 0x20 | 96 #define HAS_SSE4_1 0x20 |
| 97 #define HAS_AVX 0x40 |
| 98 #define HAS_AVX2 0x80 |
| 97 #ifndef BIT | 99 #ifndef BIT |
| 98 #define BIT(n) (1<<n) | 100 #define BIT(n) (1<<(n)) /* NOTE(review): parenthesize the macro argument to avoid precedence surprises when n is an expression */ |
| 99 #endif | 101 #endif |
| 100 | 102 |
| 101 static int | 103 static int |
| 102 x86_simd_caps(void) { | 104 x86_simd_caps(void) { |
| 103 unsigned int flags = 0; | 105 unsigned int flags = 0; |
| 104 unsigned int mask = ~0; | 106 unsigned int mask = ~0; |
| 105 unsigned int reg_eax, reg_ebx, reg_ecx, reg_edx; | 107 unsigned int reg_eax, reg_ebx, reg_ecx, reg_edx; |
| 106 char *env; | 108 char *env; |
| (...skipping 18 matching lines...) Expand all Loading... |
| 125 | 127 |
| 126 /* Get the standard feature flags */ | 128 /* Get the standard feature flags */ |
| 127 cpuid(1, reg_eax, reg_ebx, reg_ecx, reg_edx); | 129 cpuid(1, reg_eax, reg_ebx, reg_ecx, reg_edx); |
| 128 | 130 |
| 129 if (reg_edx & BIT(23)) flags |= HAS_MMX; | 131 if (reg_edx & BIT(23)) flags |= HAS_MMX; |
| 130 | 132 |
| 131 if (reg_edx & BIT(25)) flags |= HAS_SSE; /* aka xmm */ | 133 if (reg_edx & BIT(25)) flags |= HAS_SSE; /* aka xmm */ |
| 132 | 134 |
| 133 if (reg_edx & BIT(26)) flags |= HAS_SSE2; /* aka wmt */ | 135 if (reg_edx & BIT(26)) flags |= HAS_SSE2; /* aka wmt */ |
| 134 | 136 |
| 135 if (reg_ecx & BIT(0)) flags |= HAS_SSE3; | 137 if (reg_ecx & BIT(0)) flags |= HAS_SSE3; |
| 136 | 138 |
| 137 if (reg_ecx & BIT(9)) flags |= HAS_SSSE3; | 139 if (reg_ecx & BIT(9)) flags |= HAS_SSSE3; |
| 138 | 140 |
| 139 if (reg_ecx & BIT(19)) flags |= HAS_SSE4_1; | 141 if (reg_ecx & BIT(19)) flags |= HAS_SSE4_1; |
| 140 | 142 |
| 143 if (reg_ecx & BIT(28)) flags |= HAS_AVX; /* NOTE(review): CPUID.1:ECX[28] only reports CPU support; also check OSXSAVE (ECX[27]) and XGETBV/XCR0 YMM-state bits before claiming AVX is usable */ |
| 144 |
| 145 if (reg_ebx & BIT(5)) flags |= HAS_AVX2; /* BUG: reg_ebx here holds CPUID leaf-1 EBX (from cpuid(1, ...) above); AVX2 is reported in CPUID leaf 7 (EAX=7, ECX=0) EBX bit 5 — a second cpuid query is required */ |
| 146 |
| 141 return flags & mask; | 147 return flags & mask; |
| 142 } | 148 } |
| 143 | 149 |
| 144 vpx_cpu_t vpx_x86_vendor(void); | 150 vpx_cpu_t vpx_x86_vendor(void); |
| 145 | 151 |
| 146 #if ARCH_X86_64 && defined(_MSC_VER) | 152 #if ARCH_X86_64 && defined(_MSC_VER) |
| 147 unsigned __int64 __rdtsc(void); | 153 unsigned __int64 __rdtsc(void); |
| 148 #pragma intrinsic(__rdtsc) | 154 #pragma intrinsic(__rdtsc) |
| 149 #endif | 155 #endif |
| 150 static unsigned int | 156 static unsigned int |
| (...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 228 x87_set_double_precision(void) { | 234 x87_set_double_precision(void) { |
| 229 unsigned short mode = x87_get_control_word(); | 235 unsigned short mode = x87_get_control_word(); |
| 230 x87_set_control_word((mode&~0x300) | 0x200); | 236 x87_set_control_word((mode&~0x300) | 0x200); |
| 231 return mode; | 237 return mode; |
| 232 } | 238 } |
| 233 | 239 |
| 234 | 240 |
| 235 extern void vpx_reset_mmx_state(void); | 241 extern void vpx_reset_mmx_state(void); |
| 236 #endif | 242 #endif |
| 237 | 243 |
| OLD | NEW |