Chromium Code Reviews

Side by Side Diff: source/config/win/ia32/vp9_rtcd.h

Issue 232133009: libvpx: Pull from upstream (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/deps/third_party/libvpx/
Patch Set: Created 6 years, 8 months ago
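The file under review is the generated run-time CPU detection (RTCD) header for VP9 on 32-bit Windows: each optimisable routine is exposed as an RTCD_EXTERN function pointer, and setup_rtcd_internal() reads x86_simd_caps() once and repoints every entry at the fastest variant the CPU supports (SSE, SSE2, SSE3, SSSE3, SSE4.1). This upstream pull adds the vp9_get_sse_sum_16x16 entry, declared alongside vp9_get16x16var_sse2 and upgraded to it when SSE2 is available. The sketch below shows the same dispatch pattern in a self-contained form; the sum_sq_* names, the fake capability probe, and the HAS_SSE2 value are illustrative stand-ins and are not part of libvpx.

/* Minimal, self-contained sketch of the RTCD dispatch pattern this header
 * implements.  Function names (sum_sq_c, sum_sq_sse2) and the HAS_SSE2 bit
 * are assumptions for illustration; the real table is generated by libvpx
 * and filled in by setup_rtcd_internal() from x86_simd_caps(). */
#include <stdint.h>
#include <stdio.h>

#define HAS_SSE2 0x08  /* assumed flag bit, for illustration only */

/* Plain-C fallback and a stand-in for an SSE2-optimised variant. */
static unsigned int sum_sq_c(const int16_t *v, int n) {
  unsigned int acc = 0;
  for (int i = 0; i < n; ++i) acc += (unsigned int)(v[i] * v[i]);
  return acc;
}
static unsigned int sum_sq_sse2(const int16_t *v, int n) {
  return sum_sq_c(v, n);  /* same result; a real build would use SSE2 here */
}

/* The dispatch slot callers use (an RTCD_EXTERN function pointer in the header). */
static unsigned int (*sum_sq)(const int16_t *, int);

/* Stand-in for x86_simd_caps(): pretend the CPU reports SSE2. */
static int fake_simd_caps(void) { return HAS_SSE2; }

/* Mirrors setup_rtcd_internal(): start at the C version, upgrade per flag. */
static void setup_dispatch(void) {
  int flags = fake_simd_caps();
  sum_sq = sum_sq_c;                          /* safe default */
  if (flags & HAS_SSE2) sum_sq = sum_sq_sse2; /* upgrade when the CPU allows */
}

int main(void) {
  const int16_t block[4] = {1, -2, 3, -4};
  setup_dispatch();
  printf("sum of squares = %u\n", sum_sq(block, 4));  /* prints 30 */
  return 0;
}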
1 #ifndef VP9_RTCD_H_ 1 #ifndef VP9_RTCD_H_
2 #define VP9_RTCD_H_ 2 #define VP9_RTCD_H_
3 3
4 #ifdef RTCD_C 4 #ifdef RTCD_C
5 #define RTCD_EXTERN 5 #define RTCD_EXTERN
6 #else 6 #else
7 #define RTCD_EXTERN extern 7 #define RTCD_EXTERN extern
8 #endif 8 #endif
9 9
10 #ifdef __cplusplus 10 #ifdef __cplusplus
(...skipping 253 matching lines...)
264 RTCD_EXTERN int (*vp9_full_search_sad)(const struct macroblock *x, const struct mv *ref_mv, int sad_per_bit, int distance, const struct vp9_variance_vtable *fn_ptr, DEC_MVCOSTS, const struct mv *center_mv, struct mv *best_mv); 264 RTCD_EXTERN int (*vp9_full_search_sad)(const struct macroblock *x, const struct mv *ref_mv, int sad_per_bit, int distance, const struct vp9_variance_vtable *fn_ptr, DEC_MVCOSTS, const struct mv *center_mv, struct mv *best_mv);
265 265
266 void vp9_fwht4x4_c(const int16_t *input, int16_t *output, int stride); 266 void vp9_fwht4x4_c(const int16_t *input, int16_t *output, int stride);
267 #define vp9_fwht4x4 vp9_fwht4x4_c 267 #define vp9_fwht4x4 vp9_fwht4x4_c
268 268
269 unsigned int vp9_get_mb_ss_c(const int16_t *); 269 unsigned int vp9_get_mb_ss_c(const int16_t *);
270 unsigned int vp9_get_mb_ss_mmx(const int16_t *); 270 unsigned int vp9_get_mb_ss_mmx(const int16_t *);
271 unsigned int vp9_get_mb_ss_sse2(const int16_t *); 271 unsigned int vp9_get_mb_ss_sse2(const int16_t *);
272 RTCD_EXTERN unsigned int (*vp9_get_mb_ss)(const int16_t *); 272 RTCD_EXTERN unsigned int (*vp9_get_mb_ss)(const int16_t *);
273 273
274 void vp9_get_sse_sum_16x16_c(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum);
275 void vp9_get16x16var_sse2(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum);
276 RTCD_EXTERN void (*vp9_get_sse_sum_16x16)(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum);
277
274 void vp9_get_sse_sum_8x8_c(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum); 278 void vp9_get_sse_sum_8x8_c(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum);
275 void vp9_get8x8var_sse2(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum); 279 void vp9_get8x8var_sse2(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum);
276 RTCD_EXTERN void (*vp9_get_sse_sum_8x8)(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum); 280 RTCD_EXTERN void (*vp9_get_sse_sum_8x8)(const uint8_t *src_ptr, int source_stride, const uint8_t *ref_ptr, int ref_stride, unsigned int *sse, int *sum);
277 281
278 void vp9_h_predictor_16x16_c(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left); 282 void vp9_h_predictor_16x16_c(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left);
279 void vp9_h_predictor_16x16_ssse3(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left); 283 void vp9_h_predictor_16x16_ssse3(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left);
280 RTCD_EXTERN void (*vp9_h_predictor_16x16)(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left); 284 RTCD_EXTERN void (*vp9_h_predictor_16x16)(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left);
281 285
282 void vp9_h_predictor_32x32_c(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left); 286 void vp9_h_predictor_32x32_c(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left);
283 void vp9_h_predictor_32x32_ssse3(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left); 287 void vp9_h_predictor_32x32_ssse3(uint8_t *dst, ptrdiff_t y_stride, const uint8_t *above, const uint8_t *left);
(...skipping 618 matching lines...)
902 void vp9_rtcd(void); 906 void vp9_rtcd(void);
903 907
904 #ifdef RTCD_C 908 #ifdef RTCD_C
905 #include "vpx_ports/x86.h" 909 #include "vpx_ports/x86.h"
906 static void setup_rtcd_internal(void) 910 static void setup_rtcd_internal(void)
907 { 911 {
908 int flags = x86_simd_caps(); 912 int flags = x86_simd_caps();
909 913
910 (void)flags; 914 (void)flags;
911 915
912
913
914
915 vp9_block_error = vp9_block_error_c; 916 vp9_block_error = vp9_block_error_c;
916 if (flags & HAS_SSE2) vp9_block_error = vp9_block_error_sse2; 917 if (flags & HAS_SSE2) vp9_block_error = vp9_block_error_sse2;
917
918 vp9_convolve8 = vp9_convolve8_c; 918 vp9_convolve8 = vp9_convolve8_c;
919 if (flags & HAS_SSE2) vp9_convolve8 = vp9_convolve8_sse2; 919 if (flags & HAS_SSE2) vp9_convolve8 = vp9_convolve8_sse2;
920 if (flags & HAS_SSSE3) vp9_convolve8 = vp9_convolve8_ssse3; 920 if (flags & HAS_SSSE3) vp9_convolve8 = vp9_convolve8_ssse3;
921
922 vp9_convolve8_avg = vp9_convolve8_avg_c; 921 vp9_convolve8_avg = vp9_convolve8_avg_c;
923 if (flags & HAS_SSE2) vp9_convolve8_avg = vp9_convolve8_avg_sse2; 922 if (flags & HAS_SSE2) vp9_convolve8_avg = vp9_convolve8_avg_sse2;
924 if (flags & HAS_SSSE3) vp9_convolve8_avg = vp9_convolve8_avg_ssse3; 923 if (flags & HAS_SSSE3) vp9_convolve8_avg = vp9_convolve8_avg_ssse3;
925
926 vp9_convolve8_avg_horiz = vp9_convolve8_avg_horiz_c; 924 vp9_convolve8_avg_horiz = vp9_convolve8_avg_horiz_c;
927 if (flags & HAS_SSE2) vp9_convolve8_avg_horiz = vp9_convolve8_avg_horiz_sse2; 925 if (flags & HAS_SSE2) vp9_convolve8_avg_horiz = vp9_convolve8_avg_horiz_sse2;
928 if (flags & HAS_SSSE3) vp9_convolve8_avg_horiz = vp9_convolve8_avg_horiz_ssse3; 926 if (flags & HAS_SSSE3) vp9_convolve8_avg_horiz = vp9_convolve8_avg_horiz_ssse3;
929
930 vp9_convolve8_avg_vert = vp9_convolve8_avg_vert_c; 927 vp9_convolve8_avg_vert = vp9_convolve8_avg_vert_c;
931 if (flags & HAS_SSE2) vp9_convolve8_avg_vert = vp9_convolve8_avg_vert_sse2; 928 if (flags & HAS_SSE2) vp9_convolve8_avg_vert = vp9_convolve8_avg_vert_sse2;
932 if (flags & HAS_SSSE3) vp9_convolve8_avg_vert = vp9_convolve8_avg_vert_ssse3 ; 929 if (flags & HAS_SSSE3) vp9_convolve8_avg_vert = vp9_convolve8_avg_vert_ssse3 ;
933
934 vp9_convolve8_horiz = vp9_convolve8_horiz_c; 930 vp9_convolve8_horiz = vp9_convolve8_horiz_c;
935 if (flags & HAS_SSE2) vp9_convolve8_horiz = vp9_convolve8_horiz_sse2; 931 if (flags & HAS_SSE2) vp9_convolve8_horiz = vp9_convolve8_horiz_sse2;
936 if (flags & HAS_SSSE3) vp9_convolve8_horiz = vp9_convolve8_horiz_ssse3; 932 if (flags & HAS_SSSE3) vp9_convolve8_horiz = vp9_convolve8_horiz_ssse3;
937
938 vp9_convolve8_vert = vp9_convolve8_vert_c; 933 vp9_convolve8_vert = vp9_convolve8_vert_c;
939 if (flags & HAS_SSE2) vp9_convolve8_vert = vp9_convolve8_vert_sse2; 934 if (flags & HAS_SSE2) vp9_convolve8_vert = vp9_convolve8_vert_sse2;
940 if (flags & HAS_SSSE3) vp9_convolve8_vert = vp9_convolve8_vert_ssse3; 935 if (flags & HAS_SSSE3) vp9_convolve8_vert = vp9_convolve8_vert_ssse3;
941
942 vp9_convolve_avg = vp9_convolve_avg_c; 936 vp9_convolve_avg = vp9_convolve_avg_c;
943 if (flags & HAS_SSE2) vp9_convolve_avg = vp9_convolve_avg_sse2; 937 if (flags & HAS_SSE2) vp9_convolve_avg = vp9_convolve_avg_sse2;
944
945 vp9_convolve_copy = vp9_convolve_copy_c; 938 vp9_convolve_copy = vp9_convolve_copy_c;
946 if (flags & HAS_SSE2) vp9_convolve_copy = vp9_convolve_copy_sse2; 939 if (flags & HAS_SSE2) vp9_convolve_copy = vp9_convolve_copy_sse2;
947
948
949
950
951
952
953
954
955
956 vp9_d153_predictor_16x16 = vp9_d153_predictor_16x16_c; 940 vp9_d153_predictor_16x16 = vp9_d153_predictor_16x16_c;
957 if (flags & HAS_SSSE3) vp9_d153_predictor_16x16 = vp9_d153_predictor_16x16_ssse3; 941 if (flags & HAS_SSSE3) vp9_d153_predictor_16x16 = vp9_d153_predictor_16x16_ssse3;
958
959
960 vp9_d153_predictor_4x4 = vp9_d153_predictor_4x4_c; 942 vp9_d153_predictor_4x4 = vp9_d153_predictor_4x4_c;
961 if (flags & HAS_SSSE3) vp9_d153_predictor_4x4 = vp9_d153_predictor_4x4_ssse3; 943 if (flags & HAS_SSSE3) vp9_d153_predictor_4x4 = vp9_d153_predictor_4x4_ssse3;
962
963 vp9_d153_predictor_8x8 = vp9_d153_predictor_8x8_c; 944 vp9_d153_predictor_8x8 = vp9_d153_predictor_8x8_c;
964 if (flags & HAS_SSSE3) vp9_d153_predictor_8x8 = vp9_d153_predictor_8x8_ssse3; 945 if (flags & HAS_SSSE3) vp9_d153_predictor_8x8 = vp9_d153_predictor_8x8_ssse3;
965
966 vp9_d207_predictor_16x16 = vp9_d207_predictor_16x16_c; 946 vp9_d207_predictor_16x16 = vp9_d207_predictor_16x16_c;
967 if (flags & HAS_SSSE3) vp9_d207_predictor_16x16 = vp9_d207_predictor_16x16_ssse3; 947 if (flags & HAS_SSSE3) vp9_d207_predictor_16x16 = vp9_d207_predictor_16x16_ssse3;
968
969 vp9_d207_predictor_32x32 = vp9_d207_predictor_32x32_c; 948 vp9_d207_predictor_32x32 = vp9_d207_predictor_32x32_c;
970 if (flags & HAS_SSSE3) vp9_d207_predictor_32x32 = vp9_d207_predictor_32x32_ssse3; 949 if (flags & HAS_SSSE3) vp9_d207_predictor_32x32 = vp9_d207_predictor_32x32_ssse3;
971
972 vp9_d207_predictor_4x4 = vp9_d207_predictor_4x4_c; 950 vp9_d207_predictor_4x4 = vp9_d207_predictor_4x4_c;
973 if (flags & HAS_SSSE3) vp9_d207_predictor_4x4 = vp9_d207_predictor_4x4_ssse3; 951 if (flags & HAS_SSSE3) vp9_d207_predictor_4x4 = vp9_d207_predictor_4x4_ssse3;
974
975 vp9_d207_predictor_8x8 = vp9_d207_predictor_8x8_c; 952 vp9_d207_predictor_8x8 = vp9_d207_predictor_8x8_c;
976 if (flags & HAS_SSSE3) vp9_d207_predictor_8x8 = vp9_d207_predictor_8x8_ssse3; 953 if (flags & HAS_SSSE3) vp9_d207_predictor_8x8 = vp9_d207_predictor_8x8_ssse3;
977
978 vp9_d45_predictor_16x16 = vp9_d45_predictor_16x16_c; 954 vp9_d45_predictor_16x16 = vp9_d45_predictor_16x16_c;
979 if (flags & HAS_SSSE3) vp9_d45_predictor_16x16 = vp9_d45_predictor_16x16_ssse3; 955 if (flags & HAS_SSSE3) vp9_d45_predictor_16x16 = vp9_d45_predictor_16x16_ssse3;
980
981 vp9_d45_predictor_32x32 = vp9_d45_predictor_32x32_c; 956 vp9_d45_predictor_32x32 = vp9_d45_predictor_32x32_c;
982 if (flags & HAS_SSSE3) vp9_d45_predictor_32x32 = vp9_d45_predictor_32x32_ssse3; 957 if (flags & HAS_SSSE3) vp9_d45_predictor_32x32 = vp9_d45_predictor_32x32_ssse3;
983
984 vp9_d45_predictor_4x4 = vp9_d45_predictor_4x4_c; 958 vp9_d45_predictor_4x4 = vp9_d45_predictor_4x4_c;
985 if (flags & HAS_SSSE3) vp9_d45_predictor_4x4 = vp9_d45_predictor_4x4_ssse3; 959 if (flags & HAS_SSSE3) vp9_d45_predictor_4x4 = vp9_d45_predictor_4x4_ssse3;
986
987 vp9_d45_predictor_8x8 = vp9_d45_predictor_8x8_c; 960 vp9_d45_predictor_8x8 = vp9_d45_predictor_8x8_c;
988 if (flags & HAS_SSSE3) vp9_d45_predictor_8x8 = vp9_d45_predictor_8x8_ssse3; 961 if (flags & HAS_SSSE3) vp9_d45_predictor_8x8 = vp9_d45_predictor_8x8_ssse3;
989
990 vp9_d63_predictor_16x16 = vp9_d63_predictor_16x16_c; 962 vp9_d63_predictor_16x16 = vp9_d63_predictor_16x16_c;
991 if (flags & HAS_SSSE3) vp9_d63_predictor_16x16 = vp9_d63_predictor_16x16_ssse3; 963 if (flags & HAS_SSSE3) vp9_d63_predictor_16x16 = vp9_d63_predictor_16x16_ssse3;
992
993 vp9_d63_predictor_32x32 = vp9_d63_predictor_32x32_c; 964 vp9_d63_predictor_32x32 = vp9_d63_predictor_32x32_c;
994 if (flags & HAS_SSSE3) vp9_d63_predictor_32x32 = vp9_d63_predictor_32x32_ssse3; 965 if (flags & HAS_SSSE3) vp9_d63_predictor_32x32 = vp9_d63_predictor_32x32_ssse3;
995
996 vp9_d63_predictor_4x4 = vp9_d63_predictor_4x4_c; 966 vp9_d63_predictor_4x4 = vp9_d63_predictor_4x4_c;
997 if (flags & HAS_SSSE3) vp9_d63_predictor_4x4 = vp9_d63_predictor_4x4_ssse3; 967 if (flags & HAS_SSSE3) vp9_d63_predictor_4x4 = vp9_d63_predictor_4x4_ssse3;
998
999 vp9_d63_predictor_8x8 = vp9_d63_predictor_8x8_c; 968 vp9_d63_predictor_8x8 = vp9_d63_predictor_8x8_c;
1000 if (flags & HAS_SSSE3) vp9_d63_predictor_8x8 = vp9_d63_predictor_8x8_ssse3; 969 if (flags & HAS_SSSE3) vp9_d63_predictor_8x8 = vp9_d63_predictor_8x8_ssse3;
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010 vp9_dc_predictor_16x16 = vp9_dc_predictor_16x16_c; 970 vp9_dc_predictor_16x16 = vp9_dc_predictor_16x16_c;
1011 if (flags & HAS_SSE2) vp9_dc_predictor_16x16 = vp9_dc_predictor_16x16_sse2; 971 if (flags & HAS_SSE2) vp9_dc_predictor_16x16 = vp9_dc_predictor_16x16_sse2;
1012
1013 vp9_dc_predictor_32x32 = vp9_dc_predictor_32x32_c; 972 vp9_dc_predictor_32x32 = vp9_dc_predictor_32x32_c;
1014 if (flags & HAS_SSE2) vp9_dc_predictor_32x32 = vp9_dc_predictor_32x32_sse2; 973 if (flags & HAS_SSE2) vp9_dc_predictor_32x32 = vp9_dc_predictor_32x32_sse2;
1015
1016 vp9_dc_predictor_4x4 = vp9_dc_predictor_4x4_c; 974 vp9_dc_predictor_4x4 = vp9_dc_predictor_4x4_c;
1017 if (flags & HAS_SSE) vp9_dc_predictor_4x4 = vp9_dc_predictor_4x4_sse; 975 if (flags & HAS_SSE) vp9_dc_predictor_4x4 = vp9_dc_predictor_4x4_sse;
1018
1019 vp9_dc_predictor_8x8 = vp9_dc_predictor_8x8_c; 976 vp9_dc_predictor_8x8 = vp9_dc_predictor_8x8_c;
1020 if (flags & HAS_SSE) vp9_dc_predictor_8x8 = vp9_dc_predictor_8x8_sse; 977 if (flags & HAS_SSE) vp9_dc_predictor_8x8 = vp9_dc_predictor_8x8_sse;
1021
1022
1023
1024
1025
1026 vp9_diamond_search_sad = vp9_diamond_search_sad_c; 978 vp9_diamond_search_sad = vp9_diamond_search_sad_c;
1027 if (flags & HAS_SSE3) vp9_diamond_search_sad = vp9_diamond_search_sadx4; 979 if (flags & HAS_SSE3) vp9_diamond_search_sad = vp9_diamond_search_sadx4;
1028
1029 vp9_fdct16x16 = vp9_fdct16x16_c; 980 vp9_fdct16x16 = vp9_fdct16x16_c;
1030 if (flags & HAS_SSE2) vp9_fdct16x16 = vp9_fdct16x16_sse2; 981 if (flags & HAS_SSE2) vp9_fdct16x16 = vp9_fdct16x16_sse2;
1031
1032 vp9_fdct32x32 = vp9_fdct32x32_c; 982 vp9_fdct32x32 = vp9_fdct32x32_c;
1033 if (flags & HAS_SSE2) vp9_fdct32x32 = vp9_fdct32x32_sse2; 983 if (flags & HAS_SSE2) vp9_fdct32x32 = vp9_fdct32x32_sse2;
1034
1035 vp9_fdct32x32_rd = vp9_fdct32x32_rd_c; 984 vp9_fdct32x32_rd = vp9_fdct32x32_rd_c;
1036 if (flags & HAS_SSE2) vp9_fdct32x32_rd = vp9_fdct32x32_rd_sse2; 985 if (flags & HAS_SSE2) vp9_fdct32x32_rd = vp9_fdct32x32_rd_sse2;
1037
1038 vp9_fdct4x4 = vp9_fdct4x4_c; 986 vp9_fdct4x4 = vp9_fdct4x4_c;
1039 if (flags & HAS_SSE2) vp9_fdct4x4 = vp9_fdct4x4_sse2; 987 if (flags & HAS_SSE2) vp9_fdct4x4 = vp9_fdct4x4_sse2;
1040
1041 vp9_fdct8x8 = vp9_fdct8x8_c; 988 vp9_fdct8x8 = vp9_fdct8x8_c;
1042 if (flags & HAS_SSE2) vp9_fdct8x8 = vp9_fdct8x8_sse2; 989 if (flags & HAS_SSE2) vp9_fdct8x8 = vp9_fdct8x8_sse2;
1043
1044 vp9_fht16x16 = vp9_fht16x16_c; 990 vp9_fht16x16 = vp9_fht16x16_c;
1045 if (flags & HAS_SSE2) vp9_fht16x16 = vp9_fht16x16_sse2; 991 if (flags & HAS_SSE2) vp9_fht16x16 = vp9_fht16x16_sse2;
1046
1047 vp9_fht4x4 = vp9_fht4x4_c; 992 vp9_fht4x4 = vp9_fht4x4_c;
1048 if (flags & HAS_SSE2) vp9_fht4x4 = vp9_fht4x4_sse2; 993 if (flags & HAS_SSE2) vp9_fht4x4 = vp9_fht4x4_sse2;
1049
1050 vp9_fht8x8 = vp9_fht8x8_c; 994 vp9_fht8x8 = vp9_fht8x8_c;
1051 if (flags & HAS_SSE2) vp9_fht8x8 = vp9_fht8x8_sse2; 995 if (flags & HAS_SSE2) vp9_fht8x8 = vp9_fht8x8_sse2;
1052
1053
1054 vp9_full_search_sad = vp9_full_search_sad_c; 996 vp9_full_search_sad = vp9_full_search_sad_c;
1055 if (flags & HAS_SSE3) vp9_full_search_sad = vp9_full_search_sadx3; 997 if (flags & HAS_SSE3) vp9_full_search_sad = vp9_full_search_sadx3;
1056 if (flags & HAS_SSE4_1) vp9_full_search_sad = vp9_full_search_sadx8; 998 if (flags & HAS_SSE4_1) vp9_full_search_sad = vp9_full_search_sadx8;
1057
1058
1059 vp9_get_mb_ss = vp9_get_mb_ss_c; 999 vp9_get_mb_ss = vp9_get_mb_ss_c;
1060 if (flags & HAS_MMX) vp9_get_mb_ss = vp9_get_mb_ss_mmx; 1000 if (flags & HAS_MMX) vp9_get_mb_ss = vp9_get_mb_ss_mmx;
1061 if (flags & HAS_SSE2) vp9_get_mb_ss = vp9_get_mb_ss_sse2; 1001 if (flags & HAS_SSE2) vp9_get_mb_ss = vp9_get_mb_ss_sse2;
1062 1002 vp9_get_sse_sum_16x16 = vp9_get_sse_sum_16x16_c;
1003 if (flags & HAS_SSE2) vp9_get_sse_sum_16x16 = vp9_get16x16var_sse2;
1063 vp9_get_sse_sum_8x8 = vp9_get_sse_sum_8x8_c; 1004 vp9_get_sse_sum_8x8 = vp9_get_sse_sum_8x8_c;
1064 if (flags & HAS_SSE2) vp9_get_sse_sum_8x8 = vp9_get8x8var_sse2; 1005 if (flags & HAS_SSE2) vp9_get_sse_sum_8x8 = vp9_get8x8var_sse2;
1065
1066 vp9_h_predictor_16x16 = vp9_h_predictor_16x16_c; 1006 vp9_h_predictor_16x16 = vp9_h_predictor_16x16_c;
1067 if (flags & HAS_SSSE3) vp9_h_predictor_16x16 = vp9_h_predictor_16x16_ssse3; 1007 if (flags & HAS_SSSE3) vp9_h_predictor_16x16 = vp9_h_predictor_16x16_ssse3;
1068
1069 vp9_h_predictor_32x32 = vp9_h_predictor_32x32_c; 1008 vp9_h_predictor_32x32 = vp9_h_predictor_32x32_c;
1070 if (flags & HAS_SSSE3) vp9_h_predictor_32x32 = vp9_h_predictor_32x32_ssse3; 1009 if (flags & HAS_SSSE3) vp9_h_predictor_32x32 = vp9_h_predictor_32x32_ssse3;
1071
1072 vp9_h_predictor_4x4 = vp9_h_predictor_4x4_c; 1010 vp9_h_predictor_4x4 = vp9_h_predictor_4x4_c;
1073 if (flags & HAS_SSSE3) vp9_h_predictor_4x4 = vp9_h_predictor_4x4_ssse3; 1011 if (flags & HAS_SSSE3) vp9_h_predictor_4x4 = vp9_h_predictor_4x4_ssse3;
1074
1075 vp9_h_predictor_8x8 = vp9_h_predictor_8x8_c; 1012 vp9_h_predictor_8x8 = vp9_h_predictor_8x8_c;
1076 if (flags & HAS_SSSE3) vp9_h_predictor_8x8 = vp9_h_predictor_8x8_ssse3; 1013 if (flags & HAS_SSSE3) vp9_h_predictor_8x8 = vp9_h_predictor_8x8_ssse3;
1077
1078 vp9_idct16x16_10_add = vp9_idct16x16_10_add_c; 1014 vp9_idct16x16_10_add = vp9_idct16x16_10_add_c;
1079 if (flags & HAS_SSE2) vp9_idct16x16_10_add = vp9_idct16x16_10_add_sse2; 1015 if (flags & HAS_SSE2) vp9_idct16x16_10_add = vp9_idct16x16_10_add_sse2;
1080
1081 vp9_idct16x16_1_add = vp9_idct16x16_1_add_c; 1016 vp9_idct16x16_1_add = vp9_idct16x16_1_add_c;
1082 if (flags & HAS_SSE2) vp9_idct16x16_1_add = vp9_idct16x16_1_add_sse2; 1017 if (flags & HAS_SSE2) vp9_idct16x16_1_add = vp9_idct16x16_1_add_sse2;
1083
1084 vp9_idct16x16_256_add = vp9_idct16x16_256_add_c; 1018 vp9_idct16x16_256_add = vp9_idct16x16_256_add_c;
1085 if (flags & HAS_SSE2) vp9_idct16x16_256_add = vp9_idct16x16_256_add_sse2; 1019 if (flags & HAS_SSE2) vp9_idct16x16_256_add = vp9_idct16x16_256_add_sse2;
1086
1087 vp9_idct32x32_1024_add = vp9_idct32x32_1024_add_c; 1020 vp9_idct32x32_1024_add = vp9_idct32x32_1024_add_c;
1088 if (flags & HAS_SSE2) vp9_idct32x32_1024_add = vp9_idct32x32_1024_add_sse2; 1021 if (flags & HAS_SSE2) vp9_idct32x32_1024_add = vp9_idct32x32_1024_add_sse2;
1089
1090 vp9_idct32x32_1_add = vp9_idct32x32_1_add_c; 1022 vp9_idct32x32_1_add = vp9_idct32x32_1_add_c;
1091 if (flags & HAS_SSE2) vp9_idct32x32_1_add = vp9_idct32x32_1_add_sse2; 1023 if (flags & HAS_SSE2) vp9_idct32x32_1_add = vp9_idct32x32_1_add_sse2;
1092
1093 vp9_idct32x32_34_add = vp9_idct32x32_34_add_c; 1024 vp9_idct32x32_34_add = vp9_idct32x32_34_add_c;
1094 if (flags & HAS_SSE2) vp9_idct32x32_34_add = vp9_idct32x32_34_add_sse2; 1025 if (flags & HAS_SSE2) vp9_idct32x32_34_add = vp9_idct32x32_34_add_sse2;
1095
1096 vp9_idct4x4_16_add = vp9_idct4x4_16_add_c; 1026 vp9_idct4x4_16_add = vp9_idct4x4_16_add_c;
1097 if (flags & HAS_SSE2) vp9_idct4x4_16_add = vp9_idct4x4_16_add_sse2; 1027 if (flags & HAS_SSE2) vp9_idct4x4_16_add = vp9_idct4x4_16_add_sse2;
1098
1099 vp9_idct4x4_1_add = vp9_idct4x4_1_add_c; 1028 vp9_idct4x4_1_add = vp9_idct4x4_1_add_c;
1100 if (flags & HAS_SSE2) vp9_idct4x4_1_add = vp9_idct4x4_1_add_sse2; 1029 if (flags & HAS_SSE2) vp9_idct4x4_1_add = vp9_idct4x4_1_add_sse2;
1101
1102 vp9_idct8x8_10_add = vp9_idct8x8_10_add_c; 1030 vp9_idct8x8_10_add = vp9_idct8x8_10_add_c;
1103 if (flags & HAS_SSE2) vp9_idct8x8_10_add = vp9_idct8x8_10_add_sse2; 1031 if (flags & HAS_SSE2) vp9_idct8x8_10_add = vp9_idct8x8_10_add_sse2;
1104
1105 vp9_idct8x8_1_add = vp9_idct8x8_1_add_c; 1032 vp9_idct8x8_1_add = vp9_idct8x8_1_add_c;
1106 if (flags & HAS_SSE2) vp9_idct8x8_1_add = vp9_idct8x8_1_add_sse2; 1033 if (flags & HAS_SSE2) vp9_idct8x8_1_add = vp9_idct8x8_1_add_sse2;
1107
1108 vp9_idct8x8_64_add = vp9_idct8x8_64_add_c; 1034 vp9_idct8x8_64_add = vp9_idct8x8_64_add_c;
1109 if (flags & HAS_SSE2) vp9_idct8x8_64_add = vp9_idct8x8_64_add_sse2; 1035 if (flags & HAS_SSE2) vp9_idct8x8_64_add = vp9_idct8x8_64_add_sse2;
1110
1111 vp9_iht16x16_256_add = vp9_iht16x16_256_add_c; 1036 vp9_iht16x16_256_add = vp9_iht16x16_256_add_c;
1112 if (flags & HAS_SSE2) vp9_iht16x16_256_add = vp9_iht16x16_256_add_sse2; 1037 if (flags & HAS_SSE2) vp9_iht16x16_256_add = vp9_iht16x16_256_add_sse2;
1113
1114 vp9_iht4x4_16_add = vp9_iht4x4_16_add_c; 1038 vp9_iht4x4_16_add = vp9_iht4x4_16_add_c;
1115 if (flags & HAS_SSE2) vp9_iht4x4_16_add = vp9_iht4x4_16_add_sse2; 1039 if (flags & HAS_SSE2) vp9_iht4x4_16_add = vp9_iht4x4_16_add_sse2;
1116
1117 vp9_iht8x8_64_add = vp9_iht8x8_64_add_c; 1040 vp9_iht8x8_64_add = vp9_iht8x8_64_add_c;
1118 if (flags & HAS_SSE2) vp9_iht8x8_64_add = vp9_iht8x8_64_add_sse2; 1041 if (flags & HAS_SSE2) vp9_iht8x8_64_add = vp9_iht8x8_64_add_sse2;
1119
1120
1121
1122 vp9_lpf_horizontal_16 = vp9_lpf_horizontal_16_c; 1042 vp9_lpf_horizontal_16 = vp9_lpf_horizontal_16_c;
1123 if (flags & HAS_SSE2) vp9_lpf_horizontal_16 = vp9_lpf_horizontal_16_sse2; 1043 if (flags & HAS_SSE2) vp9_lpf_horizontal_16 = vp9_lpf_horizontal_16_sse2;
1124
1125 vp9_lpf_horizontal_4 = vp9_lpf_horizontal_4_c; 1044 vp9_lpf_horizontal_4 = vp9_lpf_horizontal_4_c;
1126 if (flags & HAS_MMX) vp9_lpf_horizontal_4 = vp9_lpf_horizontal_4_mmx; 1045 if (flags & HAS_MMX) vp9_lpf_horizontal_4 = vp9_lpf_horizontal_4_mmx;
1127
1128 vp9_lpf_horizontal_4_dual = vp9_lpf_horizontal_4_dual_c; 1046 vp9_lpf_horizontal_4_dual = vp9_lpf_horizontal_4_dual_c;
1129 if (flags & HAS_SSE2) vp9_lpf_horizontal_4_dual = vp9_lpf_horizontal_4_dual_sse2; 1047 if (flags & HAS_SSE2) vp9_lpf_horizontal_4_dual = vp9_lpf_horizontal_4_dual_sse2;
1130
1131 vp9_lpf_horizontal_8 = vp9_lpf_horizontal_8_c; 1048 vp9_lpf_horizontal_8 = vp9_lpf_horizontal_8_c;
1132 if (flags & HAS_SSE2) vp9_lpf_horizontal_8 = vp9_lpf_horizontal_8_sse2; 1049 if (flags & HAS_SSE2) vp9_lpf_horizontal_8 = vp9_lpf_horizontal_8_sse2;
1133
1134 vp9_lpf_horizontal_8_dual = vp9_lpf_horizontal_8_dual_c; 1050 vp9_lpf_horizontal_8_dual = vp9_lpf_horizontal_8_dual_c;
1135 if (flags & HAS_SSE2) vp9_lpf_horizontal_8_dual = vp9_lpf_horizontal_8_dual_sse2; 1051 if (flags & HAS_SSE2) vp9_lpf_horizontal_8_dual = vp9_lpf_horizontal_8_dual_sse2;
1136
1137 vp9_lpf_vertical_16 = vp9_lpf_vertical_16_c; 1052 vp9_lpf_vertical_16 = vp9_lpf_vertical_16_c;
1138 if (flags & HAS_SSE2) vp9_lpf_vertical_16 = vp9_lpf_vertical_16_sse2; 1053 if (flags & HAS_SSE2) vp9_lpf_vertical_16 = vp9_lpf_vertical_16_sse2;
1139
1140 vp9_lpf_vertical_16_dual = vp9_lpf_vertical_16_dual_c; 1054 vp9_lpf_vertical_16_dual = vp9_lpf_vertical_16_dual_c;
1141 if (flags & HAS_SSE2) vp9_lpf_vertical_16_dual = vp9_lpf_vertical_16_dual_sse2; 1055 if (flags & HAS_SSE2) vp9_lpf_vertical_16_dual = vp9_lpf_vertical_16_dual_sse2;
1142
1143 vp9_lpf_vertical_4 = vp9_lpf_vertical_4_c; 1056 vp9_lpf_vertical_4 = vp9_lpf_vertical_4_c;
1144 if (flags & HAS_MMX) vp9_lpf_vertical_4 = vp9_lpf_vertical_4_mmx; 1057 if (flags & HAS_MMX) vp9_lpf_vertical_4 = vp9_lpf_vertical_4_mmx;
1145
1146 vp9_lpf_vertical_4_dual = vp9_lpf_vertical_4_dual_c; 1058 vp9_lpf_vertical_4_dual = vp9_lpf_vertical_4_dual_c;
1147 if (flags & HAS_SSE2) vp9_lpf_vertical_4_dual = vp9_lpf_vertical_4_dual_sse2; 1059 if (flags & HAS_SSE2) vp9_lpf_vertical_4_dual = vp9_lpf_vertical_4_dual_sse2;
1148
1149 vp9_lpf_vertical_8 = vp9_lpf_vertical_8_c; 1060 vp9_lpf_vertical_8 = vp9_lpf_vertical_8_c;
1150 if (flags & HAS_SSE2) vp9_lpf_vertical_8 = vp9_lpf_vertical_8_sse2; 1061 if (flags & HAS_SSE2) vp9_lpf_vertical_8 = vp9_lpf_vertical_8_sse2;
1151
1152 vp9_lpf_vertical_8_dual = vp9_lpf_vertical_8_dual_c; 1062 vp9_lpf_vertical_8_dual = vp9_lpf_vertical_8_dual_c;
1153 if (flags & HAS_SSE2) vp9_lpf_vertical_8_dual = vp9_lpf_vertical_8_dual_sse2; 1063 if (flags & HAS_SSE2) vp9_lpf_vertical_8_dual = vp9_lpf_vertical_8_dual_sse2;
1154
1155 vp9_mse16x16 = vp9_mse16x16_c; 1064 vp9_mse16x16 = vp9_mse16x16_c;
1156 if (flags & HAS_MMX) vp9_mse16x16 = vp9_mse16x16_mmx; 1065 if (flags & HAS_MMX) vp9_mse16x16 = vp9_mse16x16_mmx;
1157 if (flags & HAS_SSE2) vp9_mse16x16 = vp9_mse16x16_sse2; 1066 if (flags & HAS_SSE2) vp9_mse16x16 = vp9_mse16x16_sse2;
1158
1159
1160
1161
1162
1163
1164 vp9_refining_search_sad = vp9_refining_search_sad_c; 1067 vp9_refining_search_sad = vp9_refining_search_sad_c;
1165 if (flags & HAS_SSE3) vp9_refining_search_sad = vp9_refining_search_sadx4; 1068 if (flags & HAS_SSE3) vp9_refining_search_sad = vp9_refining_search_sadx4;
1166
1167 vp9_sad16x16 = vp9_sad16x16_c; 1069 vp9_sad16x16 = vp9_sad16x16_c;
1168 if (flags & HAS_MMX) vp9_sad16x16 = vp9_sad16x16_mmx; 1070 if (flags & HAS_MMX) vp9_sad16x16 = vp9_sad16x16_mmx;
1169 if (flags & HAS_SSE2) vp9_sad16x16 = vp9_sad16x16_sse2; 1071 if (flags & HAS_SSE2) vp9_sad16x16 = vp9_sad16x16_sse2;
1170
1171 vp9_sad16x16_avg = vp9_sad16x16_avg_c; 1072 vp9_sad16x16_avg = vp9_sad16x16_avg_c;
1172 if (flags & HAS_SSE2) vp9_sad16x16_avg = vp9_sad16x16_avg_sse2; 1073 if (flags & HAS_SSE2) vp9_sad16x16_avg = vp9_sad16x16_avg_sse2;
1173
1174 vp9_sad16x16x3 = vp9_sad16x16x3_c; 1074 vp9_sad16x16x3 = vp9_sad16x16x3_c;
1175 if (flags & HAS_SSE3) vp9_sad16x16x3 = vp9_sad16x16x3_sse3; 1075 if (flags & HAS_SSE3) vp9_sad16x16x3 = vp9_sad16x16x3_sse3;
1176 if (flags & HAS_SSSE3) vp9_sad16x16x3 = vp9_sad16x16x3_ssse3; 1076 if (flags & HAS_SSSE3) vp9_sad16x16x3 = vp9_sad16x16x3_ssse3;
1177
1178 vp9_sad16x16x4d = vp9_sad16x16x4d_c; 1077 vp9_sad16x16x4d = vp9_sad16x16x4d_c;
1179 if (flags & HAS_SSE2) vp9_sad16x16x4d = vp9_sad16x16x4d_sse2; 1078 if (flags & HAS_SSE2) vp9_sad16x16x4d = vp9_sad16x16x4d_sse2;
1180
1181
1182 vp9_sad16x32 = vp9_sad16x32_c; 1079 vp9_sad16x32 = vp9_sad16x32_c;
1183 if (flags & HAS_SSE2) vp9_sad16x32 = vp9_sad16x32_sse2; 1080 if (flags & HAS_SSE2) vp9_sad16x32 = vp9_sad16x32_sse2;
1184
1185 vp9_sad16x32_avg = vp9_sad16x32_avg_c; 1081 vp9_sad16x32_avg = vp9_sad16x32_avg_c;
1186 if (flags & HAS_SSE2) vp9_sad16x32_avg = vp9_sad16x32_avg_sse2; 1082 if (flags & HAS_SSE2) vp9_sad16x32_avg = vp9_sad16x32_avg_sse2;
1187
1188 vp9_sad16x32x4d = vp9_sad16x32x4d_c; 1083 vp9_sad16x32x4d = vp9_sad16x32x4d_c;
1189 if (flags & HAS_SSE2) vp9_sad16x32x4d = vp9_sad16x32x4d_sse2; 1084 if (flags & HAS_SSE2) vp9_sad16x32x4d = vp9_sad16x32x4d_sse2;
1190
1191 vp9_sad16x8 = vp9_sad16x8_c; 1085 vp9_sad16x8 = vp9_sad16x8_c;
1192 if (flags & HAS_MMX) vp9_sad16x8 = vp9_sad16x8_mmx; 1086 if (flags & HAS_MMX) vp9_sad16x8 = vp9_sad16x8_mmx;
1193 if (flags & HAS_SSE2) vp9_sad16x8 = vp9_sad16x8_sse2; 1087 if (flags & HAS_SSE2) vp9_sad16x8 = vp9_sad16x8_sse2;
1194
1195 vp9_sad16x8_avg = vp9_sad16x8_avg_c; 1088 vp9_sad16x8_avg = vp9_sad16x8_avg_c;
1196 if (flags & HAS_SSE2) vp9_sad16x8_avg = vp9_sad16x8_avg_sse2; 1089 if (flags & HAS_SSE2) vp9_sad16x8_avg = vp9_sad16x8_avg_sse2;
1197
1198 vp9_sad16x8x3 = vp9_sad16x8x3_c; 1090 vp9_sad16x8x3 = vp9_sad16x8x3_c;
1199 if (flags & HAS_SSE3) vp9_sad16x8x3 = vp9_sad16x8x3_sse3; 1091 if (flags & HAS_SSE3) vp9_sad16x8x3 = vp9_sad16x8x3_sse3;
1200 if (flags & HAS_SSSE3) vp9_sad16x8x3 = vp9_sad16x8x3_ssse3; 1092 if (flags & HAS_SSSE3) vp9_sad16x8x3 = vp9_sad16x8x3_ssse3;
1201
1202 vp9_sad16x8x4d = vp9_sad16x8x4d_c; 1093 vp9_sad16x8x4d = vp9_sad16x8x4d_c;
1203 if (flags & HAS_SSE2) vp9_sad16x8x4d = vp9_sad16x8x4d_sse2; 1094 if (flags & HAS_SSE2) vp9_sad16x8x4d = vp9_sad16x8x4d_sse2;
1204
1205
1206 vp9_sad32x16 = vp9_sad32x16_c; 1095 vp9_sad32x16 = vp9_sad32x16_c;
1207 if (flags & HAS_SSE2) vp9_sad32x16 = vp9_sad32x16_sse2; 1096 if (flags & HAS_SSE2) vp9_sad32x16 = vp9_sad32x16_sse2;
1208
1209 vp9_sad32x16_avg = vp9_sad32x16_avg_c; 1097 vp9_sad32x16_avg = vp9_sad32x16_avg_c;
1210 if (flags & HAS_SSE2) vp9_sad32x16_avg = vp9_sad32x16_avg_sse2; 1098 if (flags & HAS_SSE2) vp9_sad32x16_avg = vp9_sad32x16_avg_sse2;
1211
1212 vp9_sad32x16x4d = vp9_sad32x16x4d_c; 1099 vp9_sad32x16x4d = vp9_sad32x16x4d_c;
1213 if (flags & HAS_SSE2) vp9_sad32x16x4d = vp9_sad32x16x4d_sse2; 1100 if (flags & HAS_SSE2) vp9_sad32x16x4d = vp9_sad32x16x4d_sse2;
1214
1215 vp9_sad32x32 = vp9_sad32x32_c; 1101 vp9_sad32x32 = vp9_sad32x32_c;
1216 if (flags & HAS_SSE2) vp9_sad32x32 = vp9_sad32x32_sse2; 1102 if (flags & HAS_SSE2) vp9_sad32x32 = vp9_sad32x32_sse2;
1217
1218 vp9_sad32x32_avg = vp9_sad32x32_avg_c; 1103 vp9_sad32x32_avg = vp9_sad32x32_avg_c;
1219 if (flags & HAS_SSE2) vp9_sad32x32_avg = vp9_sad32x32_avg_sse2; 1104 if (flags & HAS_SSE2) vp9_sad32x32_avg = vp9_sad32x32_avg_sse2;
1220
1221
1222 vp9_sad32x32x4d = vp9_sad32x32x4d_c; 1105 vp9_sad32x32x4d = vp9_sad32x32x4d_c;
1223 if (flags & HAS_SSE2) vp9_sad32x32x4d = vp9_sad32x32x4d_sse2; 1106 if (flags & HAS_SSE2) vp9_sad32x32x4d = vp9_sad32x32x4d_sse2;
1224
1225
1226 vp9_sad32x64 = vp9_sad32x64_c; 1107 vp9_sad32x64 = vp9_sad32x64_c;
1227 if (flags & HAS_SSE2) vp9_sad32x64 = vp9_sad32x64_sse2; 1108 if (flags & HAS_SSE2) vp9_sad32x64 = vp9_sad32x64_sse2;
1228
1229 vp9_sad32x64_avg = vp9_sad32x64_avg_c; 1109 vp9_sad32x64_avg = vp9_sad32x64_avg_c;
1230 if (flags & HAS_SSE2) vp9_sad32x64_avg = vp9_sad32x64_avg_sse2; 1110 if (flags & HAS_SSE2) vp9_sad32x64_avg = vp9_sad32x64_avg_sse2;
1231
1232 vp9_sad32x64x4d = vp9_sad32x64x4d_c; 1111 vp9_sad32x64x4d = vp9_sad32x64x4d_c;
1233 if (flags & HAS_SSE2) vp9_sad32x64x4d = vp9_sad32x64x4d_sse2; 1112 if (flags & HAS_SSE2) vp9_sad32x64x4d = vp9_sad32x64x4d_sse2;
1234
1235 vp9_sad4x4 = vp9_sad4x4_c; 1113 vp9_sad4x4 = vp9_sad4x4_c;
1236 if (flags & HAS_MMX) vp9_sad4x4 = vp9_sad4x4_mmx; 1114 if (flags & HAS_MMX) vp9_sad4x4 = vp9_sad4x4_mmx;
1237 if (flags & HAS_SSE) vp9_sad4x4 = vp9_sad4x4_sse; 1115 if (flags & HAS_SSE) vp9_sad4x4 = vp9_sad4x4_sse;
1238
1239 vp9_sad4x4_avg = vp9_sad4x4_avg_c; 1116 vp9_sad4x4_avg = vp9_sad4x4_avg_c;
1240 if (flags & HAS_SSE) vp9_sad4x4_avg = vp9_sad4x4_avg_sse; 1117 if (flags & HAS_SSE) vp9_sad4x4_avg = vp9_sad4x4_avg_sse;
1241
1242 vp9_sad4x4x3 = vp9_sad4x4x3_c; 1118 vp9_sad4x4x3 = vp9_sad4x4x3_c;
1243 if (flags & HAS_SSE3) vp9_sad4x4x3 = vp9_sad4x4x3_sse3; 1119 if (flags & HAS_SSE3) vp9_sad4x4x3 = vp9_sad4x4x3_sse3;
1244
1245 vp9_sad4x4x4d = vp9_sad4x4x4d_c; 1120 vp9_sad4x4x4d = vp9_sad4x4x4d_c;
1246 if (flags & HAS_SSE) vp9_sad4x4x4d = vp9_sad4x4x4d_sse; 1121 if (flags & HAS_SSE) vp9_sad4x4x4d = vp9_sad4x4x4d_sse;
1247
1248
1249 vp9_sad4x8 = vp9_sad4x8_c; 1122 vp9_sad4x8 = vp9_sad4x8_c;
1250 if (flags & HAS_SSE) vp9_sad4x8 = vp9_sad4x8_sse; 1123 if (flags & HAS_SSE) vp9_sad4x8 = vp9_sad4x8_sse;
1251
1252 vp9_sad4x8_avg = vp9_sad4x8_avg_c; 1124 vp9_sad4x8_avg = vp9_sad4x8_avg_c;
1253 if (flags & HAS_SSE) vp9_sad4x8_avg = vp9_sad4x8_avg_sse; 1125 if (flags & HAS_SSE) vp9_sad4x8_avg = vp9_sad4x8_avg_sse;
1254
1255 vp9_sad4x8x4d = vp9_sad4x8x4d_c; 1126 vp9_sad4x8x4d = vp9_sad4x8x4d_c;
1256 if (flags & HAS_SSE) vp9_sad4x8x4d = vp9_sad4x8x4d_sse; 1127 if (flags & HAS_SSE) vp9_sad4x8x4d = vp9_sad4x8x4d_sse;
1257
1258
1259 vp9_sad64x32 = vp9_sad64x32_c; 1128 vp9_sad64x32 = vp9_sad64x32_c;
1260 if (flags & HAS_SSE2) vp9_sad64x32 = vp9_sad64x32_sse2; 1129 if (flags & HAS_SSE2) vp9_sad64x32 = vp9_sad64x32_sse2;
1261
1262 vp9_sad64x32_avg = vp9_sad64x32_avg_c; 1130 vp9_sad64x32_avg = vp9_sad64x32_avg_c;
1263 if (flags & HAS_SSE2) vp9_sad64x32_avg = vp9_sad64x32_avg_sse2; 1131 if (flags & HAS_SSE2) vp9_sad64x32_avg = vp9_sad64x32_avg_sse2;
1264
1265 vp9_sad64x32x4d = vp9_sad64x32x4d_c; 1132 vp9_sad64x32x4d = vp9_sad64x32x4d_c;
1266 if (flags & HAS_SSE2) vp9_sad64x32x4d = vp9_sad64x32x4d_sse2; 1133 if (flags & HAS_SSE2) vp9_sad64x32x4d = vp9_sad64x32x4d_sse2;
1267
1268 vp9_sad64x64 = vp9_sad64x64_c; 1134 vp9_sad64x64 = vp9_sad64x64_c;
1269 if (flags & HAS_SSE2) vp9_sad64x64 = vp9_sad64x64_sse2; 1135 if (flags & HAS_SSE2) vp9_sad64x64 = vp9_sad64x64_sse2;
1270
1271 vp9_sad64x64_avg = vp9_sad64x64_avg_c; 1136 vp9_sad64x64_avg = vp9_sad64x64_avg_c;
1272 if (flags & HAS_SSE2) vp9_sad64x64_avg = vp9_sad64x64_avg_sse2; 1137 if (flags & HAS_SSE2) vp9_sad64x64_avg = vp9_sad64x64_avg_sse2;
1273
1274
1275 vp9_sad64x64x4d = vp9_sad64x64x4d_c; 1138 vp9_sad64x64x4d = vp9_sad64x64x4d_c;
1276 if (flags & HAS_SSE2) vp9_sad64x64x4d = vp9_sad64x64x4d_sse2; 1139 if (flags & HAS_SSE2) vp9_sad64x64x4d = vp9_sad64x64x4d_sse2;
1277
1278
1279 vp9_sad8x16 = vp9_sad8x16_c; 1140 vp9_sad8x16 = vp9_sad8x16_c;
1280 if (flags & HAS_MMX) vp9_sad8x16 = vp9_sad8x16_mmx; 1141 if (flags & HAS_MMX) vp9_sad8x16 = vp9_sad8x16_mmx;
1281 if (flags & HAS_SSE2) vp9_sad8x16 = vp9_sad8x16_sse2; 1142 if (flags & HAS_SSE2) vp9_sad8x16 = vp9_sad8x16_sse2;
1282
1283 vp9_sad8x16_avg = vp9_sad8x16_avg_c; 1143 vp9_sad8x16_avg = vp9_sad8x16_avg_c;
1284 if (flags & HAS_SSE2) vp9_sad8x16_avg = vp9_sad8x16_avg_sse2; 1144 if (flags & HAS_SSE2) vp9_sad8x16_avg = vp9_sad8x16_avg_sse2;
1285
1286 vp9_sad8x16x3 = vp9_sad8x16x3_c; 1145 vp9_sad8x16x3 = vp9_sad8x16x3_c;
1287 if (flags & HAS_SSE3) vp9_sad8x16x3 = vp9_sad8x16x3_sse3; 1146 if (flags & HAS_SSE3) vp9_sad8x16x3 = vp9_sad8x16x3_sse3;
1288
1289 vp9_sad8x16x4d = vp9_sad8x16x4d_c; 1147 vp9_sad8x16x4d = vp9_sad8x16x4d_c;
1290 if (flags & HAS_SSE2) vp9_sad8x16x4d = vp9_sad8x16x4d_sse2; 1148 if (flags & HAS_SSE2) vp9_sad8x16x4d = vp9_sad8x16x4d_sse2;
1291
1292
1293 vp9_sad8x4 = vp9_sad8x4_c; 1149 vp9_sad8x4 = vp9_sad8x4_c;
1294 if (flags & HAS_SSE2) vp9_sad8x4 = vp9_sad8x4_sse2; 1150 if (flags & HAS_SSE2) vp9_sad8x4 = vp9_sad8x4_sse2;
1295
1296 vp9_sad8x4_avg = vp9_sad8x4_avg_c; 1151 vp9_sad8x4_avg = vp9_sad8x4_avg_c;
1297 if (flags & HAS_SSE2) vp9_sad8x4_avg = vp9_sad8x4_avg_sse2; 1152 if (flags & HAS_SSE2) vp9_sad8x4_avg = vp9_sad8x4_avg_sse2;
1298
1299 vp9_sad8x4x4d = vp9_sad8x4x4d_c; 1153 vp9_sad8x4x4d = vp9_sad8x4x4d_c;
1300 if (flags & HAS_SSE2) vp9_sad8x4x4d = vp9_sad8x4x4d_sse2; 1154 if (flags & HAS_SSE2) vp9_sad8x4x4d = vp9_sad8x4x4d_sse2;
1301
1302
1303 vp9_sad8x8 = vp9_sad8x8_c; 1155 vp9_sad8x8 = vp9_sad8x8_c;
1304 if (flags & HAS_MMX) vp9_sad8x8 = vp9_sad8x8_mmx; 1156 if (flags & HAS_MMX) vp9_sad8x8 = vp9_sad8x8_mmx;
1305 if (flags & HAS_SSE2) vp9_sad8x8 = vp9_sad8x8_sse2; 1157 if (flags & HAS_SSE2) vp9_sad8x8 = vp9_sad8x8_sse2;
1306
1307 vp9_sad8x8_avg = vp9_sad8x8_avg_c; 1158 vp9_sad8x8_avg = vp9_sad8x8_avg_c;
1308 if (flags & HAS_SSE2) vp9_sad8x8_avg = vp9_sad8x8_avg_sse2; 1159 if (flags & HAS_SSE2) vp9_sad8x8_avg = vp9_sad8x8_avg_sse2;
1309
1310 vp9_sad8x8x3 = vp9_sad8x8x3_c; 1160 vp9_sad8x8x3 = vp9_sad8x8x3_c;
1311 if (flags & HAS_SSE3) vp9_sad8x8x3 = vp9_sad8x8x3_sse3; 1161 if (flags & HAS_SSE3) vp9_sad8x8x3 = vp9_sad8x8x3_sse3;
1312
1313 vp9_sad8x8x4d = vp9_sad8x8x4d_c; 1162 vp9_sad8x8x4d = vp9_sad8x8x4d_c;
1314 if (flags & HAS_SSE2) vp9_sad8x8x4d = vp9_sad8x8x4d_sse2; 1163 if (flags & HAS_SSE2) vp9_sad8x8x4d = vp9_sad8x8x4d_sse2;
1315
1316
1317 vp9_sub_pixel_avg_variance16x16 = vp9_sub_pixel_avg_variance16x16_c; 1164 vp9_sub_pixel_avg_variance16x16 = vp9_sub_pixel_avg_variance16x16_c;
1318 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance16x16 = vp9_sub_pixel_avg_variance16x16_sse2; 1165 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance16x16 = vp9_sub_pixel_avg_variance16x16_sse2;
1319 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance16x16 = vp9_sub_pixel_avg_variance16x16_ssse3; 1166 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance16x16 = vp9_sub_pixel_avg_variance16x16_ssse3;
1320
1321 vp9_sub_pixel_avg_variance16x32 = vp9_sub_pixel_avg_variance16x32_c; 1167 vp9_sub_pixel_avg_variance16x32 = vp9_sub_pixel_avg_variance16x32_c;
1322 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance16x32 = vp9_sub_pixel_avg_variance16x32_sse2; 1168 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance16x32 = vp9_sub_pixel_avg_variance16x32_sse2;
1323 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance16x32 = vp9_sub_pixel_avg_variance16x32_ssse3; 1169 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance16x32 = vp9_sub_pixel_avg_variance16x32_ssse3;
1324
1325 vp9_sub_pixel_avg_variance16x8 = vp9_sub_pixel_avg_variance16x8_c; 1170 vp9_sub_pixel_avg_variance16x8 = vp9_sub_pixel_avg_variance16x8_c;
1326 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance16x8 = vp9_sub_pixel_avg_variance16x8_sse2; 1171 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance16x8 = vp9_sub_pixel_avg_variance16x8_sse2;
1327 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance16x8 = vp9_sub_pixel_avg_variance16x8_ssse3; 1172 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance16x8 = vp9_sub_pixel_avg_variance16x8_ssse3;
1328
1329 vp9_sub_pixel_avg_variance32x16 = vp9_sub_pixel_avg_variance32x16_c; 1173 vp9_sub_pixel_avg_variance32x16 = vp9_sub_pixel_avg_variance32x16_c;
1330 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance32x16 = vp9_sub_pixel_avg_variance32x16_sse2; 1174 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance32x16 = vp9_sub_pixel_avg_variance32x16_sse2;
1331 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance32x16 = vp9_sub_pixel_avg_variance32x16_ssse3; 1175 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance32x16 = vp9_sub_pixel_avg_variance32x16_ssse3;
1332
1333 vp9_sub_pixel_avg_variance32x32 = vp9_sub_pixel_avg_variance32x32_c; 1176 vp9_sub_pixel_avg_variance32x32 = vp9_sub_pixel_avg_variance32x32_c;
1334 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance32x32 = vp9_sub_pixel_avg_variance32x32_sse2; 1177 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance32x32 = vp9_sub_pixel_avg_variance32x32_sse2;
1335 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance32x32 = vp9_sub_pixel_avg_variance32x32_ssse3; 1178 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance32x32 = vp9_sub_pixel_avg_variance32x32_ssse3;
1336
1337 vp9_sub_pixel_avg_variance32x64 = vp9_sub_pixel_avg_variance32x64_c; 1179 vp9_sub_pixel_avg_variance32x64 = vp9_sub_pixel_avg_variance32x64_c;
1338 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance32x64 = vp9_sub_pixel_avg_variance32x64_sse2; 1180 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance32x64 = vp9_sub_pixel_avg_variance32x64_sse2;
1339 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance32x64 = vp9_sub_pixel_avg_variance32x64_ssse3; 1181 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance32x64 = vp9_sub_pixel_avg_variance32x64_ssse3;
1340
1341 vp9_sub_pixel_avg_variance4x4 = vp9_sub_pixel_avg_variance4x4_c; 1182 vp9_sub_pixel_avg_variance4x4 = vp9_sub_pixel_avg_variance4x4_c;
1342 if (flags & HAS_SSE) vp9_sub_pixel_avg_variance4x4 = vp9_sub_pixel_avg_variance4x4_sse; 1183 if (flags & HAS_SSE) vp9_sub_pixel_avg_variance4x4 = vp9_sub_pixel_avg_variance4x4_sse;
1343 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance4x4 = vp9_sub_pixel_avg_variance4x4_ssse3; 1184 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance4x4 = vp9_sub_pixel_avg_variance4x4_ssse3;
1344
1345 vp9_sub_pixel_avg_variance4x8 = vp9_sub_pixel_avg_variance4x8_c; 1185 vp9_sub_pixel_avg_variance4x8 = vp9_sub_pixel_avg_variance4x8_c;
1346 if (flags & HAS_SSE) vp9_sub_pixel_avg_variance4x8 = vp9_sub_pixel_avg_variance4x8_sse; 1186 if (flags & HAS_SSE) vp9_sub_pixel_avg_variance4x8 = vp9_sub_pixel_avg_variance4x8_sse;
1347 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance4x8 = vp9_sub_pixel_avg_variance4x8_ssse3; 1187 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance4x8 = vp9_sub_pixel_avg_variance4x8_ssse3;
1348
1349 vp9_sub_pixel_avg_variance64x32 = vp9_sub_pixel_avg_variance64x32_c; 1188 vp9_sub_pixel_avg_variance64x32 = vp9_sub_pixel_avg_variance64x32_c;
1350 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance64x32 = vp9_sub_pixel_avg_variance64x32_sse2; 1189 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance64x32 = vp9_sub_pixel_avg_variance64x32_sse2;
1351 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance64x32 = vp9_sub_pixel_avg_variance64x32_ssse3; 1190 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance64x32 = vp9_sub_pixel_avg_variance64x32_ssse3;
1352
1353 vp9_sub_pixel_avg_variance64x64 = vp9_sub_pixel_avg_variance64x64_c; 1191 vp9_sub_pixel_avg_variance64x64 = vp9_sub_pixel_avg_variance64x64_c;
1354 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance64x64 = vp9_sub_pixel_avg_variance64x64_sse2; 1192 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance64x64 = vp9_sub_pixel_avg_variance64x64_sse2;
1355 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance64x64 = vp9_sub_pixel_avg_variance64x64_ssse3; 1193 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance64x64 = vp9_sub_pixel_avg_variance64x64_ssse3;
1356
1357 vp9_sub_pixel_avg_variance8x16 = vp9_sub_pixel_avg_variance8x16_c; 1194 vp9_sub_pixel_avg_variance8x16 = vp9_sub_pixel_avg_variance8x16_c;
1358 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance8x16 = vp9_sub_pixel_avg_variance8x16_sse2; 1195 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance8x16 = vp9_sub_pixel_avg_variance8x16_sse2;
1359 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance8x16 = vp9_sub_pixel_avg_variance8x16_ssse3; 1196 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance8x16 = vp9_sub_pixel_avg_variance8x16_ssse3;
1360
1361 vp9_sub_pixel_avg_variance8x4 = vp9_sub_pixel_avg_variance8x4_c; 1197 vp9_sub_pixel_avg_variance8x4 = vp9_sub_pixel_avg_variance8x4_c;
1362 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance8x4 = vp9_sub_pixel_avg_variance8x4_sse2; 1198 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance8x4 = vp9_sub_pixel_avg_variance8x4_sse2;
1363 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance8x4 = vp9_sub_pixel_avg_variance8x4_ssse3; 1199 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance8x4 = vp9_sub_pixel_avg_variance8x4_ssse3;
1364
1365 vp9_sub_pixel_avg_variance8x8 = vp9_sub_pixel_avg_variance8x8_c; 1200 vp9_sub_pixel_avg_variance8x8 = vp9_sub_pixel_avg_variance8x8_c;
1366 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance8x8 = vp9_sub_pixel_avg_variance8x8_sse2; 1201 if (flags & HAS_SSE2) vp9_sub_pixel_avg_variance8x8 = vp9_sub_pixel_avg_variance8x8_sse2;
1367 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance8x8 = vp9_sub_pixel_avg_variance8x8_ssse3; 1202 if (flags & HAS_SSSE3) vp9_sub_pixel_avg_variance8x8 = vp9_sub_pixel_avg_variance8x8_ssse3;
1368
1369
1370
1371 vp9_sub_pixel_variance16x16 = vp9_sub_pixel_variance16x16_c; 1203 vp9_sub_pixel_variance16x16 = vp9_sub_pixel_variance16x16_c;
1372 if (flags & HAS_SSE2) vp9_sub_pixel_variance16x16 = vp9_sub_pixel_variance16x16_sse2; 1204 if (flags & HAS_SSE2) vp9_sub_pixel_variance16x16 = vp9_sub_pixel_variance16x16_sse2;
1373 if (flags & HAS_SSSE3) vp9_sub_pixel_variance16x16 = vp9_sub_pixel_variance16x16_ssse3; 1205 if (flags & HAS_SSSE3) vp9_sub_pixel_variance16x16 = vp9_sub_pixel_variance16x16_ssse3;
1374
1375 vp9_sub_pixel_variance16x32 = vp9_sub_pixel_variance16x32_c; 1206 vp9_sub_pixel_variance16x32 = vp9_sub_pixel_variance16x32_c;
1376 if (flags & HAS_SSE2) vp9_sub_pixel_variance16x32 = vp9_sub_pixel_variance16x32_sse2; 1207 if (flags & HAS_SSE2) vp9_sub_pixel_variance16x32 = vp9_sub_pixel_variance16x32_sse2;
1377 if (flags & HAS_SSSE3) vp9_sub_pixel_variance16x32 = vp9_sub_pixel_variance16x32_ssse3; 1208 if (flags & HAS_SSSE3) vp9_sub_pixel_variance16x32 = vp9_sub_pixel_variance16x32_ssse3;
1378
1379 vp9_sub_pixel_variance16x8 = vp9_sub_pixel_variance16x8_c; 1209 vp9_sub_pixel_variance16x8 = vp9_sub_pixel_variance16x8_c;
1380 if (flags & HAS_SSE2) vp9_sub_pixel_variance16x8 = vp9_sub_pixel_variance16x8_sse2; 1210 if (flags & HAS_SSE2) vp9_sub_pixel_variance16x8 = vp9_sub_pixel_variance16x8_sse2;
1381 if (flags & HAS_SSSE3) vp9_sub_pixel_variance16x8 = vp9_sub_pixel_variance16x8_ssse3; 1211 if (flags & HAS_SSSE3) vp9_sub_pixel_variance16x8 = vp9_sub_pixel_variance16x8_ssse3;
1382
1383 vp9_sub_pixel_variance32x16 = vp9_sub_pixel_variance32x16_c; 1212 vp9_sub_pixel_variance32x16 = vp9_sub_pixel_variance32x16_c;
1384 if (flags & HAS_SSE2) vp9_sub_pixel_variance32x16 = vp9_sub_pixel_variance32x16_sse2; 1213 if (flags & HAS_SSE2) vp9_sub_pixel_variance32x16 = vp9_sub_pixel_variance32x16_sse2;
1385 if (flags & HAS_SSSE3) vp9_sub_pixel_variance32x16 = vp9_sub_pixel_variance32x16_ssse3; 1214 if (flags & HAS_SSSE3) vp9_sub_pixel_variance32x16 = vp9_sub_pixel_variance32x16_ssse3;
1386
1387 vp9_sub_pixel_variance32x32 = vp9_sub_pixel_variance32x32_c; 1215 vp9_sub_pixel_variance32x32 = vp9_sub_pixel_variance32x32_c;
1388 if (flags & HAS_SSE2) vp9_sub_pixel_variance32x32 = vp9_sub_pixel_variance32x32_sse2; 1216 if (flags & HAS_SSE2) vp9_sub_pixel_variance32x32 = vp9_sub_pixel_variance32x32_sse2;
1389 if (flags & HAS_SSSE3) vp9_sub_pixel_variance32x32 = vp9_sub_pixel_variance32x32_ssse3; 1217 if (flags & HAS_SSSE3) vp9_sub_pixel_variance32x32 = vp9_sub_pixel_variance32x32_ssse3;
1390
1391 vp9_sub_pixel_variance32x64 = vp9_sub_pixel_variance32x64_c; 1218 vp9_sub_pixel_variance32x64 = vp9_sub_pixel_variance32x64_c;
1392 if (flags & HAS_SSE2) vp9_sub_pixel_variance32x64 = vp9_sub_pixel_variance32x64_sse2; 1219 if (flags & HAS_SSE2) vp9_sub_pixel_variance32x64 = vp9_sub_pixel_variance32x64_sse2;
1393 if (flags & HAS_SSSE3) vp9_sub_pixel_variance32x64 = vp9_sub_pixel_variance32x64_ssse3; 1220 if (flags & HAS_SSSE3) vp9_sub_pixel_variance32x64 = vp9_sub_pixel_variance32x64_ssse3;
1394
1395 vp9_sub_pixel_variance4x4 = vp9_sub_pixel_variance4x4_c; 1221 vp9_sub_pixel_variance4x4 = vp9_sub_pixel_variance4x4_c;
1396 if (flags & HAS_SSE) vp9_sub_pixel_variance4x4 = vp9_sub_pixel_variance4x4_sse; 1222 if (flags & HAS_SSE) vp9_sub_pixel_variance4x4 = vp9_sub_pixel_variance4x4_sse;
1397 if (flags & HAS_SSSE3) vp9_sub_pixel_variance4x4 = vp9_sub_pixel_variance4x4_ssse3; 1223 if (flags & HAS_SSSE3) vp9_sub_pixel_variance4x4 = vp9_sub_pixel_variance4x4_ssse3;
1398
1399 vp9_sub_pixel_variance4x8 = vp9_sub_pixel_variance4x8_c; 1224 vp9_sub_pixel_variance4x8 = vp9_sub_pixel_variance4x8_c;
1400 if (flags & HAS_SSE) vp9_sub_pixel_variance4x8 = vp9_sub_pixel_variance4x8_sse; 1225 if (flags & HAS_SSE) vp9_sub_pixel_variance4x8 = vp9_sub_pixel_variance4x8_sse;
1401 if (flags & HAS_SSSE3) vp9_sub_pixel_variance4x8 = vp9_sub_pixel_variance4x8_ssse3; 1226 if (flags & HAS_SSSE3) vp9_sub_pixel_variance4x8 = vp9_sub_pixel_variance4x8_ssse3;
1402
1403 vp9_sub_pixel_variance64x32 = vp9_sub_pixel_variance64x32_c; 1227 vp9_sub_pixel_variance64x32 = vp9_sub_pixel_variance64x32_c;
1404 if (flags & HAS_SSE2) vp9_sub_pixel_variance64x32 = vp9_sub_pixel_variance64x32_sse2; 1228 if (flags & HAS_SSE2) vp9_sub_pixel_variance64x32 = vp9_sub_pixel_variance64x32_sse2;
1405 if (flags & HAS_SSSE3) vp9_sub_pixel_variance64x32 = vp9_sub_pixel_variance64x32_ssse3; 1229 if (flags & HAS_SSSE3) vp9_sub_pixel_variance64x32 = vp9_sub_pixel_variance64x32_ssse3;
1406
1407 vp9_sub_pixel_variance64x64 = vp9_sub_pixel_variance64x64_c; 1230 vp9_sub_pixel_variance64x64 = vp9_sub_pixel_variance64x64_c;
1408 if (flags & HAS_SSE2) vp9_sub_pixel_variance64x64 = vp9_sub_pixel_variance64x64_sse2; 1231 if (flags & HAS_SSE2) vp9_sub_pixel_variance64x64 = vp9_sub_pixel_variance64x64_sse2;
1409 if (flags & HAS_SSSE3) vp9_sub_pixel_variance64x64 = vp9_sub_pixel_variance64x64_ssse3; 1232 if (flags & HAS_SSSE3) vp9_sub_pixel_variance64x64 = vp9_sub_pixel_variance64x64_ssse3;
1410
1411 vp9_sub_pixel_variance8x16 = vp9_sub_pixel_variance8x16_c; 1233 vp9_sub_pixel_variance8x16 = vp9_sub_pixel_variance8x16_c;
1412 if (flags & HAS_SSE2) vp9_sub_pixel_variance8x16 = vp9_sub_pixel_variance8x16_sse2; 1234 if (flags & HAS_SSE2) vp9_sub_pixel_variance8x16 = vp9_sub_pixel_variance8x16_sse2;
1413 if (flags & HAS_SSSE3) vp9_sub_pixel_variance8x16 = vp9_sub_pixel_variance8x16_ssse3; 1235 if (flags & HAS_SSSE3) vp9_sub_pixel_variance8x16 = vp9_sub_pixel_variance8x16_ssse3;
1414
1415 vp9_sub_pixel_variance8x4 = vp9_sub_pixel_variance8x4_c; 1236 vp9_sub_pixel_variance8x4 = vp9_sub_pixel_variance8x4_c;
1416 if (flags & HAS_SSE2) vp9_sub_pixel_variance8x4 = vp9_sub_pixel_variance8x4_sse2; 1237 if (flags & HAS_SSE2) vp9_sub_pixel_variance8x4 = vp9_sub_pixel_variance8x4_sse2;
1417 if (flags & HAS_SSSE3) vp9_sub_pixel_variance8x4 = vp9_sub_pixel_variance8x4_ssse3; 1238 if (flags & HAS_SSSE3) vp9_sub_pixel_variance8x4 = vp9_sub_pixel_variance8x4_ssse3;
1418
1419 vp9_sub_pixel_variance8x8 = vp9_sub_pixel_variance8x8_c; 1239 vp9_sub_pixel_variance8x8 = vp9_sub_pixel_variance8x8_c;
1420 if (flags & HAS_SSE2) vp9_sub_pixel_variance8x8 = vp9_sub_pixel_variance8x8_sse2; 1240 if (flags & HAS_SSE2) vp9_sub_pixel_variance8x8 = vp9_sub_pixel_variance8x8_sse2;
1421 if (flags & HAS_SSSE3) vp9_sub_pixel_variance8x8 = vp9_sub_pixel_variance8x8_ssse3; 1241 if (flags & HAS_SSSE3) vp9_sub_pixel_variance8x8 = vp9_sub_pixel_variance8x8_ssse3;
1422
1423 vp9_subtract_block = vp9_subtract_block_c; 1242 vp9_subtract_block = vp9_subtract_block_c;
1424 if (flags & HAS_SSE2) vp9_subtract_block = vp9_subtract_block_sse2; 1243 if (flags & HAS_SSE2) vp9_subtract_block = vp9_subtract_block_sse2;
1425
1426 vp9_temporal_filter_apply = vp9_temporal_filter_apply_c; 1244 vp9_temporal_filter_apply = vp9_temporal_filter_apply_c;
1427 if (flags & HAS_SSE2) vp9_temporal_filter_apply = vp9_temporal_filter_apply_sse2; 1245 if (flags & HAS_SSE2) vp9_temporal_filter_apply = vp9_temporal_filter_apply_sse2;
1428
1429 vp9_tm_predictor_16x16 = vp9_tm_predictor_16x16_c; 1246 vp9_tm_predictor_16x16 = vp9_tm_predictor_16x16_c;
1430 if (flags & HAS_SSE2) vp9_tm_predictor_16x16 = vp9_tm_predictor_16x16_sse2; 1247 if (flags & HAS_SSE2) vp9_tm_predictor_16x16 = vp9_tm_predictor_16x16_sse2;
1431
1432
1433 vp9_tm_predictor_4x4 = vp9_tm_predictor_4x4_c; 1248 vp9_tm_predictor_4x4 = vp9_tm_predictor_4x4_c;
1434 if (flags & HAS_SSE) vp9_tm_predictor_4x4 = vp9_tm_predictor_4x4_sse; 1249 if (flags & HAS_SSE) vp9_tm_predictor_4x4 = vp9_tm_predictor_4x4_sse;
1435
1436 vp9_tm_predictor_8x8 = vp9_tm_predictor_8x8_c; 1250 vp9_tm_predictor_8x8 = vp9_tm_predictor_8x8_c;
1437 if (flags & HAS_SSE2) vp9_tm_predictor_8x8 = vp9_tm_predictor_8x8_sse2; 1251 if (flags & HAS_SSE2) vp9_tm_predictor_8x8 = vp9_tm_predictor_8x8_sse2;
1438
1439 vp9_v_predictor_16x16 = vp9_v_predictor_16x16_c; 1252 vp9_v_predictor_16x16 = vp9_v_predictor_16x16_c;
1440 if (flags & HAS_SSE2) vp9_v_predictor_16x16 = vp9_v_predictor_16x16_sse2; 1253 if (flags & HAS_SSE2) vp9_v_predictor_16x16 = vp9_v_predictor_16x16_sse2;
1441
1442 vp9_v_predictor_32x32 = vp9_v_predictor_32x32_c; 1254 vp9_v_predictor_32x32 = vp9_v_predictor_32x32_c;
1443 if (flags & HAS_SSE2) vp9_v_predictor_32x32 = vp9_v_predictor_32x32_sse2; 1255 if (flags & HAS_SSE2) vp9_v_predictor_32x32 = vp9_v_predictor_32x32_sse2;
1444
1445 vp9_v_predictor_4x4 = vp9_v_predictor_4x4_c; 1256 vp9_v_predictor_4x4 = vp9_v_predictor_4x4_c;
1446 if (flags & HAS_SSE) vp9_v_predictor_4x4 = vp9_v_predictor_4x4_sse; 1257 if (flags & HAS_SSE) vp9_v_predictor_4x4 = vp9_v_predictor_4x4_sse;
1447
1448 vp9_v_predictor_8x8 = vp9_v_predictor_8x8_c; 1258 vp9_v_predictor_8x8 = vp9_v_predictor_8x8_c;
1449 if (flags & HAS_SSE) vp9_v_predictor_8x8 = vp9_v_predictor_8x8_sse; 1259 if (flags & HAS_SSE) vp9_v_predictor_8x8 = vp9_v_predictor_8x8_sse;
1450
1451 vp9_variance16x16 = vp9_variance16x16_c; 1260 vp9_variance16x16 = vp9_variance16x16_c;
1452 if (flags & HAS_MMX) vp9_variance16x16 = vp9_variance16x16_mmx; 1261 if (flags & HAS_MMX) vp9_variance16x16 = vp9_variance16x16_mmx;
1453 if (flags & HAS_SSE2) vp9_variance16x16 = vp9_variance16x16_sse2; 1262 if (flags & HAS_SSE2) vp9_variance16x16 = vp9_variance16x16_sse2;
1454
1455 vp9_variance16x32 = vp9_variance16x32_c; 1263 vp9_variance16x32 = vp9_variance16x32_c;
1456 if (flags & HAS_SSE2) vp9_variance16x32 = vp9_variance16x32_sse2; 1264 if (flags & HAS_SSE2) vp9_variance16x32 = vp9_variance16x32_sse2;
1457
1458 vp9_variance16x8 = vp9_variance16x8_c; 1265 vp9_variance16x8 = vp9_variance16x8_c;
1459 if (flags & HAS_MMX) vp9_variance16x8 = vp9_variance16x8_mmx; 1266 if (flags & HAS_MMX) vp9_variance16x8 = vp9_variance16x8_mmx;
1460 if (flags & HAS_SSE2) vp9_variance16x8 = vp9_variance16x8_sse2; 1267 if (flags & HAS_SSE2) vp9_variance16x8 = vp9_variance16x8_sse2;
1461
1462 vp9_variance32x16 = vp9_variance32x16_c; 1268 vp9_variance32x16 = vp9_variance32x16_c;
1463 if (flags & HAS_SSE2) vp9_variance32x16 = vp9_variance32x16_sse2; 1269 if (flags & HAS_SSE2) vp9_variance32x16 = vp9_variance32x16_sse2;
1464
1465 vp9_variance32x32 = vp9_variance32x32_c; 1270 vp9_variance32x32 = vp9_variance32x32_c;
1466 if (flags & HAS_SSE2) vp9_variance32x32 = vp9_variance32x32_sse2; 1271 if (flags & HAS_SSE2) vp9_variance32x32 = vp9_variance32x32_sse2;
1467
1468 vp9_variance32x64 = vp9_variance32x64_c; 1272 vp9_variance32x64 = vp9_variance32x64_c;
1469 if (flags & HAS_SSE2) vp9_variance32x64 = vp9_variance32x64_sse2; 1273 if (flags & HAS_SSE2) vp9_variance32x64 = vp9_variance32x64_sse2;
1470
1471 vp9_variance4x4 = vp9_variance4x4_c; 1274 vp9_variance4x4 = vp9_variance4x4_c;
1472 if (flags & HAS_MMX) vp9_variance4x4 = vp9_variance4x4_mmx; 1275 if (flags & HAS_MMX) vp9_variance4x4 = vp9_variance4x4_mmx;
1473 if (flags & HAS_SSE2) vp9_variance4x4 = vp9_variance4x4_sse2; 1276 if (flags & HAS_SSE2) vp9_variance4x4 = vp9_variance4x4_sse2;
1474
1475 vp9_variance4x8 = vp9_variance4x8_c; 1277 vp9_variance4x8 = vp9_variance4x8_c;
1476 if (flags & HAS_SSE2) vp9_variance4x8 = vp9_variance4x8_sse2; 1278 if (flags & HAS_SSE2) vp9_variance4x8 = vp9_variance4x8_sse2;
1477
1478 vp9_variance64x32 = vp9_variance64x32_c; 1279 vp9_variance64x32 = vp9_variance64x32_c;
1479 if (flags & HAS_SSE2) vp9_variance64x32 = vp9_variance64x32_sse2; 1280 if (flags & HAS_SSE2) vp9_variance64x32 = vp9_variance64x32_sse2;
1480
1481 vp9_variance64x64 = vp9_variance64x64_c; 1281 vp9_variance64x64 = vp9_variance64x64_c;
1482 if (flags & HAS_SSE2) vp9_variance64x64 = vp9_variance64x64_sse2; 1282 if (flags & HAS_SSE2) vp9_variance64x64 = vp9_variance64x64_sse2;
1483
1484 vp9_variance8x16 = vp9_variance8x16_c; 1283 vp9_variance8x16 = vp9_variance8x16_c;
1485 if (flags & HAS_MMX) vp9_variance8x16 = vp9_variance8x16_mmx; 1284 if (flags & HAS_MMX) vp9_variance8x16 = vp9_variance8x16_mmx;
1486 if (flags & HAS_SSE2) vp9_variance8x16 = vp9_variance8x16_sse2; 1285 if (flags & HAS_SSE2) vp9_variance8x16 = vp9_variance8x16_sse2;
1487
1488 vp9_variance8x4 = vp9_variance8x4_c; 1286 vp9_variance8x4 = vp9_variance8x4_c;
1489 if (flags & HAS_SSE2) vp9_variance8x4 = vp9_variance8x4_sse2; 1287 if (flags & HAS_SSE2) vp9_variance8x4 = vp9_variance8x4_sse2;
1490
1491 vp9_variance8x8 = vp9_variance8x8_c; 1288 vp9_variance8x8 = vp9_variance8x8_c;
1492 if (flags & HAS_MMX) vp9_variance8x8 = vp9_variance8x8_mmx; 1289 if (flags & HAS_MMX) vp9_variance8x8 = vp9_variance8x8_mmx;
1493 if (flags & HAS_SSE2) vp9_variance8x8 = vp9_variance8x8_sse2; 1290 if (flags & HAS_SSE2) vp9_variance8x8 = vp9_variance8x8_sse2;
1494
1495 vp9_variance_halfpixvar16x16_h = vp9_variance_halfpixvar16x16_h_c; 1291 vp9_variance_halfpixvar16x16_h = vp9_variance_halfpixvar16x16_h_c;
1496 if (flags & HAS_SSE2) vp9_variance_halfpixvar16x16_h = vp9_variance_halfpixvar16x16_h_sse2; 1292 if (flags & HAS_SSE2) vp9_variance_halfpixvar16x16_h = vp9_variance_halfpixvar16x16_h_sse2;
1497
1498 vp9_variance_halfpixvar16x16_hv = vp9_variance_halfpixvar16x16_hv_c; 1293 vp9_variance_halfpixvar16x16_hv = vp9_variance_halfpixvar16x16_hv_c;
1499 if (flags & HAS_SSE2) vp9_variance_halfpixvar16x16_hv = vp9_variance_halfpixvar16x16_hv_sse2; 1294 if (flags & HAS_SSE2) vp9_variance_halfpixvar16x16_hv = vp9_variance_halfpixvar16x16_hv_sse2;
1500
1501 vp9_variance_halfpixvar16x16_v = vp9_variance_halfpixvar16x16_v_c; 1295 vp9_variance_halfpixvar16x16_v = vp9_variance_halfpixvar16x16_v_c;
1502 if (flags & HAS_SSE2) vp9_variance_halfpixvar16x16_v = vp9_variance_halfpixvar16x16_v_sse2; 1296 if (flags & HAS_SSE2) vp9_variance_halfpixvar16x16_v = vp9_variance_halfpixvar16x16_v_sse2;
1503 } 1297 }
1504 #endif 1298 #endif
1505 1299
1506 #ifdef __cplusplus 1300 #ifdef __cplusplus
1507 } // extern "C" 1301 } // extern "C"
1508 #endif 1302 #endif
1509 1303
1510 #endif 1304 #endif