| OLD | NEW |
| 1 #ifndef VP8_RTCD_H_ | 1 #ifndef VP8_RTCD_H_ |
| 2 #define VP8_RTCD_H_ | 2 #define VP8_RTCD_H_ |
| 3 | 3 |
| 4 #ifdef RTCD_C | 4 #ifdef RTCD_C |
| 5 #define RTCD_EXTERN | 5 #define RTCD_EXTERN |
| 6 #else | 6 #else |
| 7 #define RTCD_EXTERN extern | 7 #define RTCD_EXTERN extern |
| 8 #endif | 8 #endif |
| 9 | 9 |
| 10 #ifdef __cplusplus | 10 #ifdef __cplusplus |
| (...skipping 474 matching lines...) | |
| 485 static void setup_rtcd_internal(void) | 485 static void setup_rtcd_internal(void) |
| 486 { | 486 { |
| 487 int flags = x86_simd_caps(); | 487 int flags = x86_simd_caps(); |
| 488 | 488 |
| 489 (void)flags; | 489 (void)flags; |
| 490 | 490 |
| 491 vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_c; | 491 vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_c; |
| 492 if (flags & HAS_MMX) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_mmx; | 492 if (flags & HAS_MMX) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_mmx; |
| 493 if (flags & HAS_SSE2) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2; | 493 if (flags & HAS_SSE2) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2; |
| 494 if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3; | 494 if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3; |
| 495 | |
| 496 vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_c; | 495 vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_c; |
| 497 if (flags & HAS_MMX) vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_mmx; | 496 if (flags & HAS_MMX) vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_mmx; |
| 498 | |
| 499 vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_c; | 497 vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_c; |
| 500 if (flags & HAS_MMX) vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_mmx; | 498 if (flags & HAS_MMX) vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_mmx; |
| 501 | |
| 502 vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_c; | 499 vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_c; |
| 503 if (flags & HAS_MMX) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_mmx; | 500 if (flags & HAS_MMX) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_mmx; |
| 504 if (flags & HAS_SSE2) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2; | 501 if (flags & HAS_SSE2) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2; |
| 505 if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3; | 502 if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3; |
| 506 | |
| 507 | |
| 508 | |
| 509 | |
| 510 vp8_block_error = vp8_block_error_c; | 503 vp8_block_error = vp8_block_error_c; |
| 511 if (flags & HAS_MMX) vp8_block_error = vp8_block_error_mmx; | 504 if (flags & HAS_MMX) vp8_block_error = vp8_block_error_mmx; |
| 512 if (flags & HAS_SSE2) vp8_block_error = vp8_block_error_xmm; | 505 if (flags & HAS_SSE2) vp8_block_error = vp8_block_error_xmm; |
| 513 | |
| 514 vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_c; | 506 vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_c; |
| 515 if (flags & HAS_SSE2) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2; | 507 if (flags & HAS_SSE2) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2; |
| 516 if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3; | 508 if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3; |
| 517 | |
| 518 vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_c; | 509 vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_c; |
| 519 if (flags & HAS_SSE2) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2; | 510 if (flags & HAS_SSE2) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2; |
| 520 if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3; | 511 if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3; |
| 521 | |
| 522 vp8_clear_system_state = vp8_clear_system_state_c; | 512 vp8_clear_system_state = vp8_clear_system_state_c; |
| 523 if (flags & HAS_MMX) vp8_clear_system_state = vpx_reset_mmx_state; | 513 if (flags & HAS_MMX) vp8_clear_system_state = vpx_reset_mmx_state; |
| 524 | |
| 525 vp8_copy32xn = vp8_copy32xn_c; | 514 vp8_copy32xn = vp8_copy32xn_c; |
| 526 if (flags & HAS_SSE2) vp8_copy32xn = vp8_copy32xn_sse2; | 515 if (flags & HAS_SSE2) vp8_copy32xn = vp8_copy32xn_sse2; |
| 527 if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3; | 516 if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3; |
| 528 | |
| 529 vp8_copy_mem16x16 = vp8_copy_mem16x16_c; | 517 vp8_copy_mem16x16 = vp8_copy_mem16x16_c; |
| 530 if (flags & HAS_MMX) vp8_copy_mem16x16 = vp8_copy_mem16x16_mmx; | 518 if (flags & HAS_MMX) vp8_copy_mem16x16 = vp8_copy_mem16x16_mmx; |
| 531 if (flags & HAS_SSE2) vp8_copy_mem16x16 = vp8_copy_mem16x16_sse2; | 519 if (flags & HAS_SSE2) vp8_copy_mem16x16 = vp8_copy_mem16x16_sse2; |
| 532 | |
| 533 vp8_copy_mem8x4 = vp8_copy_mem8x4_c; | 520 vp8_copy_mem8x4 = vp8_copy_mem8x4_c; |
| 534 if (flags & HAS_MMX) vp8_copy_mem8x4 = vp8_copy_mem8x4_mmx; | 521 if (flags & HAS_MMX) vp8_copy_mem8x4 = vp8_copy_mem8x4_mmx; |
| 535 | |
| 536 vp8_copy_mem8x8 = vp8_copy_mem8x8_c; | 522 vp8_copy_mem8x8 = vp8_copy_mem8x8_c; |
| 537 if (flags & HAS_MMX) vp8_copy_mem8x8 = vp8_copy_mem8x8_mmx; | 523 if (flags & HAS_MMX) vp8_copy_mem8x8 = vp8_copy_mem8x8_mmx; |
| 538 | |
| 539 vp8_dc_only_idct_add = vp8_dc_only_idct_add_c; | 524 vp8_dc_only_idct_add = vp8_dc_only_idct_add_c; |
| 540 if (flags & HAS_MMX) vp8_dc_only_idct_add = vp8_dc_only_idct_add_mmx; | 525 if (flags & HAS_MMX) vp8_dc_only_idct_add = vp8_dc_only_idct_add_mmx; |
| 541 | |
| 542 vp8_denoiser_filter = vp8_denoiser_filter_c; | 526 vp8_denoiser_filter = vp8_denoiser_filter_c; |
| 543 if (flags & HAS_SSE2) vp8_denoiser_filter = vp8_denoiser_filter_sse2; | 527 if (flags & HAS_SSE2) vp8_denoiser_filter = vp8_denoiser_filter_sse2; |
| 544 | |
| 545 vp8_dequant_idct_add = vp8_dequant_idct_add_c; | 528 vp8_dequant_idct_add = vp8_dequant_idct_add_c; |
| 546 if (flags & HAS_MMX) vp8_dequant_idct_add = vp8_dequant_idct_add_mmx; | 529 if (flags & HAS_MMX) vp8_dequant_idct_add = vp8_dequant_idct_add_mmx; |
| 547 | |
| 548 vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_c; | 530 vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_c; |
| 549 if (flags & HAS_MMX) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_mmx; | 531 if (flags & HAS_MMX) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_mmx; |
| 550 if (flags & HAS_SSE2) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_sse2; | 532 if (flags & HAS_SSE2) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_sse2; |
| 551 | |
| 552 vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_c; | 533 vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_c; |
| 553 if (flags & HAS_MMX) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_mmx; | 534 if (flags & HAS_MMX) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_mmx; |
| 554 if (flags & HAS_SSE2) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_sse2; | 535 if (flags & HAS_SSE2) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_sse2; |
| 555 | |
| 556 vp8_dequantize_b = vp8_dequantize_b_c; | 536 vp8_dequantize_b = vp8_dequantize_b_c; |
| 557 if (flags & HAS_MMX) vp8_dequantize_b = vp8_dequantize_b_mmx; | 537 if (flags & HAS_MMX) vp8_dequantize_b = vp8_dequantize_b_mmx; |
| 558 | |
| 559 vp8_diamond_search_sad = vp8_diamond_search_sad_c; | 538 vp8_diamond_search_sad = vp8_diamond_search_sad_c; |
| 560 if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4; | 539 if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4; |
| 561 | |
| 562 vp8_fast_quantize_b = vp8_fast_quantize_b_c; | 540 vp8_fast_quantize_b = vp8_fast_quantize_b_c; |
| 563 if (flags & HAS_SSE2) vp8_fast_quantize_b = vp8_fast_quantize_b_sse2; | 541 if (flags & HAS_SSE2) vp8_fast_quantize_b = vp8_fast_quantize_b_sse2; |
| 564 if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3; | 542 if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3; |
| 565 | |
| 566 | |
| 567 vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_c; | 543 vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_c; |
| 568 if (flags & HAS_SSE2) vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_sse2; | 544 if (flags & HAS_SSE2) vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_sse2; |
| 569 | |
| 570 | |
| 571 vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_c; | 545 vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_c; |
| 572 if (flags & HAS_SSE2) vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_sse2; | 546 if (flags & HAS_SSE2) vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_sse2; |
| 573 | |
| 574 vp8_full_search_sad = vp8_full_search_sad_c; | 547 vp8_full_search_sad = vp8_full_search_sad_c; |
| 575 if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3; | 548 if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3; |
| 576 if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8; | 549 if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8; |
| 577 | |
| 578 vp8_get4x4sse_cs = vp8_get4x4sse_cs_c; | 550 vp8_get4x4sse_cs = vp8_get4x4sse_cs_c; |
| 579 if (flags & HAS_MMX) vp8_get4x4sse_cs = vp8_get4x4sse_cs_mmx; | 551 if (flags & HAS_MMX) vp8_get4x4sse_cs = vp8_get4x4sse_cs_mmx; |
| 580 | |
| 581 vp8_get_mb_ss = vp8_get_mb_ss_c; | 552 vp8_get_mb_ss = vp8_get_mb_ss_c; |
| 582 if (flags & HAS_MMX) vp8_get_mb_ss = vp8_get_mb_ss_mmx; | 553 if (flags & HAS_MMX) vp8_get_mb_ss = vp8_get_mb_ss_mmx; |
| 583 if (flags & HAS_SSE2) vp8_get_mb_ss = vp8_get_mb_ss_sse2; | 554 if (flags & HAS_SSE2) vp8_get_mb_ss = vp8_get_mb_ss_sse2; |
| 584 | |
| 585 | |
| 586 vp8_loop_filter_bh = vp8_loop_filter_bh_c; | 555 vp8_loop_filter_bh = vp8_loop_filter_bh_c; |
| 587 if (flags & HAS_MMX) vp8_loop_filter_bh = vp8_loop_filter_bh_mmx; | 556 if (flags & HAS_MMX) vp8_loop_filter_bh = vp8_loop_filter_bh_mmx; |
| 588 if (flags & HAS_SSE2) vp8_loop_filter_bh = vp8_loop_filter_bh_sse2; | 557 if (flags & HAS_SSE2) vp8_loop_filter_bh = vp8_loop_filter_bh_sse2; |
| 589 | |
| 590 vp8_loop_filter_bv = vp8_loop_filter_bv_c; | 558 vp8_loop_filter_bv = vp8_loop_filter_bv_c; |
| 591 if (flags & HAS_MMX) vp8_loop_filter_bv = vp8_loop_filter_bv_mmx; | 559 if (flags & HAS_MMX) vp8_loop_filter_bv = vp8_loop_filter_bv_mmx; |
| 592 if (flags & HAS_SSE2) vp8_loop_filter_bv = vp8_loop_filter_bv_sse2; | 560 if (flags & HAS_SSE2) vp8_loop_filter_bv = vp8_loop_filter_bv_sse2; |
| 593 | |
| 594 vp8_loop_filter_mbh = vp8_loop_filter_mbh_c; | 561 vp8_loop_filter_mbh = vp8_loop_filter_mbh_c; |
| 595 if (flags & HAS_MMX) vp8_loop_filter_mbh = vp8_loop_filter_mbh_mmx; | 562 if (flags & HAS_MMX) vp8_loop_filter_mbh = vp8_loop_filter_mbh_mmx; |
| 596 if (flags & HAS_SSE2) vp8_loop_filter_mbh = vp8_loop_filter_mbh_sse2; | 563 if (flags & HAS_SSE2) vp8_loop_filter_mbh = vp8_loop_filter_mbh_sse2; |
| 597 | |
| 598 vp8_loop_filter_mbv = vp8_loop_filter_mbv_c; | 564 vp8_loop_filter_mbv = vp8_loop_filter_mbv_c; |
| 599 if (flags & HAS_MMX) vp8_loop_filter_mbv = vp8_loop_filter_mbv_mmx; | 565 if (flags & HAS_MMX) vp8_loop_filter_mbv = vp8_loop_filter_mbv_mmx; |
| 600 if (flags & HAS_SSE2) vp8_loop_filter_mbv = vp8_loop_filter_mbv_sse2; | 566 if (flags & HAS_SSE2) vp8_loop_filter_mbv = vp8_loop_filter_mbv_sse2; |
| 601 | |
| 602 vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_c; | 567 vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_c; |
| 603 if (flags & HAS_MMX) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_mmx; | 568 if (flags & HAS_MMX) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_mmx; |
| 604 if (flags & HAS_SSE2) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_sse2; | 569 if (flags & HAS_SSE2) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_sse2; |
| 605 | |
| 606 vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_c; | 570 vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_c; |
| 607 if (flags & HAS_MMX) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_mmx; | 571 if (flags & HAS_MMX) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_mmx; |
| 608 if (flags & HAS_SSE2) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_sse2; | 572 if (flags & HAS_SSE2) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_sse2; |
| 609 | |
| 610 vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_c; | 573 vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_c; |
| 611 if (flags & HAS_MMX) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_mmx; | 574 if (flags & HAS_MMX) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_mmx; |
| 612 if (flags & HAS_SSE2) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_sse2; | 575 if (flags & HAS_SSE2) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_sse2; |
| 613 | |
| 614 vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_c; | 576 vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_c; |
| 615 if (flags & HAS_MMX) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_mmx; | 577 if (flags & HAS_MMX) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_mmx; |
| 616 if (flags & HAS_SSE2) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_sse2; | 578 if (flags & HAS_SSE2) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_sse2; |
| 617 | |
| 618 vp8_mbblock_error = vp8_mbblock_error_c; | 579 vp8_mbblock_error = vp8_mbblock_error_c; |
| 619 if (flags & HAS_MMX) vp8_mbblock_error = vp8_mbblock_error_mmx; | 580 if (flags & HAS_MMX) vp8_mbblock_error = vp8_mbblock_error_mmx; |
| 620 if (flags & HAS_SSE2) vp8_mbblock_error = vp8_mbblock_error_xmm; | 581 if (flags & HAS_SSE2) vp8_mbblock_error = vp8_mbblock_error_xmm; |
| 621 | |
| 622 vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_c; | 582 vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_c; |
| 623 if (flags & HAS_SSE2) vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_xmm; | 583 if (flags & HAS_SSE2) vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_xmm; |
| 624 | |
| 625 vp8_mbpost_proc_down = vp8_mbpost_proc_down_c; | 584 vp8_mbpost_proc_down = vp8_mbpost_proc_down_c; |
| 626 if (flags & HAS_MMX) vp8_mbpost_proc_down = vp8_mbpost_proc_down_mmx; | 585 if (flags & HAS_MMX) vp8_mbpost_proc_down = vp8_mbpost_proc_down_mmx; |
| 627 if (flags & HAS_SSE2) vp8_mbpost_proc_down = vp8_mbpost_proc_down_xmm; | 586 if (flags & HAS_SSE2) vp8_mbpost_proc_down = vp8_mbpost_proc_down_xmm; |
| 628 | |
| 629 vp8_mbuverror = vp8_mbuverror_c; | 587 vp8_mbuverror = vp8_mbuverror_c; |
| 630 if (flags & HAS_MMX) vp8_mbuverror = vp8_mbuverror_mmx; | 588 if (flags & HAS_MMX) vp8_mbuverror = vp8_mbuverror_mmx; |
| 631 if (flags & HAS_SSE2) vp8_mbuverror = vp8_mbuverror_xmm; | 589 if (flags & HAS_SSE2) vp8_mbuverror = vp8_mbuverror_xmm; |
| 632 | |
| 633 vp8_mse16x16 = vp8_mse16x16_c; | 590 vp8_mse16x16 = vp8_mse16x16_c; |
| 634 if (flags & HAS_MMX) vp8_mse16x16 = vp8_mse16x16_mmx; | 591 if (flags & HAS_MMX) vp8_mse16x16 = vp8_mse16x16_mmx; |
| 635 if (flags & HAS_SSE2) vp8_mse16x16 = vp8_mse16x16_wmt; | 592 if (flags & HAS_SSE2) vp8_mse16x16 = vp8_mse16x16_wmt; |
| 636 | |
| 637 vp8_plane_add_noise = vp8_plane_add_noise_c; | 593 vp8_plane_add_noise = vp8_plane_add_noise_c; |
| 638 if (flags & HAS_MMX) vp8_plane_add_noise = vp8_plane_add_noise_mmx; | 594 if (flags & HAS_MMX) vp8_plane_add_noise = vp8_plane_add_noise_mmx; |
| 639 if (flags & HAS_SSE2) vp8_plane_add_noise = vp8_plane_add_noise_wmt; | 595 if (flags & HAS_SSE2) vp8_plane_add_noise = vp8_plane_add_noise_wmt; |
| 640 | |
| 641 vp8_post_proc_down_and_across_mb_row = vp8_post_proc_down_and_across_mb_row_c; | 596 vp8_post_proc_down_and_across_mb_row = vp8_post_proc_down_and_across_mb_row_c; |
| 642 if (flags & HAS_SSE2) vp8_post_proc_down_and_across_mb_row = vp8_post_proc_down_and_across_mb_row_sse2; | 597 if (flags & HAS_SSE2) vp8_post_proc_down_and_across_mb_row = vp8_post_proc_down_and_across_mb_row_sse2; |
| 643 | |
| 644 | |
| 645 | |
| 646 | |
| 647 vp8_refining_search_sad = vp8_refining_search_sad_c; | 598 vp8_refining_search_sad = vp8_refining_search_sad_c; |
| 648 if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4; | 599 if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4; |
| 649 | |
| 650 vp8_regular_quantize_b = vp8_regular_quantize_b_c; | 600 vp8_regular_quantize_b = vp8_regular_quantize_b_c; |
| 651 if (flags & HAS_SSE2) vp8_regular_quantize_b = vp8_regular_quantize_b_sse2; | 601 if (flags & HAS_SSE2) vp8_regular_quantize_b = vp8_regular_quantize_b_sse2; |
| 652 | |
| 653 | |
| 654 vp8_sad16x16 = vp8_sad16x16_c; | 602 vp8_sad16x16 = vp8_sad16x16_c; |
| 655 if (flags & HAS_MMX) vp8_sad16x16 = vp8_sad16x16_mmx; | 603 if (flags & HAS_MMX) vp8_sad16x16 = vp8_sad16x16_mmx; |
| 656 if (flags & HAS_SSE2) vp8_sad16x16 = vp8_sad16x16_wmt; | 604 if (flags & HAS_SSE2) vp8_sad16x16 = vp8_sad16x16_wmt; |
| 657 if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3; | 605 if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3; |
| 658 | |
| 659 vp8_sad16x16x3 = vp8_sad16x16x3_c; | 606 vp8_sad16x16x3 = vp8_sad16x16x3_c; |
| 660 if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3; | 607 if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3; |
| 661 if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3; | 608 if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3; |
| 662 | |
| 663 vp8_sad16x16x4d = vp8_sad16x16x4d_c; | 609 vp8_sad16x16x4d = vp8_sad16x16x4d_c; |
| 664 if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3; | 610 if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3; |
| 665 | |
| 666 vp8_sad16x16x8 = vp8_sad16x16x8_c; | 611 vp8_sad16x16x8 = vp8_sad16x16x8_c; |
| 667 if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4; | 612 if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4; |
| 668 | |
| 669 vp8_sad16x8 = vp8_sad16x8_c; | 613 vp8_sad16x8 = vp8_sad16x8_c; |
| 670 if (flags & HAS_MMX) vp8_sad16x8 = vp8_sad16x8_mmx; | 614 if (flags & HAS_MMX) vp8_sad16x8 = vp8_sad16x8_mmx; |
| 671 if (flags & HAS_SSE2) vp8_sad16x8 = vp8_sad16x8_wmt; | 615 if (flags & HAS_SSE2) vp8_sad16x8 = vp8_sad16x8_wmt; |
| 672 | |
| 673 vp8_sad16x8x3 = vp8_sad16x8x3_c; | 616 vp8_sad16x8x3 = vp8_sad16x8x3_c; |
| 674 if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3; | 617 if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3; |
| 675 if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3; | 618 if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3; |
| 676 | |
| 677 vp8_sad16x8x4d = vp8_sad16x8x4d_c; | 619 vp8_sad16x8x4d = vp8_sad16x8x4d_c; |
| 678 if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3; | 620 if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3; |
| 679 | |
| 680 vp8_sad16x8x8 = vp8_sad16x8x8_c; | 621 vp8_sad16x8x8 = vp8_sad16x8x8_c; |
| 681 if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4; | 622 if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4; |
| 682 | |
| 683 vp8_sad4x4 = vp8_sad4x4_c; | 623 vp8_sad4x4 = vp8_sad4x4_c; |
| 684 if (flags & HAS_MMX) vp8_sad4x4 = vp8_sad4x4_mmx; | 624 if (flags & HAS_MMX) vp8_sad4x4 = vp8_sad4x4_mmx; |
| 685 if (flags & HAS_SSE2) vp8_sad4x4 = vp8_sad4x4_wmt; | 625 if (flags & HAS_SSE2) vp8_sad4x4 = vp8_sad4x4_wmt; |
| 686 | |
| 687 vp8_sad4x4x3 = vp8_sad4x4x3_c; | 626 vp8_sad4x4x3 = vp8_sad4x4x3_c; |
| 688 if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3; | 627 if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3; |
| 689 | |
| 690 vp8_sad4x4x4d = vp8_sad4x4x4d_c; | 628 vp8_sad4x4x4d = vp8_sad4x4x4d_c; |
| 691 if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3; | 629 if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3; |
| 692 | |
| 693 vp8_sad4x4x8 = vp8_sad4x4x8_c; | 630 vp8_sad4x4x8 = vp8_sad4x4x8_c; |
| 694 if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4; | 631 if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4; |
| 695 | |
| 696 vp8_sad8x16 = vp8_sad8x16_c; | 632 vp8_sad8x16 = vp8_sad8x16_c; |
| 697 if (flags & HAS_MMX) vp8_sad8x16 = vp8_sad8x16_mmx; | 633 if (flags & HAS_MMX) vp8_sad8x16 = vp8_sad8x16_mmx; |
| 698 if (flags & HAS_SSE2) vp8_sad8x16 = vp8_sad8x16_wmt; | 634 if (flags & HAS_SSE2) vp8_sad8x16 = vp8_sad8x16_wmt; |
| 699 | |
| 700 vp8_sad8x16x3 = vp8_sad8x16x3_c; | 635 vp8_sad8x16x3 = vp8_sad8x16x3_c; |
| 701 if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3; | 636 if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3; |
| 702 | |
| 703 vp8_sad8x16x4d = vp8_sad8x16x4d_c; | 637 vp8_sad8x16x4d = vp8_sad8x16x4d_c; |
| 704 if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3; | 638 if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3; |
| 705 | |
| 706 vp8_sad8x16x8 = vp8_sad8x16x8_c; | 639 vp8_sad8x16x8 = vp8_sad8x16x8_c; |
| 707 if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4; | 640 if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4; |
| 708 | |
| 709 vp8_sad8x8 = vp8_sad8x8_c; | 641 vp8_sad8x8 = vp8_sad8x8_c; |
| 710 if (flags & HAS_MMX) vp8_sad8x8 = vp8_sad8x8_mmx; | 642 if (flags & HAS_MMX) vp8_sad8x8 = vp8_sad8x8_mmx; |
| 711 if (flags & HAS_SSE2) vp8_sad8x8 = vp8_sad8x8_wmt; | 643 if (flags & HAS_SSE2) vp8_sad8x8 = vp8_sad8x8_wmt; |
| 712 | |
| 713 vp8_sad8x8x3 = vp8_sad8x8x3_c; | 644 vp8_sad8x8x3 = vp8_sad8x8x3_c; |
| 714 if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3; | 645 if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3; |
| 715 | |
| 716 vp8_sad8x8x4d = vp8_sad8x8x4d_c; | 646 vp8_sad8x8x4d = vp8_sad8x8x4d_c; |
| 717 if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3; | 647 if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3; |
| 718 | |
| 719 vp8_sad8x8x8 = vp8_sad8x8x8_c; | 648 vp8_sad8x8x8 = vp8_sad8x8x8_c; |
| 720 if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4; | 649 if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4; |
| 721 | |
| 722 vp8_short_fdct4x4 = vp8_short_fdct4x4_c; | 650 vp8_short_fdct4x4 = vp8_short_fdct4x4_c; |
| 723 if (flags & HAS_MMX) vp8_short_fdct4x4 = vp8_short_fdct4x4_mmx; | 651 if (flags & HAS_MMX) vp8_short_fdct4x4 = vp8_short_fdct4x4_mmx; |
| 724 if (flags & HAS_SSE2) vp8_short_fdct4x4 = vp8_short_fdct4x4_sse2; | 652 if (flags & HAS_SSE2) vp8_short_fdct4x4 = vp8_short_fdct4x4_sse2; |
| 725 | |
| 726 vp8_short_fdct8x4 = vp8_short_fdct8x4_c; | 653 vp8_short_fdct8x4 = vp8_short_fdct8x4_c; |
| 727 if (flags & HAS_MMX) vp8_short_fdct8x4 = vp8_short_fdct8x4_mmx; | 654 if (flags & HAS_MMX) vp8_short_fdct8x4 = vp8_short_fdct8x4_mmx; |
| 728 if (flags & HAS_SSE2) vp8_short_fdct8x4 = vp8_short_fdct8x4_sse2; | 655 if (flags & HAS_SSE2) vp8_short_fdct8x4 = vp8_short_fdct8x4_sse2; |
| 729 | |
| 730 vp8_short_idct4x4llm = vp8_short_idct4x4llm_c; | 656 vp8_short_idct4x4llm = vp8_short_idct4x4llm_c; |
| 731 if (flags & HAS_MMX) vp8_short_idct4x4llm = vp8_short_idct4x4llm_mmx; | 657 if (flags & HAS_MMX) vp8_short_idct4x4llm = vp8_short_idct4x4llm_mmx; |
| 732 | |
| 733 vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_c; | 658 vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_c; |
| 734 if (flags & HAS_MMX) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_mmx; | 659 if (flags & HAS_MMX) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_mmx; |
| 735 if (flags & HAS_SSE2) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_sse2; | 660 if (flags & HAS_SSE2) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_sse2; |
| 736 | |
| 737 | |
| 738 vp8_short_walsh4x4 = vp8_short_walsh4x4_c; | 661 vp8_short_walsh4x4 = vp8_short_walsh4x4_c; |
| 739 if (flags & HAS_SSE2) vp8_short_walsh4x4 = vp8_short_walsh4x4_sse2; | 662 if (flags & HAS_SSE2) vp8_short_walsh4x4 = vp8_short_walsh4x4_sse2; |
| 740 | |
| 741 vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_c; | 663 vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_c; |
| 742 if (flags & HAS_MMX) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_mmx; | 664 if (flags & HAS_MMX) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_mmx; |
| 743 if (flags & HAS_SSE2) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2; | 665 if (flags & HAS_SSE2) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2; |
| 744 if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3; | 666 if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3; |
| 745 | |
| 746 vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_c; | 667 vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_c; |
| 747 if (flags & HAS_MMX) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx; | 668 if (flags & HAS_MMX) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx; |
| 748 if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3; | 669 if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3; |
| 749 | |
| 750 vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_c; | 670 vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_c; |
| 751 if (flags & HAS_MMX) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_mmx; | 671 if (flags & HAS_MMX) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_mmx; |
| 752 if (flags & HAS_SSE2) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2; | 672 if (flags & HAS_SSE2) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2; |
| 753 if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3; | 673 if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3; |
| 754 | |
| 755 vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_c; | 674 vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_c; |
| 756 if (flags & HAS_MMX) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_mmx; | 675 if (flags & HAS_MMX) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_mmx; |
| 757 if (flags & HAS_SSE2) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2; | 676 if (flags & HAS_SSE2) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2; |
| 758 if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3; | 677 if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3; |
| 759 | |
| 760 vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_c; | 678 vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_c; |
| 761 if (flags & HAS_MMX) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_mmx; | 679 if (flags & HAS_MMX) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_mmx; |
| 762 if (flags & HAS_SSE2) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_wmt; | 680 if (flags & HAS_SSE2) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_wmt; |
| 763 | |
| 764 vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_c; | 681 vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_c; |
| 765 if (flags & HAS_MMX) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_mmx; | 682 if (flags & HAS_MMX) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_mmx; |
| 766 if (flags & HAS_SSE2) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt; | 683 if (flags & HAS_SSE2) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt; |
| 767 if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3; | 684 if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3; |
| 768 | |
| 769 vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_c; | 685 vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_c; |
| 770 if (flags & HAS_MMX) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_mmx; | 686 if (flags & HAS_MMX) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_mmx; |
| 771 if (flags & HAS_SSE2) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt; | 687 if (flags & HAS_SSE2) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt; |
| 772 if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3; | 688 if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3; |
| 773 | |
| 774 vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_c; | 689 vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_c; |
| 775 if (flags & HAS_MMX) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_mmx; | 690 if (flags & HAS_MMX) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_mmx; |
| 776 if (flags & HAS_SSE2) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_wmt; | 691 if (flags & HAS_SSE2) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_wmt; |
| 777 | |
| 778 vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_c; | 692 vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_c; |
| 779 if (flags & HAS_MMX) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_mmx; | 693 if (flags & HAS_MMX) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_mmx; |
| 780 if (flags & HAS_SSE2) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_wmt; | 694 if (flags & HAS_SSE2) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_wmt; |
| 781 | |
| 782 vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_c; | 695 vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_c; |
| 783 if (flags & HAS_MMX) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_mmx; | 696 if (flags & HAS_MMX) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_mmx; |
| 784 if (flags & HAS_SSE2) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_wmt; | 697 if (flags & HAS_SSE2) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_wmt; |
| 785 | |
| 786 vp8_subtract_b = vp8_subtract_b_c; | 698 vp8_subtract_b = vp8_subtract_b_c; |
| 787 if (flags & HAS_MMX) vp8_subtract_b = vp8_subtract_b_mmx; | 699 if (flags & HAS_MMX) vp8_subtract_b = vp8_subtract_b_mmx; |
| 788 if (flags & HAS_SSE2) vp8_subtract_b = vp8_subtract_b_sse2; | 700 if (flags & HAS_SSE2) vp8_subtract_b = vp8_subtract_b_sse2; |
| 789 | |
| 790 vp8_subtract_mbuv = vp8_subtract_mbuv_c; | 701 vp8_subtract_mbuv = vp8_subtract_mbuv_c; |
| 791 if (flags & HAS_MMX) vp8_subtract_mbuv = vp8_subtract_mbuv_mmx; | 702 if (flags & HAS_MMX) vp8_subtract_mbuv = vp8_subtract_mbuv_mmx; |
| 792 if (flags & HAS_SSE2) vp8_subtract_mbuv = vp8_subtract_mbuv_sse2; | 703 if (flags & HAS_SSE2) vp8_subtract_mbuv = vp8_subtract_mbuv_sse2; |
| 793 | |
| 794 vp8_subtract_mby = vp8_subtract_mby_c; | 704 vp8_subtract_mby = vp8_subtract_mby_c; |
| 795 if (flags & HAS_MMX) vp8_subtract_mby = vp8_subtract_mby_mmx; | 705 if (flags & HAS_MMX) vp8_subtract_mby = vp8_subtract_mby_mmx; |
| 796 if (flags & HAS_SSE2) vp8_subtract_mby = vp8_subtract_mby_sse2; | 706 if (flags & HAS_SSE2) vp8_subtract_mby = vp8_subtract_mby_sse2; |
| 797 | |
| 798 vp8_variance16x16 = vp8_variance16x16_c; | 707 vp8_variance16x16 = vp8_variance16x16_c; |
| 799 if (flags & HAS_MMX) vp8_variance16x16 = vp8_variance16x16_mmx; | 708 if (flags & HAS_MMX) vp8_variance16x16 = vp8_variance16x16_mmx; |
| 800 if (flags & HAS_SSE2) vp8_variance16x16 = vp8_variance16x16_wmt; | 709 if (flags & HAS_SSE2) vp8_variance16x16 = vp8_variance16x16_wmt; |
| 801 | |
| 802 vp8_variance16x8 = vp8_variance16x8_c; | 710 vp8_variance16x8 = vp8_variance16x8_c; |
| 803 if (flags & HAS_MMX) vp8_variance16x8 = vp8_variance16x8_mmx; | 711 if (flags & HAS_MMX) vp8_variance16x8 = vp8_variance16x8_mmx; |
| 804 if (flags & HAS_SSE2) vp8_variance16x8 = vp8_variance16x8_wmt; | 712 if (flags & HAS_SSE2) vp8_variance16x8 = vp8_variance16x8_wmt; |
| 805 | |
| 806 vp8_variance4x4 = vp8_variance4x4_c; | 713 vp8_variance4x4 = vp8_variance4x4_c; |
| 807 if (flags & HAS_MMX) vp8_variance4x4 = vp8_variance4x4_mmx; | 714 if (flags & HAS_MMX) vp8_variance4x4 = vp8_variance4x4_mmx; |
| 808 if (flags & HAS_SSE2) vp8_variance4x4 = vp8_variance4x4_wmt; | 715 if (flags & HAS_SSE2) vp8_variance4x4 = vp8_variance4x4_wmt; |
| 809 | |
| 810 vp8_variance8x16 = vp8_variance8x16_c; | 716 vp8_variance8x16 = vp8_variance8x16_c; |
| 811 if (flags & HAS_MMX) vp8_variance8x16 = vp8_variance8x16_mmx; | 717 if (flags & HAS_MMX) vp8_variance8x16 = vp8_variance8x16_mmx; |
| 812 if (flags & HAS_SSE2) vp8_variance8x16 = vp8_variance8x16_wmt; | 718 if (flags & HAS_SSE2) vp8_variance8x16 = vp8_variance8x16_wmt; |
| 813 | |
| 814 vp8_variance8x8 = vp8_variance8x8_c; | 719 vp8_variance8x8 = vp8_variance8x8_c; |
| 815 if (flags & HAS_MMX) vp8_variance8x8 = vp8_variance8x8_mmx; | 720 if (flags & HAS_MMX) vp8_variance8x8 = vp8_variance8x8_mmx; |
| 816 if (flags & HAS_SSE2) vp8_variance8x8 = vp8_variance8x8_wmt; | 721 if (flags & HAS_SSE2) vp8_variance8x8 = vp8_variance8x8_wmt; |
| 817 | |
| 818 vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_c; | 722 vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_c; |
| 819 if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx; | 723 if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx; |
| 820 if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt; | 724 if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt; |
| 821 | |
| 822 vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_c; | 725 vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_c; |
| 823 if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx; | 726 if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx; |
| 824 if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt; | 727 if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt; |
| 825 | |
| 826 vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_c; | 728 vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_c; |
| 827 if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx; | 729 if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx; |
| 828 if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt; | 730 if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt; |
| 829 } | 731 } |
| 830 #endif | 732 #endif |
| 831 | 733 |
| 832 #ifdef __cplusplus | 734 #ifdef __cplusplus |
| 833 } // extern "C" | 735 } // extern "C" |
| 834 #endif | 736 #endif |
| 835 | 737 |
| 836 #endif | 738 #endif |
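The diff above only drops redundant blank lines from the generated setup_rtcd_internal() table; the run-time CPU dispatch (RTCD) pattern it implements is unchanged. As a rough illustration of that pattern, here is a minimal, self-contained C sketch showing how a function pointer gets bound once at startup to the best available implementation. The cpu_has_sse2() probe and the predict_* functions are hypothetical stand-ins for libvpx's x86_simd_caps() flags and the vp8_* kernels; this is not libvpx code.

    #include <stdio.h>

    /* Hypothetical CPU probe standing in for x86_simd_caps() & HAS_SSE2. */
    static int cpu_has_sse2(void) {
    #if defined(__SSE2__)
        return 1;
    #else
        return 0;
    #endif
    }

    /* Two interchangeable implementations of the same operation. */
    static int predict_c(int x)    { return x + 1; }  /* portable fallback */
    static int predict_sse2(int x) { return x + 1; }  /* "optimized" variant */

    /* The dispatch pointer; RTCD_EXTERN-style globals work the same way. */
    static int (*predict)(int) = predict_c;

    /* Analogue of setup_rtcd_internal(): pick an implementation once. */
    static void setup_dispatch(void) {
        predict = predict_c;
        if (cpu_has_sse2()) predict = predict_sse2;
    }

    int main(void) {
        setup_dispatch();
        printf("%d\n", predict(41)); /* callers always go through the pointer */
        return 0;
    }

Callers never reference a specific variant directly; they call through the pointer, so adding or removing an optimized kernel only touches the setup function, which is why the generated table in this header is safe to regenerate wholesale.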