Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(88)

Side by Side Diff: linux-x86_64/crypto/ec/p256-x86_64-asm.S

Issue 2569253003: BoringSSL: Roll generated files forward. (Closed)
Patch Set: Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « linux-x86/crypto/rc4/rc4-586.S ('k') | linux-x86_64/crypto/rc4/rc4-x86_64.S » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #if defined(__x86_64__) 1 #if defined(__x86_64__)
2 .text 2 .text
3 .extern OPENSSL_ia32cap_P 3 .extern OPENSSL_ia32cap_P
4 .hidden OPENSSL_ia32cap_P 4 .hidden OPENSSL_ia32cap_P
5 5
6 6
7 .align 64 7 .align 64
8 .Lpoly: 8 .Lpoly:
9 .quad 0xffffffffffffffff, 0x00000000ffffffff, 0x0000000000000000, 0xffffffff00000001 9 .quad 0xffffffffffffffff, 0x00000000ffffffff, 0x0000000000000000, 0xffffffff00000001
10 10
11 .LOne: 11 .LOne:
12 .long 1,1,1,1,1,1,1,1 12 .long 1,1,1,1,1,1,1,1
13 .LTwo: 13 .LTwo:
14 .long 2,2,2,2,2,2,2,2 14 .long 2,2,2,2,2,2,2,2
15 .LThree: 15 .LThree:
16 .long 3,3,3,3,3,3,3,3 16 .long 3,3,3,3,3,3,3,3
17 .LONE_mont: 17 .LONE_mont:
18 .quad 0x0000000000000001, 0xffffffff00000000, 0xffffffffffffffff, 0x00000000fffffffe 18 .quad 0x0000000000000001, 0xffffffff00000000, 0xffffffffffffffff, 0x00000000fffffffe
19 19
20 .type ecp_nistz256_mul_by_2,@function 20 .type ecp_nistz256_mul_by_2,@function
21 .align 64 21 .align 64
22 ecp_nistz256_mul_by_2: 22 ecp_nistz256_mul_by_2:
23 pushq %r12 23 pushq %r12
24 pushq %r13 24 pushq %r13
25 25
26 movq 0(%rsi),%r8 26 movq 0(%rsi),%r8
27 xorq %r13,%r13
27 movq 8(%rsi),%r9 28 movq 8(%rsi),%r9
28 addq %r8,%r8 29 addq %r8,%r8
29 movq 16(%rsi),%r10 30 movq 16(%rsi),%r10
30 adcq %r9,%r9 31 adcq %r9,%r9
31 movq 24(%rsi),%r11 32 movq 24(%rsi),%r11
32 leaq .Lpoly(%rip),%rsi 33 leaq .Lpoly(%rip),%rsi
33 movq %r8,%rax 34 movq %r8,%rax
34 adcq %r10,%r10 35 adcq %r10,%r10
35 adcq %r11,%r11 36 adcq %r11,%r11
36 movq %r9,%rdx 37 movq %r9,%rdx
37 » sbbq» %r13,%r13 38 » adcq» $0,%r13
38 39
39 subq 0(%rsi),%r8 40 subq 0(%rsi),%r8
40 movq %r10,%rcx 41 movq %r10,%rcx
41 sbbq 8(%rsi),%r9 42 sbbq 8(%rsi),%r9
42 sbbq 16(%rsi),%r10 43 sbbq 16(%rsi),%r10
43 movq %r11,%r12 44 movq %r11,%r12
44 sbbq 24(%rsi),%r11 45 sbbq 24(%rsi),%r11
45 » testq» %r13,%r13 46 » sbbq» $0,%r13
46 47
47 » cmovzq» %rax,%r8 48 » cmovcq» %rax,%r8
48 » cmovzq» %rdx,%r9 49 » cmovcq» %rdx,%r9
49 movq %r8,0(%rdi) 50 movq %r8,0(%rdi)
50 » cmovzq» %rcx,%r10 51 » cmovcq» %rcx,%r10
51 movq %r9,8(%rdi) 52 movq %r9,8(%rdi)
52 » cmovzq» %r12,%r11 53 » cmovcq» %r12,%r11
53 movq %r10,16(%rdi) 54 movq %r10,16(%rdi)
54 movq %r11,24(%rdi) 55 movq %r11,24(%rdi)
55 56
56 popq %r13 57 popq %r13
57 popq %r12 58 popq %r12
58 .byte 0xf3,0xc3 59 .byte 0xf3,0xc3
59 .size ecp_nistz256_mul_by_2,.-ecp_nistz256_mul_by_2 60 .size ecp_nistz256_mul_by_2,.-ecp_nistz256_mul_by_2
60 61
61 62
62 63
(...skipping 555 matching lines...) Expand 10 before | Expand all | Expand 10 after
618 movq %rdx,%r10 619 movq %rdx,%r10
619 mulq %r13 620 mulq %r13
620 shrq $32,%rcx 621 shrq $32,%rcx
621 addq %r11,%r8 622 addq %r11,%r8
622 adcq %rcx,%r9 623 adcq %rcx,%r9
623 movq %r8,%rcx 624 movq %r8,%rcx
624 adcq %rax,%r10 625 adcq %rax,%r10
625 movq %r9,%rsi 626 movq %r9,%rsi
626 adcq $0,%rdx 627 adcq $0,%rdx
627 628
629
630
628 subq $-1,%r8 631 subq $-1,%r8
629 movq %r10,%rax 632 movq %r10,%rax
630 sbbq %r12,%r9 633 sbbq %r12,%r9
631 sbbq $0,%r10 634 sbbq $0,%r10
632 movq %rdx,%r11 635 movq %rdx,%r11
633 sbbq %r13,%rdx 636 sbbq %r13,%rdx
634 sbbq %r13,%r13 637 sbbq %r13,%r13
635 638
636 cmovnzq %rcx,%r8 639 cmovnzq %rcx,%r8
637 cmovnzq %rsi,%r9 640 cmovnzq %rsi,%r9
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
758 .hidden ecp_nistz256_avx2_select_w7 761 .hidden ecp_nistz256_avx2_select_w7
759 .type ecp_nistz256_avx2_select_w7,@function 762 .type ecp_nistz256_avx2_select_w7,@function
760 .align 32 763 .align 32
761 ecp_nistz256_avx2_select_w7: 764 ecp_nistz256_avx2_select_w7:
762 .byte 0x0f,0x0b 765 .byte 0x0f,0x0b
763 .byte 0xf3,0xc3 766 .byte 0xf3,0xc3
764 .size ecp_nistz256_avx2_select_w7,.-ecp_nistz256_avx2_select_w7 767 .size ecp_nistz256_avx2_select_w7,.-ecp_nistz256_avx2_select_w7
765 .type __ecp_nistz256_add_toq,@function 768 .type __ecp_nistz256_add_toq,@function
766 .align 32 769 .align 32
767 __ecp_nistz256_add_toq: 770 __ecp_nistz256_add_toq:
771 xorq %r11,%r11
768 addq 0(%rbx),%r12 772 addq 0(%rbx),%r12
769 adcq 8(%rbx),%r13 773 adcq 8(%rbx),%r13
770 movq %r12,%rax 774 movq %r12,%rax
771 adcq 16(%rbx),%r8 775 adcq 16(%rbx),%r8
772 adcq 24(%rbx),%r9 776 adcq 24(%rbx),%r9
773 movq %r13,%rbp 777 movq %r13,%rbp
774 » sbbq» %r11,%r11 778 » adcq» $0,%r11
775 779
776 subq $-1,%r12 780 subq $-1,%r12
777 movq %r8,%rcx 781 movq %r8,%rcx
778 sbbq %r14,%r13 782 sbbq %r14,%r13
779 sbbq $0,%r8 783 sbbq $0,%r8
780 movq %r9,%r10 784 movq %r9,%r10
781 sbbq %r15,%r9 785 sbbq %r15,%r9
782 » testq» %r11,%r11 786 » sbbq» $0,%r11
783 787
784 » cmovzq» %rax,%r12 788 » cmovcq» %rax,%r12
785 » cmovzq» %rbp,%r13 789 » cmovcq» %rbp,%r13
786 movq %r12,0(%rdi) 790 movq %r12,0(%rdi)
787 » cmovzq» %rcx,%r8 791 » cmovcq» %rcx,%r8
788 movq %r13,8(%rdi) 792 movq %r13,8(%rdi)
789 » cmovzq» %r10,%r9 793 » cmovcq» %r10,%r9
790 movq %r8,16(%rdi) 794 movq %r8,16(%rdi)
791 movq %r9,24(%rdi) 795 movq %r9,24(%rdi)
792 796
793 .byte 0xf3,0xc3 797 .byte 0xf3,0xc3
794 .size __ecp_nistz256_add_toq,.-__ecp_nistz256_add_toq 798 .size __ecp_nistz256_add_toq,.-__ecp_nistz256_add_toq
795 799
796 .type __ecp_nistz256_sub_fromq,@function 800 .type __ecp_nistz256_sub_fromq,@function
797 .align 32 801 .align 32
798 __ecp_nistz256_sub_fromq: 802 __ecp_nistz256_sub_fromq:
799 subq 0(%rbx),%r12 803 subq 0(%rbx),%r12
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after
847 cmovnzq %rbp,%r13 851 cmovnzq %rbp,%r13
848 cmovnzq %rcx,%r8 852 cmovnzq %rcx,%r8
849 cmovnzq %r10,%r9 853 cmovnzq %r10,%r9
850 854
851 .byte 0xf3,0xc3 855 .byte 0xf3,0xc3
852 .size __ecp_nistz256_subq,.-__ecp_nistz256_subq 856 .size __ecp_nistz256_subq,.-__ecp_nistz256_subq
853 857
854 .type __ecp_nistz256_mul_by_2q,@function 858 .type __ecp_nistz256_mul_by_2q,@function
855 .align 32 859 .align 32
856 __ecp_nistz256_mul_by_2q: 860 __ecp_nistz256_mul_by_2q:
861 xorq %r11,%r11
857 addq %r12,%r12 862 addq %r12,%r12
858 adcq %r13,%r13 863 adcq %r13,%r13
859 movq %r12,%rax 864 movq %r12,%rax
860 adcq %r8,%r8 865 adcq %r8,%r8
861 adcq %r9,%r9 866 adcq %r9,%r9
862 movq %r13,%rbp 867 movq %r13,%rbp
863 » sbbq» %r11,%r11 868 » adcq» $0,%r11
864 869
865 subq $-1,%r12 870 subq $-1,%r12
866 movq %r8,%rcx 871 movq %r8,%rcx
867 sbbq %r14,%r13 872 sbbq %r14,%r13
868 sbbq $0,%r8 873 sbbq $0,%r8
869 movq %r9,%r10 874 movq %r9,%r10
870 sbbq %r15,%r9 875 sbbq %r15,%r9
871 » testq» %r11,%r11 876 » sbbq» $0,%r11
872 877
873 » cmovzq» %rax,%r12 878 » cmovcq» %rax,%r12
874 » cmovzq» %rbp,%r13 879 » cmovcq» %rbp,%r13
875 movq %r12,0(%rdi) 880 movq %r12,0(%rdi)
876 » cmovzq» %rcx,%r8 881 » cmovcq» %rcx,%r8
877 movq %r13,8(%rdi) 882 movq %r13,8(%rdi)
878 » cmovzq» %r10,%r9 883 » cmovcq» %r10,%r9
879 movq %r8,16(%rdi) 884 movq %r8,16(%rdi)
880 movq %r9,24(%rdi) 885 movq %r9,24(%rdi)
881 886
882 .byte 0xf3,0xc3 887 .byte 0xf3,0xc3
883 .size __ecp_nistz256_mul_by_2q,.-__ecp_nistz256_mul_by_2q 888 .size __ecp_nistz256_mul_by_2q,.-__ecp_nistz256_mul_by_2q
884 .globl ecp_nistz256_point_double 889 .globl ecp_nistz256_point_double
885 .hidden ecp_nistz256_point_double 890 .hidden ecp_nistz256_point_double
886 .type ecp_nistz256_point_double,@function 891 .type ecp_nistz256_point_double,@function
887 .align 32 892 .align 32
888 ecp_nistz256_point_double: 893 ecp_nistz256_point_double:
(...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after
1100 movdqu 0(%rsi),%xmm0 1105 movdqu 0(%rsi),%xmm0
1101 movdqu 16(%rsi),%xmm1 1106 movdqu 16(%rsi),%xmm1
1102 movdqu 32(%rsi),%xmm2 1107 movdqu 32(%rsi),%xmm2
1103 movdqu 48(%rsi),%xmm3 1108 movdqu 48(%rsi),%xmm3
1104 movdqu 64(%rsi),%xmm4 1109 movdqu 64(%rsi),%xmm4
1105 movdqu 80(%rsi),%xmm5 1110 movdqu 80(%rsi),%xmm5
1106 movq %rsi,%rbx 1111 movq %rsi,%rbx
1107 movq %rdx,%rsi 1112 movq %rdx,%rsi
1108 movdqa %xmm0,384(%rsp) 1113 movdqa %xmm0,384(%rsp)
1109 movdqa %xmm1,384+16(%rsp) 1114 movdqa %xmm1,384+16(%rsp)
1110 por %xmm0,%xmm1
1111 movdqa %xmm2,416(%rsp) 1115 movdqa %xmm2,416(%rsp)
1112 movdqa %xmm3,416+16(%rsp) 1116 movdqa %xmm3,416+16(%rsp)
1113 por %xmm2,%xmm3
1114 movdqa %xmm4,448(%rsp) 1117 movdqa %xmm4,448(%rsp)
1115 movdqa %xmm5,448+16(%rsp) 1118 movdqa %xmm5,448+16(%rsp)
1116 » por» %xmm1,%xmm3 1119 » por» %xmm4,%xmm5
1117 1120
1118 movdqu 0(%rsi),%xmm0 1121 movdqu 0(%rsi),%xmm0
1119 » pshufd» $0xb1,%xmm3,%xmm5 1122 » pshufd» $0xb1,%xmm5,%xmm3
1120 movdqu 16(%rsi),%xmm1 1123 movdqu 16(%rsi),%xmm1
1121 movdqu 32(%rsi),%xmm2 1124 movdqu 32(%rsi),%xmm2
1122 por %xmm3,%xmm5 1125 por %xmm3,%xmm5
1123 movdqu 48(%rsi),%xmm3 1126 movdqu 48(%rsi),%xmm3
1124 movq 64+0(%rsi),%rax 1127 movq 64+0(%rsi),%rax
1125 movq 64+8(%rsi),%r14 1128 movq 64+8(%rsi),%r14
1126 movq 64+16(%rsi),%r15 1129 movq 64+16(%rsi),%r15
1127 movq 64+24(%rsi),%r8 1130 movq 64+24(%rsi),%r8
1128 movdqa %xmm0,480(%rsp) 1131 movdqa %xmm0,480(%rsp)
1129 pshufd $0x1e,%xmm5,%xmm4 1132 pshufd $0x1e,%xmm5,%xmm4
1130 movdqa %xmm1,480+16(%rsp) 1133 movdqa %xmm1,480+16(%rsp)
1134 movdqu 64(%rsi),%xmm0
1135 movdqu 80(%rsi),%xmm1
1136 movdqa %xmm2,512(%rsp)
1137 movdqa %xmm3,512+16(%rsp)
1138 por %xmm4,%xmm5
1139 pxor %xmm4,%xmm4
1131 por %xmm0,%xmm1 1140 por %xmm0,%xmm1
1132 .byte 102,72,15,110,199 1141 .byte 102,72,15,110,199
1133 movdqa %xmm2,512(%rsp)
1134 movdqa %xmm3,512+16(%rsp)
1135 por %xmm2,%xmm3
1136 por %xmm4,%xmm5
1137 pxor %xmm4,%xmm4
1138 por %xmm1,%xmm3
1139 1142
1140 leaq 64-0(%rsi),%rsi 1143 leaq 64-0(%rsi),%rsi
1141 movq %rax,544+0(%rsp) 1144 movq %rax,544+0(%rsp)
1142 movq %r14,544+8(%rsp) 1145 movq %r14,544+8(%rsp)
1143 movq %r15,544+16(%rsp) 1146 movq %r15,544+16(%rsp)
1144 movq %r8,544+24(%rsp) 1147 movq %r8,544+24(%rsp)
1145 leaq 96(%rsp),%rdi 1148 leaq 96(%rsp),%rdi
1146 call __ecp_nistz256_sqr_montq 1149 call __ecp_nistz256_sqr_montq
1147 1150
1148 pcmpeqd %xmm4,%xmm5 1151 pcmpeqd %xmm4,%xmm5
1149 » pshufd» $0xb1,%xmm3,%xmm4 1152 » pshufd» $0xb1,%xmm1,%xmm4
1150 » por» %xmm3,%xmm4 1153 » por» %xmm1,%xmm4
1151 pshufd $0,%xmm5,%xmm5 1154 pshufd $0,%xmm5,%xmm5
1152 pshufd $0x1e,%xmm4,%xmm3 1155 pshufd $0x1e,%xmm4,%xmm3
1153 por %xmm3,%xmm4 1156 por %xmm3,%xmm4
1154 pxor %xmm3,%xmm3 1157 pxor %xmm3,%xmm3
1155 pcmpeqd %xmm3,%xmm4 1158 pcmpeqd %xmm3,%xmm4
1156 pshufd $0,%xmm4,%xmm4 1159 pshufd $0,%xmm4,%xmm4
1157 movq 64+0(%rbx),%rax 1160 movq 64+0(%rbx),%rax
1158 movq 64+8(%rbx),%r14 1161 movq 64+8(%rbx),%r14
1159 movq 64+16(%rbx),%r15 1162 movq 64+16(%rbx),%r15
1160 movq 64+24(%rbx),%r8 1163 movq 64+24(%rbx),%r8
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after
1323 movq 8+32(%rsp),%r10 1326 movq 8+32(%rsp),%r10
1324 leaq 0+32(%rsp),%rsi 1327 leaq 0+32(%rsp),%rsi
1325 movq 16+32(%rsp),%r11 1328 movq 16+32(%rsp),%r11
1326 movq 24+32(%rsp),%r12 1329 movq 24+32(%rsp),%r12
1327 leaq 192(%rsp),%rdi 1330 leaq 192(%rsp),%rdi
1328 call __ecp_nistz256_mul_montq 1331 call __ecp_nistz256_mul_montq
1329 1332
1330 1333
1331 1334
1332 1335
1336 xorq %r11,%r11
1333 addq %r12,%r12 1337 addq %r12,%r12
1334 leaq 96(%rsp),%rsi 1338 leaq 96(%rsp),%rsi
1335 adcq %r13,%r13 1339 adcq %r13,%r13
1336 movq %r12,%rax 1340 movq %r12,%rax
1337 adcq %r8,%r8 1341 adcq %r8,%r8
1338 adcq %r9,%r9 1342 adcq %r9,%r9
1339 movq %r13,%rbp 1343 movq %r13,%rbp
1340 » sbbq» %r11,%r11 1344 » adcq» $0,%r11
1341 1345
1342 subq $-1,%r12 1346 subq $-1,%r12
1343 movq %r8,%rcx 1347 movq %r8,%rcx
1344 sbbq %r14,%r13 1348 sbbq %r14,%r13
1345 sbbq $0,%r8 1349 sbbq $0,%r8
1346 movq %r9,%r10 1350 movq %r9,%r10
1347 sbbq %r15,%r9 1351 sbbq %r15,%r9
1348 » testq» %r11,%r11 1352 » sbbq» $0,%r11
1349 1353
1350 » cmovzq» %rax,%r12 1354 » cmovcq» %rax,%r12
1351 movq 0(%rsi),%rax 1355 movq 0(%rsi),%rax
1352 » cmovzq» %rbp,%r13 1356 » cmovcq» %rbp,%r13
1353 movq 8(%rsi),%rbp 1357 movq 8(%rsi),%rbp
1354 » cmovzq» %rcx,%r8 1358 » cmovcq» %rcx,%r8
1355 movq 16(%rsi),%rcx 1359 movq 16(%rsi),%rcx
1356 » cmovzq» %r10,%r9 1360 » cmovcq» %r10,%r9
1357 movq 24(%rsi),%r10 1361 movq 24(%rsi),%r10
1358 1362
1359 call __ecp_nistz256_subq 1363 call __ecp_nistz256_subq
1360 1364
1361 leaq 128(%rsp),%rbx 1365 leaq 128(%rsp),%rbx
1362 leaq 288(%rsp),%rdi 1366 leaq 288(%rsp),%rdi
1363 call __ecp_nistz256_sub_fromq 1367 call __ecp_nistz256_sub_fromq
1364 1368
1365 movq 192+0(%rsp),%rax 1369 movq 192+0(%rsp),%rax
1366 movq 192+8(%rsp),%rbp 1370 movq 192+8(%rsp),%rbp
(...skipping 134 matching lines...) Expand 10 before | Expand all | Expand 10 after
1501 movdqu 32(%rsi),%xmm2 1505 movdqu 32(%rsi),%xmm2
1502 movdqu 48(%rsi),%xmm3 1506 movdqu 48(%rsi),%xmm3
1503 movdqu 64(%rsi),%xmm4 1507 movdqu 64(%rsi),%xmm4
1504 movdqu 80(%rsi),%xmm5 1508 movdqu 80(%rsi),%xmm5
1505 movq 64+0(%rsi),%rax 1509 movq 64+0(%rsi),%rax
1506 movq 64+8(%rsi),%r14 1510 movq 64+8(%rsi),%r14
1507 movq 64+16(%rsi),%r15 1511 movq 64+16(%rsi),%r15
1508 movq 64+24(%rsi),%r8 1512 movq 64+24(%rsi),%r8
1509 movdqa %xmm0,320(%rsp) 1513 movdqa %xmm0,320(%rsp)
1510 movdqa %xmm1,320+16(%rsp) 1514 movdqa %xmm1,320+16(%rsp)
1511 por %xmm0,%xmm1
1512 movdqa %xmm2,352(%rsp) 1515 movdqa %xmm2,352(%rsp)
1513 movdqa %xmm3,352+16(%rsp) 1516 movdqa %xmm3,352+16(%rsp)
1514 por %xmm2,%xmm3
1515 movdqa %xmm4,384(%rsp) 1517 movdqa %xmm4,384(%rsp)
1516 movdqa %xmm5,384+16(%rsp) 1518 movdqa %xmm5,384+16(%rsp)
1517 » por» %xmm1,%xmm3 1519 » por» %xmm4,%xmm5
1518 1520
1519 movdqu 0(%rbx),%xmm0 1521 movdqu 0(%rbx),%xmm0
1520 » pshufd» $0xb1,%xmm3,%xmm5 1522 » pshufd» $0xb1,%xmm5,%xmm3
1521 movdqu 16(%rbx),%xmm1 1523 movdqu 16(%rbx),%xmm1
1522 movdqu 32(%rbx),%xmm2 1524 movdqu 32(%rbx),%xmm2
1523 por %xmm3,%xmm5 1525 por %xmm3,%xmm5
1524 movdqu 48(%rbx),%xmm3 1526 movdqu 48(%rbx),%xmm3
1525 movdqa %xmm0,416(%rsp) 1527 movdqa %xmm0,416(%rsp)
1526 pshufd $0x1e,%xmm5,%xmm4 1528 pshufd $0x1e,%xmm5,%xmm4
1527 movdqa %xmm1,416+16(%rsp) 1529 movdqa %xmm1,416+16(%rsp)
1528 por %xmm0,%xmm1 1530 por %xmm0,%xmm1
1529 .byte 102,72,15,110,199 1531 .byte 102,72,15,110,199
1530 movdqa %xmm2,448(%rsp) 1532 movdqa %xmm2,448(%rsp)
(...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after
1628 movq 8+128(%rsp),%r10 1630 movq 8+128(%rsp),%r10
1629 leaq 0+128(%rsp),%rsi 1631 leaq 0+128(%rsp),%rsi
1630 movq 16+128(%rsp),%r11 1632 movq 16+128(%rsp),%r11
1631 movq 24+128(%rsp),%r12 1633 movq 24+128(%rsp),%r12
1632 leaq 0(%rsp),%rdi 1634 leaq 0(%rsp),%rdi
1633 call __ecp_nistz256_mul_montq 1635 call __ecp_nistz256_mul_montq
1634 1636
1635 1637
1636 1638
1637 1639
1640 xorq %r11,%r11
1638 addq %r12,%r12 1641 addq %r12,%r12
1639 leaq 192(%rsp),%rsi 1642 leaq 192(%rsp),%rsi
1640 adcq %r13,%r13 1643 adcq %r13,%r13
1641 movq %r12,%rax 1644 movq %r12,%rax
1642 adcq %r8,%r8 1645 adcq %r8,%r8
1643 adcq %r9,%r9 1646 adcq %r9,%r9
1644 movq %r13,%rbp 1647 movq %r13,%rbp
1645 » sbbq» %r11,%r11 1648 » adcq» $0,%r11
1646 1649
1647 subq $-1,%r12 1650 subq $-1,%r12
1648 movq %r8,%rcx 1651 movq %r8,%rcx
1649 sbbq %r14,%r13 1652 sbbq %r14,%r13
1650 sbbq $0,%r8 1653 sbbq $0,%r8
1651 movq %r9,%r10 1654 movq %r9,%r10
1652 sbbq %r15,%r9 1655 sbbq %r15,%r9
1653 » testq» %r11,%r11 1656 » sbbq» $0,%r11
1654 1657
1655 » cmovzq» %rax,%r12 1658 » cmovcq» %rax,%r12
1656 movq 0(%rsi),%rax 1659 movq 0(%rsi),%rax
1657 » cmovzq» %rbp,%r13 1660 » cmovcq» %rbp,%r13
1658 movq 8(%rsi),%rbp 1661 movq 8(%rsi),%rbp
1659 » cmovzq» %rcx,%r8 1662 » cmovcq» %rcx,%r8
1660 movq 16(%rsi),%rcx 1663 movq 16(%rsi),%rcx
1661 » cmovzq» %r10,%r9 1664 » cmovcq» %r10,%r9
1662 movq 24(%rsi),%r10 1665 movq 24(%rsi),%r10
1663 1666
1664 call __ecp_nistz256_subq 1667 call __ecp_nistz256_subq
1665 1668
1666 leaq 160(%rsp),%rbx 1669 leaq 160(%rsp),%rbx
1667 leaq 224(%rsp),%rdi 1670 leaq 224(%rsp),%rdi
1668 call __ecp_nistz256_sub_fromq 1671 call __ecp_nistz256_sub_fromq
1669 1672
1670 movq 0+0(%rsp),%rax 1673 movq 0+0(%rsp),%rax
1671 movq 0+8(%rsp),%rbp 1674 movq 0+8(%rsp),%rbp
(...skipping 108 matching lines...) Expand 10 before | Expand all | Expand 10 after
1780 addq $480+8,%rsp 1783 addq $480+8,%rsp
1781 popq %r15 1784 popq %r15
1782 popq %r14 1785 popq %r14
1783 popq %r13 1786 popq %r13
1784 popq %r12 1787 popq %r12
1785 popq %rbx 1788 popq %rbx
1786 popq %rbp 1789 popq %rbp
1787 .byte 0xf3,0xc3 1790 .byte 0xf3,0xc3
1788 .size ecp_nistz256_point_add_affine,.-ecp_nistz256_point_add_affine 1791 .size ecp_nistz256_point_add_affine,.-ecp_nistz256_point_add_affine
1789 #endif 1792 #endif
OLDNEW
« no previous file with comments | « linux-x86/crypto/rc4/rc4-586.S ('k') | linux-x86_64/crypto/rc4/rc4-x86_64.S » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698