Chromium Code Reviews

Unified Diff: mac-x86_64/crypto/ec/p256-x86_64-asm.S

Issue 2569253003: BoringSSL: Roll generated files forward. (Closed)
Patch Set: Created 4 years ago
#if defined(__x86_64__)
.text



.p2align 6
L$poly:
.quad 0xffffffffffffffff, 0x00000000ffffffff, 0x0000000000000000, 0xffffffff00000001

L$One:
.long 1,1,1,1,1,1,1,1
L$Two:
.long 2,2,2,2,2,2,2,2
L$Three:
.long 3,3,3,3,3,3,3,3
L$ONE_mont:
.quad 0x0000000000000001, 0xffffffff00000000, 0xffffffffffffffff, 0x00000000fffffffe


.p2align 6
ecp_nistz256_mul_by_2:
pushq %r12
pushq %r13

movq 0(%rsi),%r8
+ xorq %r13,%r13
movq 8(%rsi),%r9
addq %r8,%r8
movq 16(%rsi),%r10
adcq %r9,%r9
movq 24(%rsi),%r11
leaq L$poly(%rip),%rsi
movq %r8,%rax
adcq %r10,%r10
adcq %r11,%r11
movq %r9,%rdx
- sbbq %r13,%r13
+ adcq $0,%r13

subq 0(%rsi),%r8
movq %r10,%rcx
sbbq 8(%rsi),%r9
sbbq 16(%rsi),%r10
movq %r11,%r12
sbbq 24(%rsi),%r11
- testq %r13,%r13
+ sbbq $0,%r13

- cmovzq %rax,%r8
- cmovzq %rdx,%r9
+ cmovcq %rax,%r8
+ cmovcq %rdx,%r9
movq %r8,0(%rdi)
- cmovzq %rcx,%r10
+ cmovcq %rcx,%r10
movq %r9,8(%rdi)
- cmovzq %r12,%r11
+ cmovcq %r12,%r11
movq %r10,16(%rdi)
movq %r11,24(%rdi)

popq %r13
popq %r12
.byte 0xf3,0xc3



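The hunk above shows the pattern that repeats throughout this roll: ecp_nistz256_mul_by_2 now clears %r13 up front (xorq), captures the carry out of the doubling with adcq $0, folds that carry into the trial subtraction of L$poly with sbbq $0, and picks the result with cmovcq on the final borrow. The old sequence (sbbq %r13,%r13 / testq / cmovzq) only switched to the subtracted value when the doubling carried out of bit 255, so an output could be left in [p, 2^256); the new sequence compares the full 257-bit sum against p. A minimal C sketch of what the new code computes, assuming the input is already below p; the helper name and 4-limb layout are illustrative, not BoringSSL's C API, and it relies on the gcc/clang unsigned __int128 extension:

#include <stdint.h>

/* L$poly from the file above: the P-256 prime p as little-endian 64-bit limbs. */
static const uint64_t P256[4] = {
    0xffffffffffffffffULL, 0x00000000ffffffffULL,
    0x0000000000000000ULL, 0xffffffff00000001ULL};

/* Illustrative only: r = 2*a mod p for a < p, mirroring the new carry handling. */
static void mul_by_2_modp(uint64_t r[4], const uint64_t a[4]) {
  uint64_t dbl[4], sub[4];
  uint64_t carry = 0, borrow = 0;

  /* addq/adcq chain: dbl = 2*a, keeping the 257th bit in carry
     (xorq %r13,%r13 ... adcq $0,%r13). */
  for (int i = 0; i < 4; i++) {
    unsigned __int128 t = (unsigned __int128)a[i] + a[i] + carry;
    dbl[i] = (uint64_t)t;
    carry = (uint64_t)(t >> 64);
  }

  /* subq/sbbq chain: sub = dbl - p; the saved carry then absorbs the
     final borrow (sbbq $0,%r13) so the test covers all 257 bits. */
  for (int i = 0; i < 4; i++) {
    uint64_t d = dbl[i], p = P256[i];
    sub[i] = d - p - borrow;
    borrow = (d < p) || (borrow && d == p);
  }
  int below_p = carry < borrow; /* CF after sbbq $0,%r13: 2*a was below p */

  /* cmovcq: keep the unreduced double only when it is already below p. */
  for (int i = 0; i < 4; i++) {
    r[i] = below_p ? dbl[i] : sub[i];
  }
}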
(...skipping 555 matching lines...)
movq %rdx,%r10
mulq %r13
shrq $32,%rcx
addq %r11,%r8
adcq %rcx,%r9
movq %r8,%rcx
adcq %rax,%r10
movq %r9,%rsi
adcq $0,%rdx

+
+
subq $-1,%r8
movq %r10,%rax
sbbq %r12,%r9
sbbq $0,%r10
movq %rdx,%r11
sbbq %r13,%rdx
sbbq %r13,%r13

cmovnzq %rcx,%r8
cmovnzq %rsi,%r9
(...skipping 120 matching lines...)
.private_extern _ecp_nistz256_avx2_select_w7

.p2align 5
_ecp_nistz256_avx2_select_w7:
.byte 0x0f,0x0b
.byte 0xf3,0xc3


.p2align 5
__ecp_nistz256_add_toq:
+ xorq %r11,%r11
addq 0(%rbx),%r12
adcq 8(%rbx),%r13
movq %r12,%rax
adcq 16(%rbx),%r8
adcq 24(%rbx),%r9
movq %r13,%rbp
- sbbq %r11,%r11
+ adcq $0,%r11

subq $-1,%r12
movq %r8,%rcx
sbbq %r14,%r13
sbbq $0,%r8
movq %r9,%r10
sbbq %r15,%r9
- testq %r11,%r11
+ sbbq $0,%r11

- cmovzq %rax,%r12
- cmovzq %rbp,%r13
+ cmovcq %rax,%r12
+ cmovcq %rbp,%r13
movq %r12,0(%rdi)
- cmovzq %rcx,%r8
+ cmovcq %rcx,%r8
movq %r13,8(%rdi)
- cmovzq %r10,%r9
+ cmovcq %r10,%r9
movq %r8,16(%rdi)
movq %r9,24(%rdi)

.byte 0xf3,0xc3



.p2align 5
__ecp_nistz256_sub_fromq:
subq 0(%rbx),%r12
(...skipping 47 matching lines...)
cmovnzq %rbp,%r13
cmovnzq %rcx,%r8
cmovnzq %r10,%r9

.byte 0xf3,0xc3



.p2align 5
__ecp_nistz256_mul_by_2q:
+ xorq %r11,%r11
addq %r12,%r12
adcq %r13,%r13
movq %r12,%rax
adcq %r8,%r8
adcq %r9,%r9
movq %r13,%rbp
- sbbq %r11,%r11
+ adcq $0,%r11

subq $-1,%r12
movq %r8,%rcx
sbbq %r14,%r13
sbbq $0,%r8
movq %r9,%r10
sbbq %r15,%r9
- testq %r11,%r11
+ sbbq $0,%r11

- cmovzq %rax,%r12
- cmovzq %rbp,%r13
+ cmovcq %rax,%r12
+ cmovcq %rbp,%r13
movq %r12,0(%rdi)
- cmovzq %rcx,%r8
+ cmovcq %rcx,%r8
movq %r13,8(%rdi)
- cmovzq %r10,%r9
+ cmovcq %r10,%r9
movq %r8,16(%rdi)
movq %r9,24(%rdi)

.byte 0xf3,0xc3

.globl _ecp_nistz256_point_double
.private_extern _ecp_nistz256_point_double

.p2align 5
_ecp_nistz256_point_double:
(...skipping 211 matching lines...)
movdqu 0(%rsi),%xmm0
movdqu 16(%rsi),%xmm1
movdqu 32(%rsi),%xmm2
movdqu 48(%rsi),%xmm3
movdqu 64(%rsi),%xmm4
movdqu 80(%rsi),%xmm5
movq %rsi,%rbx
movq %rdx,%rsi
movdqa %xmm0,384(%rsp)
movdqa %xmm1,384+16(%rsp)
- por %xmm0,%xmm1
movdqa %xmm2,416(%rsp)
movdqa %xmm3,416+16(%rsp)
- por %xmm2,%xmm3
movdqa %xmm4,448(%rsp)
movdqa %xmm5,448+16(%rsp)
- por %xmm1,%xmm3
+ por %xmm4,%xmm5

movdqu 0(%rsi),%xmm0
- pshufd $0xb1,%xmm3,%xmm5
+ pshufd $0xb1,%xmm5,%xmm3
movdqu 16(%rsi),%xmm1
movdqu 32(%rsi),%xmm2
por %xmm3,%xmm5
movdqu 48(%rsi),%xmm3
movq 64+0(%rsi),%rax
movq 64+8(%rsi),%r14
movq 64+16(%rsi),%r15
movq 64+24(%rsi),%r8
movdqa %xmm0,480(%rsp)
pshufd $0x1e,%xmm5,%xmm4
movdqa %xmm1,480+16(%rsp)
+ movdqu 64(%rsi),%xmm0
+ movdqu 80(%rsi),%xmm1
+ movdqa %xmm2,512(%rsp)
+ movdqa %xmm3,512+16(%rsp)
+ por %xmm4,%xmm5
+ pxor %xmm4,%xmm4
por %xmm0,%xmm1
.byte 102,72,15,110,199
- movdqa %xmm2,512(%rsp)
- movdqa %xmm3,512+16(%rsp)
- por %xmm2,%xmm3
- por %xmm4,%xmm5
- pxor %xmm4,%xmm4
- por %xmm1,%xmm3

leaq 64-0(%rsi),%rsi
movq %rax,544+0(%rsp)
movq %r14,544+8(%rsp)
movq %r15,544+16(%rsp)
movq %r8,544+24(%rsp)
leaq 96(%rsp),%rdi
call __ecp_nistz256_sqr_montq

pcmpeqd %xmm4,%xmm5
- pshufd $0xb1,%xmm3,%xmm4
- por %xmm3,%xmm4
+ pshufd $0xb1,%xmm1,%xmm4
+ por %xmm1,%xmm4
pshufd $0,%xmm5,%xmm5
pshufd $0x1e,%xmm4,%xmm3
por %xmm3,%xmm4
pxor %xmm3,%xmm3
pcmpeqd %xmm3,%xmm4
pshufd $0,%xmm4,%xmm4
movq 64+0(%rbx),%rax
movq 64+8(%rbx),%r14
movq 64+16(%rbx),%r15
movq 64+24(%rbx),%r8
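In the hunk above the stores of the input point do not change; what changes is which words feed the OR/compare chain. The new code folds the two halves of the Z coordinate together (por %xmm4,%xmm5, then the pshufd/por/pcmpeqd collapse), and the added loads of 64(%rsi)/80(%rsi) build the same fold for the other input in %xmm1, so the broadcast all-ones/all-zeros masks appear to be "Z == 0" tests rather than the "X|Y == 0" tests the old code built. A scalar sketch of that branch-free test; the helper name is hypothetical and only illustrates the predicate the SSE sequence computes:

#include <stdint.h>

/* All-ones when every limb of the 256-bit coordinate is zero, 0 otherwise.
   The por/pshufd/pcmpeqd chain above computes the same predicate, broadcast
   across an xmm register for later masking. */
static uint64_t is_zero_mask(const uint64_t z[4]) {
  uint64_t acc = z[0] | z[1] | z[2] | z[3]; /* por: fold the four limbs */
  acc = (acc | (0 - acc)) >> 63;            /* 1 iff any limb was nonzero */
  return acc - 1;                           /* 0 or 0xffffffffffffffff */
}

/* A mask like this is typically consumed branch-free, e.g.
   r[i] = (mask & fallback[i]) | (~mask & computed[i]); */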
(...skipping 162 matching lines...)
movq 8+32(%rsp),%r10
leaq 0+32(%rsp),%rsi
movq 16+32(%rsp),%r11
movq 24+32(%rsp),%r12
leaq 192(%rsp),%rdi
call __ecp_nistz256_mul_montq




+ xorq %r11,%r11
addq %r12,%r12
leaq 96(%rsp),%rsi
adcq %r13,%r13
movq %r12,%rax
adcq %r8,%r8
adcq %r9,%r9
movq %r13,%rbp
- sbbq %r11,%r11
+ adcq $0,%r11

subq $-1,%r12
movq %r8,%rcx
sbbq %r14,%r13
sbbq $0,%r8
movq %r9,%r10
sbbq %r15,%r9
- testq %r11,%r11
+ sbbq $0,%r11

- cmovzq %rax,%r12
+ cmovcq %rax,%r12
movq 0(%rsi),%rax
- cmovzq %rbp,%r13
+ cmovcq %rbp,%r13
movq 8(%rsi),%rbp
- cmovzq %rcx,%r8
+ cmovcq %rcx,%r8
movq 16(%rsi),%rcx
- cmovzq %r10,%r9
+ cmovcq %r10,%r9
movq 24(%rsi),%r10

call __ecp_nistz256_subq

leaq 128(%rsp),%rbx
leaq 288(%rsp),%rdi
call __ecp_nistz256_sub_fromq

movq 192+0(%rsp),%rax
movq 192+8(%rsp),%rbp
(...skipping 134 matching lines...)
movdqu 32(%rsi),%xmm2
movdqu 48(%rsi),%xmm3
movdqu 64(%rsi),%xmm4
movdqu 80(%rsi),%xmm5
movq 64+0(%rsi),%rax
movq 64+8(%rsi),%r14
movq 64+16(%rsi),%r15
movq 64+24(%rsi),%r8
movdqa %xmm0,320(%rsp)
movdqa %xmm1,320+16(%rsp)
- por %xmm0,%xmm1
movdqa %xmm2,352(%rsp)
movdqa %xmm3,352+16(%rsp)
- por %xmm2,%xmm3
movdqa %xmm4,384(%rsp)
movdqa %xmm5,384+16(%rsp)
- por %xmm1,%xmm3
+ por %xmm4,%xmm5

movdqu 0(%rbx),%xmm0
- pshufd $0xb1,%xmm3,%xmm5
+ pshufd $0xb1,%xmm5,%xmm3
movdqu 16(%rbx),%xmm1
movdqu 32(%rbx),%xmm2
por %xmm3,%xmm5
movdqu 48(%rbx),%xmm3
movdqa %xmm0,416(%rsp)
pshufd $0x1e,%xmm5,%xmm4
movdqa %xmm1,416+16(%rsp)
por %xmm0,%xmm1
.byte 102,72,15,110,199
movdqa %xmm2,448(%rsp)
(...skipping 97 matching lines...)
movq 8+128(%rsp),%r10
leaq 0+128(%rsp),%rsi
movq 16+128(%rsp),%r11
movq 24+128(%rsp),%r12
leaq 0(%rsp),%rdi
call __ecp_nistz256_mul_montq




+ xorq %r11,%r11
addq %r12,%r12
leaq 192(%rsp),%rsi
adcq %r13,%r13
movq %r12,%rax
adcq %r8,%r8
adcq %r9,%r9
movq %r13,%rbp
- sbbq %r11,%r11
+ adcq $0,%r11

subq $-1,%r12
movq %r8,%rcx
sbbq %r14,%r13
sbbq $0,%r8
movq %r9,%r10
sbbq %r15,%r9
- testq %r11,%r11
+ sbbq $0,%r11

- cmovzq %rax,%r12
+ cmovcq %rax,%r12
movq 0(%rsi),%rax
- cmovzq %rbp,%r13
+ cmovcq %rbp,%r13
movq 8(%rsi),%rbp
- cmovzq %rcx,%r8
+ cmovcq %rcx,%r8
movq 16(%rsi),%rcx
- cmovzq %r10,%r9
+ cmovcq %r10,%r9
movq 24(%rsi),%r10

call __ecp_nistz256_subq

leaq 160(%rsp),%rbx
leaq 224(%rsp),%rdi
call __ecp_nistz256_sub_fromq

movq 0+0(%rsp),%rax
movq 0+8(%rsp),%rbp
(...skipping 108 matching lines...)
addq $480+8,%rsp
popq %r15
popq %r14
popq %r13
popq %r12
popq %rbx
popq %rbp
.byte 0xf3,0xc3

#endif