OLD | NEW |
1 #if defined(__x86_64__) | 1 #if defined(__x86_64__) |
2 .text | 2 .text |
3 | 3 |
4 | 4 |
5 | 5 |
6 | 6 |
7 | 7 |
8 | 8 |
9 | 9 |
10 | 10 |
(...skipping 43 matching lines...)
54 .byte 102,15,56,0,211 | 54 .byte 102,15,56,0,211 |
55 movdqa %xmm0,%xmm3 | 55 movdqa %xmm0,%xmm3 |
56 pxor %xmm5,%xmm2 | 56 pxor %xmm5,%xmm2 |
57 .byte 102,15,56,0,193 | 57 .byte 102,15,56,0,193 |
58 addq $16,%r9 | 58 addq $16,%r9 |
59 pxor %xmm2,%xmm0 | 59 pxor %xmm2,%xmm0 |
60 .byte 102,15,56,0,220 | 60 .byte 102,15,56,0,220 |
61 addq $16,%r11 | 61 addq $16,%r11 |
62 pxor %xmm0,%xmm3 | 62 pxor %xmm0,%xmm3 |
63 .byte 102,15,56,0,193 | 63 .byte 102,15,56,0,193 |
64 » andq» $48,%r11 | 64 » andq» $0x30,%r11 |
65 subq $1,%rax | 65 subq $1,%rax |
66 pxor %xmm3,%xmm0 | 66 pxor %xmm3,%xmm0 |
67 | 67 |
68 .Lenc_entry: | 68 .Lenc_entry: |
69 | 69 |
70 movdqa %xmm9,%xmm1 | 70 movdqa %xmm9,%xmm1 |
71 movdqa %xmm11,%xmm5 | 71 movdqa %xmm11,%xmm5 |
72 pandn %xmm0,%xmm1 | 72 pandn %xmm0,%xmm1 |
73 psrld $4,%xmm1 | 73 psrld $4,%xmm1 |
74 pand %xmm9,%xmm0 | 74 pand %xmm9,%xmm0 |
(...skipping 39 matching lines...)
114 movdqa %xmm9,%xmm1 | 114 movdqa %xmm9,%xmm1 |
115 movdqa .Lk_dipt(%rip),%xmm2 | 115 movdqa .Lk_dipt(%rip),%xmm2 |
116 pandn %xmm0,%xmm1 | 116 pandn %xmm0,%xmm1 |
117 movq %rax,%r11 | 117 movq %rax,%r11 |
118 psrld $4,%xmm1 | 118 psrld $4,%xmm1 |
119 movdqu (%r9),%xmm5 | 119 movdqu (%r9),%xmm5 |
120 shlq $4,%r11 | 120 shlq $4,%r11 |
121 pand %xmm9,%xmm0 | 121 pand %xmm9,%xmm0 |
122 .byte 102,15,56,0,208 | 122 .byte 102,15,56,0,208 |
123 movdqa .Lk_dipt+16(%rip),%xmm0 | 123 movdqa .Lk_dipt+16(%rip),%xmm0 |
124 » xorq» $48,%r11 | 124 » xorq» $0x30,%r11 |
125 leaq .Lk_dsbd(%rip),%r10 | 125 leaq .Lk_dsbd(%rip),%r10 |
126 .byte 102,15,56,0,193 | 126 .byte 102,15,56,0,193 |
127 » andq» $48,%r11 | 127 » andq» $0x30,%r11 |
128 pxor %xmm5,%xmm2 | 128 pxor %xmm5,%xmm2 |
129 movdqa .Lk_mc_forward+48(%rip),%xmm5 | 129 movdqa .Lk_mc_forward+48(%rip),%xmm5 |
130 pxor %xmm2,%xmm0 | 130 pxor %xmm2,%xmm0 |
131 addq $16,%r9 | 131 addq $16,%r9 |
132 addq %r10,%r11 | 132 addq %r10,%r11 |
133 jmp .Ldec_entry | 133 jmp .Ldec_entry |
134 | 134 |
135 .align 16 | 135 .align 16 |
136 .Ldec_loop: | 136 .Ldec_loop: |
137 | 137 |
(...skipping 98 matching lines...)
236 | 236 |
237 | 237 |
238 movdqu %xmm0,(%rdx) | 238 movdqu %xmm0,(%rdx) |
239 jmp .Lschedule_go | 239 jmp .Lschedule_go |
240 | 240 |
241 .Lschedule_am_decrypting: | 241 .Lschedule_am_decrypting: |
242 | 242 |
243 movdqa (%r8,%r10,1),%xmm1 | 243 movdqa (%r8,%r10,1),%xmm1 |
244 .byte 102,15,56,0,217 | 244 .byte 102,15,56,0,217 |
245 movdqu %xmm3,(%rdx) | 245 movdqu %xmm3,(%rdx) |
246 » xorq» $48,%r8 | 246 » xorq» $0x30,%r8 |
247 | 247 |
248 .Lschedule_go: | 248 .Lschedule_go: |
249 cmpl $192,%esi | 249 cmpl $192,%esi |
250 ja .Lschedule_256 | 250 ja .Lschedule_256 |
251 je .Lschedule_192 | 251 je .Lschedule_192 |
252 | 252 |
253 | 253 |
254 | 254 |
255 | 255 |
256 | 256 |
(...skipping 69 matching lines...)
326 call _vpaes_schedule_mangle | 326 call _vpaes_schedule_mangle |
327 movdqa %xmm0,%xmm6 | 327 movdqa %xmm0,%xmm6 |
328 | 328 |
329 | 329 |
330 call _vpaes_schedule_round | 330 call _vpaes_schedule_round |
331 decq %rsi | 331 decq %rsi |
332 jz .Lschedule_mangle_last | 332 jz .Lschedule_mangle_last |
333 call _vpaes_schedule_mangle | 333 call _vpaes_schedule_mangle |
334 | 334 |
335 | 335 |
336 » pshufd» $255,%xmm0,%xmm0 | 336 » pshufd» $0xFF,%xmm0,%xmm0 |
337 movdqa %xmm7,%xmm5 | 337 movdqa %xmm7,%xmm5 |
338 movdqa %xmm6,%xmm7 | 338 movdqa %xmm6,%xmm7 |
339 call _vpaes_schedule_low_round | 339 call _vpaes_schedule_low_round |
340 movdqa %xmm5,%xmm7 | 340 movdqa %xmm5,%xmm7 |
341 | 341 |
342 jmp .Loop_schedule_256 | 342 jmp .Loop_schedule_256 |
343 | 343 |
344 | 344 |
345 | 345 |
346 | 346 |
(...skipping 46 matching lines...)
393 | 393 |
394 | 394 |
395 | 395 |
396 | 396 |
397 | 397 |
398 | 398 |
399 | 399 |
400 .type _vpaes_schedule_192_smear,@function | 400 .type _vpaes_schedule_192_smear,@function |
401 .align 16 | 401 .align 16 |
402 _vpaes_schedule_192_smear: | 402 _vpaes_schedule_192_smear: |
403 » pshufd» $128,%xmm6,%xmm1 | 403 » pshufd» $0x80,%xmm6,%xmm1 |
404 » pshufd» $254,%xmm7,%xmm0 | 404 » pshufd» $0xFE,%xmm7,%xmm0 |
405 pxor %xmm1,%xmm6 | 405 pxor %xmm1,%xmm6 |
406 pxor %xmm1,%xmm1 | 406 pxor %xmm1,%xmm1 |
407 pxor %xmm0,%xmm6 | 407 pxor %xmm0,%xmm6 |
408 movdqa %xmm6,%xmm0 | 408 movdqa %xmm6,%xmm0 |
409 movhlps %xmm1,%xmm6 | 409 movhlps %xmm1,%xmm6 |
410 .byte 0xf3,0xc3 | 410 .byte 0xf3,0xc3 |
411 .size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear | 411 .size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear |
412 | 412 |
413 | 413 |
414 | 414 |
(...skipping 16 matching lines...)
431 .type _vpaes_schedule_round,@function | 431 .type _vpaes_schedule_round,@function |
432 .align 16 | 432 .align 16 |
433 _vpaes_schedule_round: | 433 _vpaes_schedule_round: |
434 | 434 |
435 pxor %xmm1,%xmm1 | 435 pxor %xmm1,%xmm1 |
436 .byte 102,65,15,58,15,200,15 | 436 .byte 102,65,15,58,15,200,15 |
437 .byte 102,69,15,58,15,192,15 | 437 .byte 102,69,15,58,15,192,15 |
438 pxor %xmm1,%xmm7 | 438 pxor %xmm1,%xmm7 |
439 | 439 |
440 | 440 |
441 » pshufd» $255,%xmm0,%xmm0 | 441 » pshufd» $0xFF,%xmm0,%xmm0 |
442 .byte 102,15,58,15,192,1 | 442 .byte 102,15,58,15,192,1 |
443 | 443 |
444 | 444 |
445 | 445 |
446 | 446 |
447 _vpaes_schedule_low_round: | 447 _vpaes_schedule_low_round: |
448 | 448 |
449 movdqa %xmm7,%xmm1 | 449 movdqa %xmm7,%xmm1 |
450 pslldq $4,%xmm7 | 450 pslldq $4,%xmm7 |
451 pxor %xmm1,%xmm7 | 451 pxor %xmm1,%xmm7 |
(...skipping 138 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
590 movdqa 112(%r11),%xmm3 | 590 movdqa 112(%r11),%xmm3 |
591 .byte 102,15,56,0,217 | 591 .byte 102,15,56,0,217 |
592 pxor %xmm2,%xmm3 | 592 pxor %xmm2,%xmm3 |
593 | 593 |
594 addq $-16,%rdx | 594 addq $-16,%rdx |
595 | 595 |
596 .Lschedule_mangle_both: | 596 .Lschedule_mangle_both: |
597 movdqa (%r8,%r10,1),%xmm1 | 597 movdqa (%r8,%r10,1),%xmm1 |
598 .byte 102,15,56,0,217 | 598 .byte 102,15,56,0,217 |
599 addq $-16,%r8 | 599 addq $-16,%r8 |
600 » andq» $48,%r8 | 600 » andq» $0x30,%r8 |
601 movdqu %xmm3,(%rdx) | 601 movdqu %xmm3,(%rdx) |
602 .byte 0xf3,0xc3 | 602 .byte 0xf3,0xc3 |
603 .size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle | 603 .size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle |
604 | 604 |
605 | 605 |
606 | 606 |
607 | 607 |
608 .globl vpaes_set_encrypt_key | 608 .globl vpaes_set_encrypt_key |
609 .hidden vpaes_set_encrypt_key | 609 .hidden vpaes_set_encrypt_key |
610 .type vpaes_set_encrypt_key,@function | 610 .type vpaes_set_encrypt_key,@function |
611 .align 16 | 611 .align 16 |
612 vpaes_set_encrypt_key: | 612 vpaes_set_encrypt_key: |
613 movl %esi,%eax | 613 movl %esi,%eax |
614 shrl $5,%eax | 614 shrl $5,%eax |
615 addl $5,%eax | 615 addl $5,%eax |
616 movl %eax,240(%rdx) | 616 movl %eax,240(%rdx) |
617 | 617 |
618 movl $0,%ecx | 618 movl $0,%ecx |
619 » movl» $48,%r8d | 619 » movl» $0x30,%r8d |
620 call _vpaes_schedule_core | 620 call _vpaes_schedule_core |
621 xorl %eax,%eax | 621 xorl %eax,%eax |
622 .byte 0xf3,0xc3 | 622 .byte 0xf3,0xc3 |
623 .size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key | 623 .size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key |
624 | 624 |
625 .globl vpaes_set_decrypt_key | 625 .globl vpaes_set_decrypt_key |
626 .hidden vpaes_set_decrypt_key | 626 .hidden vpaes_set_decrypt_key |
627 .type vpaes_set_decrypt_key,@function | 627 .type vpaes_set_decrypt_key,@function |
628 .align 16 | 628 .align 16 |
629 vpaes_set_decrypt_key: | 629 vpaes_set_decrypt_key: |
(...skipping 195 matching lines...)
825 .Lk_dsbe: | 825 .Lk_dsbe: |
826 .quad 0x46F2929626D4D000, 0x2242600464B4F6B0 | 826 .quad 0x46F2929626D4D000, 0x2242600464B4F6B0 |
827 .quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 | 827 .quad 0x0C55A6CDFFAAC100, 0x9467F36B98593E32 |
828 .Lk_dsbo: | 828 .Lk_dsbo: |
829 .quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D | 829 .quad 0x1387EA537EF94000, 0xC7AA6DB9D4943E2D |
830 .quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C | 830 .quad 0x12D7560F93441D00, 0xCA4B8159D8C58E9C |
831 .byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 | 831 .byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0 |
832 .align 64 | 832 .align 64 |
833 .size _vpaes_consts,.-_vpaes_consts | 833 .size _vpaes_consts,.-_vpaes_consts |
834 #endif | 834 #endif |