OLD | NEW |
1 #if defined(__i386__) | 1 #if defined(__i386__) |
2 .file "src/crypto/aes/asm/aesni-x86.S" | 2 .file "src/crypto/aes/asm/aesni-x86.S" |
3 .text | 3 .text |
4 .globl aesni_encrypt | 4 .globl aesni_encrypt |
5 .hidden aesni_encrypt | 5 .hidden aesni_encrypt |
6 .type aesni_encrypt,@function | 6 .type aesni_encrypt,@function |
7 .align 16 | 7 .align 16 |
8 aesni_encrypt: | 8 aesni_encrypt: |
9 .L_aesni_encrypt_begin: | 9 .L_aesni_encrypt_begin: |
10 movl 4(%esp),%eax | 10 movl 4(%esp),%eax |
11 movl 12(%esp),%edx | 11 movl 12(%esp),%edx |
12 movups (%eax),%xmm2 | 12 movups (%eax),%xmm2 |
13 movl 240(%edx),%ecx | 13 movl 240(%edx),%ecx |
14 movl 8(%esp),%eax | 14 movl 8(%esp),%eax |
15 movups (%edx),%xmm0 | 15 movups (%edx),%xmm0 |
16 movups 16(%edx),%xmm1 | 16 movups 16(%edx),%xmm1 |
17 leal 32(%edx),%edx | 17 leal 32(%edx),%edx |
18 xorps %xmm0,%xmm2 | 18 xorps %xmm0,%xmm2 |
19 .L000enc1_loop_1: | 19 .L000enc1_loop_1: |
20 .byte 102,15,56,220,209 | 20 .byte 102,15,56,220,209 |
21 decl %ecx | 21 decl %ecx |
22 movups (%edx),%xmm1 | 22 movups (%edx),%xmm1 |
23 leal 16(%edx),%edx | 23 leal 16(%edx),%edx |
24 jnz .L000enc1_loop_1 | 24 jnz .L000enc1_loop_1 |
25 .byte 102,15,56,221,209 | 25 .byte 102,15,56,221,209 |
26 pxor %xmm0,%xmm0 | |
27 pxor %xmm1,%xmm1 | |
28 movups %xmm2,(%eax) | 26 movups %xmm2,(%eax) |
29 pxor %xmm2,%xmm2 | |
30 ret | 27 ret |
31 .size aesni_encrypt,.-.L_aesni_encrypt_begin | 28 .size aesni_encrypt,.-.L_aesni_encrypt_begin |
32 .globl aesni_decrypt | 29 .globl aesni_decrypt |
33 .hidden aesni_decrypt | 30 .hidden aesni_decrypt |
34 .type aesni_decrypt,@function | 31 .type aesni_decrypt,@function |
35 .align 16 | 32 .align 16 |
36 aesni_decrypt: | 33 aesni_decrypt: |
37 .L_aesni_decrypt_begin: | 34 .L_aesni_decrypt_begin: |
38 movl 4(%esp),%eax | 35 movl 4(%esp),%eax |
39 movl 12(%esp),%edx | 36 movl 12(%esp),%edx |
40 movups (%eax),%xmm2 | 37 movups (%eax),%xmm2 |
41 movl 240(%edx),%ecx | 38 movl 240(%edx),%ecx |
42 movl 8(%esp),%eax | 39 movl 8(%esp),%eax |
43 movups (%edx),%xmm0 | 40 movups (%edx),%xmm0 |
44 movups 16(%edx),%xmm1 | 41 movups 16(%edx),%xmm1 |
45 leal 32(%edx),%edx | 42 leal 32(%edx),%edx |
46 xorps %xmm0,%xmm2 | 43 xorps %xmm0,%xmm2 |
47 .L001dec1_loop_2: | 44 .L001dec1_loop_2: |
48 .byte 102,15,56,222,209 | 45 .byte 102,15,56,222,209 |
49 decl %ecx | 46 decl %ecx |
50 movups (%edx),%xmm1 | 47 movups (%edx),%xmm1 |
51 leal 16(%edx),%edx | 48 leal 16(%edx),%edx |
52 jnz .L001dec1_loop_2 | 49 jnz .L001dec1_loop_2 |
53 .byte 102,15,56,223,209 | 50 .byte 102,15,56,223,209 |
54 pxor %xmm0,%xmm0 | |
55 pxor %xmm1,%xmm1 | |
56 movups %xmm2,(%eax) | 51 movups %xmm2,(%eax) |
57 pxor %xmm2,%xmm2 | |
58 ret | 52 ret |
59 .size aesni_decrypt,.-.L_aesni_decrypt_begin | 53 .size aesni_decrypt,.-.L_aesni_decrypt_begin |
60 .hidden _aesni_encrypt2 | 54 .hidden _aesni_encrypt2 |
61 .type _aesni_encrypt2,@function | 55 .type _aesni_encrypt2,@function |
62 .align 16 | 56 .align 16 |
63 _aesni_encrypt2: | 57 _aesni_encrypt2: |
64 movups (%edx),%xmm0 | 58 movups (%edx),%xmm0 |
65 shll $4,%ecx | 59 shll $4,%ecx |
66 movups 16(%edx),%xmm1 | 60 movups 16(%edx),%xmm1 |
67 xorps %xmm0,%xmm2 | 61 xorps %xmm0,%xmm2 |
(...skipping 200 matching lines...) |
268 pxor %xmm0,%xmm3 | 262 pxor %xmm0,%xmm3 |
269 pxor %xmm0,%xmm4 | 263 pxor %xmm0,%xmm4 |
270 .byte 102,15,56,220,209 | 264 .byte 102,15,56,220,209 |
271 pxor %xmm0,%xmm5 | 265 pxor %xmm0,%xmm5 |
272 pxor %xmm0,%xmm6 | 266 pxor %xmm0,%xmm6 |
273 .byte 102,15,56,220,217 | 267 .byte 102,15,56,220,217 |
274 leal 32(%edx,%ecx,1),%edx | 268 leal 32(%edx,%ecx,1),%edx |
275 negl %ecx | 269 negl %ecx |
276 .byte 102,15,56,220,225 | 270 .byte 102,15,56,220,225 |
277 pxor %xmm0,%xmm7 | 271 pxor %xmm0,%xmm7 |
278 movups (%edx,%ecx,1),%xmm0 | |
279 addl $16,%ecx | 272 addl $16,%ecx |
280 jmp .L008_aesni_encrypt6_inner | 273 .byte 102,15,56,220,233 |
| 274 .byte 102,15,56,220,241 |
| 275 .byte 102,15,56,220,249 |
| 276 movups -16(%edx,%ecx,1),%xmm0 |
| 277 jmp .L_aesni_encrypt6_enter |
281 .align 16 | 278 .align 16 |
282 .L009enc6_loop: | 279 .L008enc6_loop: |
283 .byte 102,15,56,220,209 | 280 .byte 102,15,56,220,209 |
284 .byte 102,15,56,220,217 | 281 .byte 102,15,56,220,217 |
285 .byte 102,15,56,220,225 | 282 .byte 102,15,56,220,225 |
286 .L008_aesni_encrypt6_inner: | |
287 .byte 102,15,56,220,233 | 283 .byte 102,15,56,220,233 |
288 .byte 102,15,56,220,241 | 284 .byte 102,15,56,220,241 |
289 .byte 102,15,56,220,249 | 285 .byte 102,15,56,220,249 |
290 .L_aesni_encrypt6_enter: | 286 .L_aesni_encrypt6_enter: |
291 movups (%edx,%ecx,1),%xmm1 | 287 movups (%edx,%ecx,1),%xmm1 |
292 addl $32,%ecx | 288 addl $32,%ecx |
293 .byte 102,15,56,220,208 | 289 .byte 102,15,56,220,208 |
294 .byte 102,15,56,220,216 | 290 .byte 102,15,56,220,216 |
295 .byte 102,15,56,220,224 | 291 .byte 102,15,56,220,224 |
296 .byte 102,15,56,220,232 | 292 .byte 102,15,56,220,232 |
297 .byte 102,15,56,220,240 | 293 .byte 102,15,56,220,240 |
298 .byte 102,15,56,220,248 | 294 .byte 102,15,56,220,248 |
299 movups -16(%edx,%ecx,1),%xmm0 | 295 movups -16(%edx,%ecx,1),%xmm0 |
300 jnz .L009enc6_loop | 296 jnz .L008enc6_loop |
301 .byte 102,15,56,220,209 | 297 .byte 102,15,56,220,209 |
302 .byte 102,15,56,220,217 | 298 .byte 102,15,56,220,217 |
303 .byte 102,15,56,220,225 | 299 .byte 102,15,56,220,225 |
304 .byte 102,15,56,220,233 | 300 .byte 102,15,56,220,233 |
305 .byte 102,15,56,220,241 | 301 .byte 102,15,56,220,241 |
306 .byte 102,15,56,220,249 | 302 .byte 102,15,56,220,249 |
307 .byte 102,15,56,221,208 | 303 .byte 102,15,56,221,208 |
308 .byte 102,15,56,221,216 | 304 .byte 102,15,56,221,216 |
309 .byte 102,15,56,221,224 | 305 .byte 102,15,56,221,224 |
310 .byte 102,15,56,221,232 | 306 .byte 102,15,56,221,232 |
(...skipping 12 matching lines...) |
323 pxor %xmm0,%xmm3 | 319 pxor %xmm0,%xmm3 |
324 pxor %xmm0,%xmm4 | 320 pxor %xmm0,%xmm4 |
325 .byte 102,15,56,222,209 | 321 .byte 102,15,56,222,209 |
326 pxor %xmm0,%xmm5 | 322 pxor %xmm0,%xmm5 |
327 pxor %xmm0,%xmm6 | 323 pxor %xmm0,%xmm6 |
328 .byte 102,15,56,222,217 | 324 .byte 102,15,56,222,217 |
329 leal 32(%edx,%ecx,1),%edx | 325 leal 32(%edx,%ecx,1),%edx |
330 negl %ecx | 326 negl %ecx |
331 .byte 102,15,56,222,225 | 327 .byte 102,15,56,222,225 |
332 pxor %xmm0,%xmm7 | 328 pxor %xmm0,%xmm7 |
333 movups (%edx,%ecx,1),%xmm0 | |
334 addl $16,%ecx | 329 addl $16,%ecx |
335 jmp .L010_aesni_decrypt6_inner | 330 .byte 102,15,56,222,233 |
| 331 .byte 102,15,56,222,241 |
| 332 .byte 102,15,56,222,249 |
| 333 movups -16(%edx,%ecx,1),%xmm0 |
| 334 jmp .L_aesni_decrypt6_enter |
336 .align 16 | 335 .align 16 |
337 .L011dec6_loop: | 336 .L009dec6_loop: |
338 .byte 102,15,56,222,209 | 337 .byte 102,15,56,222,209 |
339 .byte 102,15,56,222,217 | 338 .byte 102,15,56,222,217 |
340 .byte 102,15,56,222,225 | 339 .byte 102,15,56,222,225 |
341 .L010_aesni_decrypt6_inner: | |
342 .byte 102,15,56,222,233 | 340 .byte 102,15,56,222,233 |
343 .byte 102,15,56,222,241 | 341 .byte 102,15,56,222,241 |
344 .byte 102,15,56,222,249 | 342 .byte 102,15,56,222,249 |
345 .L_aesni_decrypt6_enter: | 343 .L_aesni_decrypt6_enter: |
346 movups (%edx,%ecx,1),%xmm1 | 344 movups (%edx,%ecx,1),%xmm1 |
347 addl $32,%ecx | 345 addl $32,%ecx |
348 .byte 102,15,56,222,208 | 346 .byte 102,15,56,222,208 |
349 .byte 102,15,56,222,216 | 347 .byte 102,15,56,222,216 |
350 .byte 102,15,56,222,224 | 348 .byte 102,15,56,222,224 |
351 .byte 102,15,56,222,232 | 349 .byte 102,15,56,222,232 |
352 .byte 102,15,56,222,240 | 350 .byte 102,15,56,222,240 |
353 .byte 102,15,56,222,248 | 351 .byte 102,15,56,222,248 |
354 movups -16(%edx,%ecx,1),%xmm0 | 352 movups -16(%edx,%ecx,1),%xmm0 |
355 jnz .L011dec6_loop | 353 jnz .L009dec6_loop |
356 .byte 102,15,56,222,209 | 354 .byte 102,15,56,222,209 |
357 .byte 102,15,56,222,217 | 355 .byte 102,15,56,222,217 |
358 .byte 102,15,56,222,225 | 356 .byte 102,15,56,222,225 |
359 .byte 102,15,56,222,233 | 357 .byte 102,15,56,222,233 |
360 .byte 102,15,56,222,241 | 358 .byte 102,15,56,222,241 |
361 .byte 102,15,56,222,249 | 359 .byte 102,15,56,222,249 |
362 .byte 102,15,56,223,208 | 360 .byte 102,15,56,223,208 |
363 .byte 102,15,56,223,216 | 361 .byte 102,15,56,223,216 |
364 .byte 102,15,56,223,224 | 362 .byte 102,15,56,223,224 |
365 .byte 102,15,56,223,232 | 363 .byte 102,15,56,223,232 |
(...skipping 10 matching lines...) |
376 pushl %ebp | 374 pushl %ebp |
377 pushl %ebx | 375 pushl %ebx |
378 pushl %esi | 376 pushl %esi |
379 pushl %edi | 377 pushl %edi |
380 movl 20(%esp),%esi | 378 movl 20(%esp),%esi |
381 movl 24(%esp),%edi | 379 movl 24(%esp),%edi |
382 movl 28(%esp),%eax | 380 movl 28(%esp),%eax |
383 movl 32(%esp),%edx | 381 movl 32(%esp),%edx |
384 movl 36(%esp),%ebx | 382 movl 36(%esp),%ebx |
385 andl $-16,%eax | 383 andl $-16,%eax |
386 jz .L012ecb_ret | 384 jz .L010ecb_ret |
387 movl 240(%edx),%ecx | 385 movl 240(%edx),%ecx |
388 testl %ebx,%ebx | 386 testl %ebx,%ebx |
389 jz .L013ecb_decrypt | 387 jz .L011ecb_decrypt |
390 movl %edx,%ebp | 388 movl %edx,%ebp |
391 movl %ecx,%ebx | 389 movl %ecx,%ebx |
392 cmpl $96,%eax | 390 cmpl $96,%eax |
393 jb .L014ecb_enc_tail | 391 jb .L012ecb_enc_tail |
394 movdqu (%esi),%xmm2 | 392 movdqu (%esi),%xmm2 |
395 movdqu 16(%esi),%xmm3 | 393 movdqu 16(%esi),%xmm3 |
396 movdqu 32(%esi),%xmm4 | 394 movdqu 32(%esi),%xmm4 |
397 movdqu 48(%esi),%xmm5 | 395 movdqu 48(%esi),%xmm5 |
398 movdqu 64(%esi),%xmm6 | 396 movdqu 64(%esi),%xmm6 |
399 movdqu 80(%esi),%xmm7 | 397 movdqu 80(%esi),%xmm7 |
400 leal 96(%esi),%esi | 398 leal 96(%esi),%esi |
401 subl $96,%eax | 399 subl $96,%eax |
402 jmp .L015ecb_enc_loop6_enter | 400 jmp .L013ecb_enc_loop6_enter |
403 .align 16 | 401 .align 16 |
404 .L016ecb_enc_loop6: | 402 .L014ecb_enc_loop6: |
405 movups %xmm2,(%edi) | 403 movups %xmm2,(%edi) |
406 movdqu (%esi),%xmm2 | 404 movdqu (%esi),%xmm2 |
407 movups %xmm3,16(%edi) | 405 movups %xmm3,16(%edi) |
408 movdqu 16(%esi),%xmm3 | 406 movdqu 16(%esi),%xmm3 |
409 movups %xmm4,32(%edi) | 407 movups %xmm4,32(%edi) |
410 movdqu 32(%esi),%xmm4 | 408 movdqu 32(%esi),%xmm4 |
411 movups %xmm5,48(%edi) | 409 movups %xmm5,48(%edi) |
412 movdqu 48(%esi),%xmm5 | 410 movdqu 48(%esi),%xmm5 |
413 movups %xmm6,64(%edi) | 411 movups %xmm6,64(%edi) |
414 movdqu 64(%esi),%xmm6 | 412 movdqu 64(%esi),%xmm6 |
415 movups %xmm7,80(%edi) | 413 movups %xmm7,80(%edi) |
416 leal 96(%edi),%edi | 414 leal 96(%edi),%edi |
417 movdqu 80(%esi),%xmm7 | 415 movdqu 80(%esi),%xmm7 |
418 leal 96(%esi),%esi | 416 leal 96(%esi),%esi |
419 .L015ecb_enc_loop6_enter: | 417 .L013ecb_enc_loop6_enter: |
420 call _aesni_encrypt6 | 418 call _aesni_encrypt6 |
421 movl %ebp,%edx | 419 movl %ebp,%edx |
422 movl %ebx,%ecx | 420 movl %ebx,%ecx |
423 subl $96,%eax | 421 subl $96,%eax |
424 jnc .L016ecb_enc_loop6 | 422 jnc .L014ecb_enc_loop6 |
425 movups %xmm2,(%edi) | 423 movups %xmm2,(%edi) |
426 movups %xmm3,16(%edi) | 424 movups %xmm3,16(%edi) |
427 movups %xmm4,32(%edi) | 425 movups %xmm4,32(%edi) |
428 movups %xmm5,48(%edi) | 426 movups %xmm5,48(%edi) |
429 movups %xmm6,64(%edi) | 427 movups %xmm6,64(%edi) |
430 movups %xmm7,80(%edi) | 428 movups %xmm7,80(%edi) |
431 leal 96(%edi),%edi | 429 leal 96(%edi),%edi |
432 addl $96,%eax | 430 addl $96,%eax |
433 jz .L012ecb_ret | 431 jz .L010ecb_ret |
434 .L014ecb_enc_tail: | 432 .L012ecb_enc_tail: |
435 movups (%esi),%xmm2 | 433 movups (%esi),%xmm2 |
436 cmpl $32,%eax | 434 cmpl $32,%eax |
437 jb .L017ecb_enc_one | 435 jb .L015ecb_enc_one |
438 movups 16(%esi),%xmm3 | 436 movups 16(%esi),%xmm3 |
439 je .L018ecb_enc_two | 437 je .L016ecb_enc_two |
440 movups 32(%esi),%xmm4 | 438 movups 32(%esi),%xmm4 |
441 cmpl $64,%eax | 439 cmpl $64,%eax |
442 jb .L019ecb_enc_three | 440 jb .L017ecb_enc_three |
443 movups 48(%esi),%xmm5 | 441 movups 48(%esi),%xmm5 |
444 je .L020ecb_enc_four | 442 je .L018ecb_enc_four |
445 movups 64(%esi),%xmm6 | 443 movups 64(%esi),%xmm6 |
446 xorps %xmm7,%xmm7 | 444 xorps %xmm7,%xmm7 |
447 call _aesni_encrypt6 | 445 call _aesni_encrypt6 |
448 movups %xmm2,(%edi) | 446 movups %xmm2,(%edi) |
449 movups %xmm3,16(%edi) | 447 movups %xmm3,16(%edi) |
450 movups %xmm4,32(%edi) | 448 movups %xmm4,32(%edi) |
451 movups %xmm5,48(%edi) | 449 movups %xmm5,48(%edi) |
452 movups %xmm6,64(%edi) | 450 movups %xmm6,64(%edi) |
453 jmp .L012ecb_ret | 451 jmp .L010ecb_ret |
454 .align 16 | 452 .align 16 |
455 .L017ecb_enc_one: | 453 .L015ecb_enc_one: |
456 movups (%edx),%xmm0 | 454 movups (%edx),%xmm0 |
457 movups 16(%edx),%xmm1 | 455 movups 16(%edx),%xmm1 |
458 leal 32(%edx),%edx | 456 leal 32(%edx),%edx |
459 xorps %xmm0,%xmm2 | 457 xorps %xmm0,%xmm2 |
460 .L021enc1_loop_3: | 458 .L019enc1_loop_3: |
461 .byte 102,15,56,220,209 | 459 .byte 102,15,56,220,209 |
462 decl %ecx | 460 decl %ecx |
463 movups (%edx),%xmm1 | 461 movups (%edx),%xmm1 |
464 leal 16(%edx),%edx | 462 leal 16(%edx),%edx |
465 jnz .L021enc1_loop_3 | 463 jnz .L019enc1_loop_3 |
466 .byte 102,15,56,221,209 | 464 .byte 102,15,56,221,209 |
467 movups %xmm2,(%edi) | 465 movups %xmm2,(%edi) |
468 jmp .L012ecb_ret | 466 jmp .L010ecb_ret |
469 .align 16 | 467 .align 16 |
470 .L018ecb_enc_two: | 468 .L016ecb_enc_two: |
471 call _aesni_encrypt2 | 469 call _aesni_encrypt2 |
472 movups %xmm2,(%edi) | 470 movups %xmm2,(%edi) |
473 movups %xmm3,16(%edi) | 471 movups %xmm3,16(%edi) |
474 jmp .L012ecb_ret | 472 jmp .L010ecb_ret |
475 .align 16 | 473 .align 16 |
476 .L019ecb_enc_three: | 474 .L017ecb_enc_three: |
477 call _aesni_encrypt3 | 475 call _aesni_encrypt3 |
478 movups %xmm2,(%edi) | 476 movups %xmm2,(%edi) |
479 movups %xmm3,16(%edi) | 477 movups %xmm3,16(%edi) |
480 movups %xmm4,32(%edi) | 478 movups %xmm4,32(%edi) |
481 jmp .L012ecb_ret | 479 jmp .L010ecb_ret |
482 .align 16 | 480 .align 16 |
483 .L020ecb_enc_four: | 481 .L018ecb_enc_four: |
484 call _aesni_encrypt4 | 482 call _aesni_encrypt4 |
485 movups %xmm2,(%edi) | 483 movups %xmm2,(%edi) |
486 movups %xmm3,16(%edi) | 484 movups %xmm3,16(%edi) |
487 movups %xmm4,32(%edi) | 485 movups %xmm4,32(%edi) |
488 movups %xmm5,48(%edi) | 486 movups %xmm5,48(%edi) |
489 jmp .L012ecb_ret | 487 jmp .L010ecb_ret |
490 .align 16 | 488 .align 16 |
491 .L013ecb_decrypt: | 489 .L011ecb_decrypt: |
492 movl %edx,%ebp | 490 movl %edx,%ebp |
493 movl %ecx,%ebx | 491 movl %ecx,%ebx |
494 cmpl $96,%eax | 492 cmpl $96,%eax |
495 jb .L022ecb_dec_tail | 493 jb .L020ecb_dec_tail |
496 movdqu (%esi),%xmm2 | 494 movdqu (%esi),%xmm2 |
497 movdqu 16(%esi),%xmm3 | 495 movdqu 16(%esi),%xmm3 |
498 movdqu 32(%esi),%xmm4 | 496 movdqu 32(%esi),%xmm4 |
499 movdqu 48(%esi),%xmm5 | 497 movdqu 48(%esi),%xmm5 |
500 movdqu 64(%esi),%xmm6 | 498 movdqu 64(%esi),%xmm6 |
501 movdqu 80(%esi),%xmm7 | 499 movdqu 80(%esi),%xmm7 |
502 leal 96(%esi),%esi | 500 leal 96(%esi),%esi |
503 subl $96,%eax | 501 subl $96,%eax |
504 jmp .L023ecb_dec_loop6_enter | 502 jmp .L021ecb_dec_loop6_enter |
505 .align 16 | 503 .align 16 |
506 .L024ecb_dec_loop6: | 504 .L022ecb_dec_loop6: |
507 movups %xmm2,(%edi) | 505 movups %xmm2,(%edi) |
508 movdqu (%esi),%xmm2 | 506 movdqu (%esi),%xmm2 |
509 movups %xmm3,16(%edi) | 507 movups %xmm3,16(%edi) |
510 movdqu 16(%esi),%xmm3 | 508 movdqu 16(%esi),%xmm3 |
511 movups %xmm4,32(%edi) | 509 movups %xmm4,32(%edi) |
512 movdqu 32(%esi),%xmm4 | 510 movdqu 32(%esi),%xmm4 |
513 movups %xmm5,48(%edi) | 511 movups %xmm5,48(%edi) |
514 movdqu 48(%esi),%xmm5 | 512 movdqu 48(%esi),%xmm5 |
515 movups %xmm6,64(%edi) | 513 movups %xmm6,64(%edi) |
516 movdqu 64(%esi),%xmm6 | 514 movdqu 64(%esi),%xmm6 |
517 movups %xmm7,80(%edi) | 515 movups %xmm7,80(%edi) |
518 leal 96(%edi),%edi | 516 leal 96(%edi),%edi |
519 movdqu 80(%esi),%xmm7 | 517 movdqu 80(%esi),%xmm7 |
520 leal 96(%esi),%esi | 518 leal 96(%esi),%esi |
521 .L023ecb_dec_loop6_enter: | 519 .L021ecb_dec_loop6_enter: |
522 call _aesni_decrypt6 | 520 call _aesni_decrypt6 |
523 movl %ebp,%edx | 521 movl %ebp,%edx |
524 movl %ebx,%ecx | 522 movl %ebx,%ecx |
525 subl $96,%eax | 523 subl $96,%eax |
526 jnc .L024ecb_dec_loop6 | 524 jnc .L022ecb_dec_loop6 |
527 movups %xmm2,(%edi) | 525 movups %xmm2,(%edi) |
528 movups %xmm3,16(%edi) | 526 movups %xmm3,16(%edi) |
529 movups %xmm4,32(%edi) | 527 movups %xmm4,32(%edi) |
530 movups %xmm5,48(%edi) | 528 movups %xmm5,48(%edi) |
531 movups %xmm6,64(%edi) | 529 movups %xmm6,64(%edi) |
532 movups %xmm7,80(%edi) | 530 movups %xmm7,80(%edi) |
533 leal 96(%edi),%edi | 531 leal 96(%edi),%edi |
534 addl $96,%eax | 532 addl $96,%eax |
535 jz .L012ecb_ret | 533 jz .L010ecb_ret |
536 .L022ecb_dec_tail: | 534 .L020ecb_dec_tail: |
537 movups (%esi),%xmm2 | 535 movups (%esi),%xmm2 |
538 cmpl $32,%eax | 536 cmpl $32,%eax |
539 jb .L025ecb_dec_one | 537 jb .L023ecb_dec_one |
540 movups 16(%esi),%xmm3 | 538 movups 16(%esi),%xmm3 |
541 je .L026ecb_dec_two | 539 je .L024ecb_dec_two |
542 movups 32(%esi),%xmm4 | 540 movups 32(%esi),%xmm4 |
543 cmpl $64,%eax | 541 cmpl $64,%eax |
544 jb .L027ecb_dec_three | 542 jb .L025ecb_dec_three |
545 movups 48(%esi),%xmm5 | 543 movups 48(%esi),%xmm5 |
546 je .L028ecb_dec_four | 544 je .L026ecb_dec_four |
547 movups 64(%esi),%xmm6 | 545 movups 64(%esi),%xmm6 |
548 xorps %xmm7,%xmm7 | 546 xorps %xmm7,%xmm7 |
549 call _aesni_decrypt6 | 547 call _aesni_decrypt6 |
550 movups %xmm2,(%edi) | 548 movups %xmm2,(%edi) |
551 movups %xmm3,16(%edi) | 549 movups %xmm3,16(%edi) |
552 movups %xmm4,32(%edi) | 550 movups %xmm4,32(%edi) |
553 movups %xmm5,48(%edi) | 551 movups %xmm5,48(%edi) |
554 movups %xmm6,64(%edi) | 552 movups %xmm6,64(%edi) |
555 jmp .L012ecb_ret | 553 jmp .L010ecb_ret |
556 .align 16 | 554 .align 16 |
557 .L025ecb_dec_one: | 555 .L023ecb_dec_one: |
558 movups (%edx),%xmm0 | 556 movups (%edx),%xmm0 |
559 movups 16(%edx),%xmm1 | 557 movups 16(%edx),%xmm1 |
560 leal 32(%edx),%edx | 558 leal 32(%edx),%edx |
561 xorps %xmm0,%xmm2 | 559 xorps %xmm0,%xmm2 |
562 .L029dec1_loop_4: | 560 .L027dec1_loop_4: |
563 .byte 102,15,56,222,209 | 561 .byte 102,15,56,222,209 |
564 decl %ecx | 562 decl %ecx |
565 movups (%edx),%xmm1 | 563 movups (%edx),%xmm1 |
566 leal 16(%edx),%edx | 564 leal 16(%edx),%edx |
567 jnz .L029dec1_loop_4 | 565 jnz .L027dec1_loop_4 |
568 .byte 102,15,56,223,209 | 566 .byte 102,15,56,223,209 |
569 movups %xmm2,(%edi) | 567 movups %xmm2,(%edi) |
570 jmp .L012ecb_ret | 568 jmp .L010ecb_ret |
571 .align 16 | 569 .align 16 |
572 .L026ecb_dec_two: | 570 .L024ecb_dec_two: |
573 call _aesni_decrypt2 | 571 call _aesni_decrypt2 |
574 movups %xmm2,(%edi) | 572 movups %xmm2,(%edi) |
575 movups %xmm3,16(%edi) | 573 movups %xmm3,16(%edi) |
576 jmp .L012ecb_ret | 574 jmp .L010ecb_ret |
577 .align 16 | 575 .align 16 |
578 .L027ecb_dec_three: | 576 .L025ecb_dec_three: |
579 call _aesni_decrypt3 | 577 call _aesni_decrypt3 |
580 movups %xmm2,(%edi) | 578 movups %xmm2,(%edi) |
581 movups %xmm3,16(%edi) | 579 movups %xmm3,16(%edi) |
582 movups %xmm4,32(%edi) | 580 movups %xmm4,32(%edi) |
583 jmp .L012ecb_ret | 581 jmp .L010ecb_ret |
584 .align 16 | 582 .align 16 |
585 .L028ecb_dec_four: | 583 .L026ecb_dec_four: |
586 call _aesni_decrypt4 | 584 call _aesni_decrypt4 |
587 movups %xmm2,(%edi) | 585 movups %xmm2,(%edi) |
588 movups %xmm3,16(%edi) | 586 movups %xmm3,16(%edi) |
589 movups %xmm4,32(%edi) | 587 movups %xmm4,32(%edi) |
590 movups %xmm5,48(%edi) | 588 movups %xmm5,48(%edi) |
591 .L012ecb_ret: | 589 .L010ecb_ret: |
592 pxor %xmm0,%xmm0 | |
593 pxor %xmm1,%xmm1 | |
594 pxor %xmm2,%xmm2 | |
595 pxor %xmm3,%xmm3 | |
596 pxor %xmm4,%xmm4 | |
597 pxor %xmm5,%xmm5 | |
598 pxor %xmm6,%xmm6 | |
599 pxor %xmm7,%xmm7 | |
600 popl %edi | 590 popl %edi |
601 popl %esi | 591 popl %esi |
602 popl %ebx | 592 popl %ebx |
603 popl %ebp | 593 popl %ebp |
604 ret | 594 ret |
605 .size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin | 595 .size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin |
606 .globl aesni_ccm64_encrypt_blocks | 596 .globl aesni_ccm64_encrypt_blocks |
607 .hidden aesni_ccm64_encrypt_blocks | 597 .hidden aesni_ccm64_encrypt_blocks |
608 .type aesni_ccm64_encrypt_blocks,@function | 598 .type aesni_ccm64_encrypt_blocks,@function |
609 .align 16 | 599 .align 16 |
(...skipping 27 matching lines...) |
637 movl %ebp,24(%esp) | 627 movl %ebp,24(%esp) |
638 movl %ebp,28(%esp) | 628 movl %ebp,28(%esp) |
639 shll $4,%ecx | 629 shll $4,%ecx |
640 movl $16,%ebx | 630 movl $16,%ebx |
641 leal (%edx),%ebp | 631 leal (%edx),%ebp |
642 movdqa (%esp),%xmm5 | 632 movdqa (%esp),%xmm5 |
643 movdqa %xmm7,%xmm2 | 633 movdqa %xmm7,%xmm2 |
644 leal 32(%edx,%ecx,1),%edx | 634 leal 32(%edx,%ecx,1),%edx |
645 subl %ecx,%ebx | 635 subl %ecx,%ebx |
646 .byte 102,15,56,0,253 | 636 .byte 102,15,56,0,253 |
647 .L030ccm64_enc_outer: | 637 .L028ccm64_enc_outer: |
648 movups (%ebp),%xmm0 | 638 movups (%ebp),%xmm0 |
649 movl %ebx,%ecx | 639 movl %ebx,%ecx |
650 movups (%esi),%xmm6 | 640 movups (%esi),%xmm6 |
651 xorps %xmm0,%xmm2 | 641 xorps %xmm0,%xmm2 |
652 movups 16(%ebp),%xmm1 | 642 movups 16(%ebp),%xmm1 |
653 xorps %xmm6,%xmm0 | 643 xorps %xmm6,%xmm0 |
654 xorps %xmm0,%xmm3 | 644 xorps %xmm0,%xmm3 |
655 movups 32(%ebp),%xmm0 | 645 movups 32(%ebp),%xmm0 |
656 .L031ccm64_enc2_loop: | 646 .L029ccm64_enc2_loop: |
657 .byte 102,15,56,220,209 | 647 .byte 102,15,56,220,209 |
658 .byte 102,15,56,220,217 | 648 .byte 102,15,56,220,217 |
659 movups (%edx,%ecx,1),%xmm1 | 649 movups (%edx,%ecx,1),%xmm1 |
660 addl $32,%ecx | 650 addl $32,%ecx |
661 .byte 102,15,56,220,208 | 651 .byte 102,15,56,220,208 |
662 .byte 102,15,56,220,216 | 652 .byte 102,15,56,220,216 |
663 movups -16(%edx,%ecx,1),%xmm0 | 653 movups -16(%edx,%ecx,1),%xmm0 |
664 jnz .L031ccm64_enc2_loop | 654 jnz .L029ccm64_enc2_loop |
665 .byte 102,15,56,220,209 | 655 .byte 102,15,56,220,209 |
666 .byte 102,15,56,220,217 | 656 .byte 102,15,56,220,217 |
667 paddq 16(%esp),%xmm7 | 657 paddq 16(%esp),%xmm7 |
668 decl %eax | 658 decl %eax |
669 .byte 102,15,56,221,208 | 659 .byte 102,15,56,221,208 |
670 .byte 102,15,56,221,216 | 660 .byte 102,15,56,221,216 |
671 leal 16(%esi),%esi | 661 leal 16(%esi),%esi |
672 xorps %xmm2,%xmm6 | 662 xorps %xmm2,%xmm6 |
673 movdqa %xmm7,%xmm2 | 663 movdqa %xmm7,%xmm2 |
674 movups %xmm6,(%edi) | 664 movups %xmm6,(%edi) |
675 .byte 102,15,56,0,213 | 665 .byte 102,15,56,0,213 |
676 leal 16(%edi),%edi | 666 leal 16(%edi),%edi |
677 jnz .L030ccm64_enc_outer | 667 jnz .L028ccm64_enc_outer |
678 movl 48(%esp),%esp | 668 movl 48(%esp),%esp |
679 movl 40(%esp),%edi | 669 movl 40(%esp),%edi |
680 movups %xmm3,(%edi) | 670 movups %xmm3,(%edi) |
681 pxor %xmm0,%xmm0 | |
682 pxor %xmm1,%xmm1 | |
683 pxor %xmm2,%xmm2 | |
684 pxor %xmm3,%xmm3 | |
685 pxor %xmm4,%xmm4 | |
686 pxor %xmm5,%xmm5 | |
687 pxor %xmm6,%xmm6 | |
688 pxor %xmm7,%xmm7 | |
689 popl %edi | 671 popl %edi |
690 popl %esi | 672 popl %esi |
691 popl %ebx | 673 popl %ebx |
692 popl %ebp | 674 popl %ebp |
693 ret | 675 ret |
694 .size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin | 676 .size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin |
695 .globl aesni_ccm64_decrypt_blocks | 677 .globl aesni_ccm64_decrypt_blocks |
696 .hidden aesni_ccm64_decrypt_blocks | 678 .hidden aesni_ccm64_decrypt_blocks |
697 .type aesni_ccm64_decrypt_blocks,@function | 679 .type aesni_ccm64_decrypt_blocks,@function |
698 .align 16 | 680 .align 16 |
(...skipping 28 matching lines...) |
727 movl %ebp,28(%esp) | 709 movl %ebp,28(%esp) |
728 movdqa (%esp),%xmm5 | 710 movdqa (%esp),%xmm5 |
729 movdqa %xmm7,%xmm2 | 711 movdqa %xmm7,%xmm2 |
730 movl %edx,%ebp | 712 movl %edx,%ebp |
731 movl %ecx,%ebx | 713 movl %ecx,%ebx |
732 .byte 102,15,56,0,253 | 714 .byte 102,15,56,0,253 |
733 movups (%edx),%xmm0 | 715 movups (%edx),%xmm0 |
734 movups 16(%edx),%xmm1 | 716 movups 16(%edx),%xmm1 |
735 leal 32(%edx),%edx | 717 leal 32(%edx),%edx |
736 xorps %xmm0,%xmm2 | 718 xorps %xmm0,%xmm2 |
737 .L032enc1_loop_5: | 719 .L030enc1_loop_5: |
738 .byte 102,15,56,220,209 | 720 .byte 102,15,56,220,209 |
739 decl %ecx | 721 decl %ecx |
740 movups (%edx),%xmm1 | 722 movups (%edx),%xmm1 |
741 leal 16(%edx),%edx | 723 leal 16(%edx),%edx |
742 jnz .L032enc1_loop_5 | 724 jnz .L030enc1_loop_5 |
743 .byte 102,15,56,221,209 | 725 .byte 102,15,56,221,209 |
744 shll $4,%ebx | 726 shll $4,%ebx |
745 movl $16,%ecx | 727 movl $16,%ecx |
746 movups (%esi),%xmm6 | 728 movups (%esi),%xmm6 |
747 paddq 16(%esp),%xmm7 | 729 paddq 16(%esp),%xmm7 |
748 leal 16(%esi),%esi | 730 leal 16(%esi),%esi |
749 subl %ebx,%ecx | 731 subl %ebx,%ecx |
750 leal 32(%ebp,%ebx,1),%edx | 732 leal 32(%ebp,%ebx,1),%edx |
751 movl %ecx,%ebx | 733 movl %ecx,%ebx |
752 jmp .L033ccm64_dec_outer | 734 jmp .L031ccm64_dec_outer |
753 .align 16 | 735 .align 16 |
754 .L033ccm64_dec_outer: | 736 .L031ccm64_dec_outer: |
755 xorps %xmm2,%xmm6 | 737 xorps %xmm2,%xmm6 |
756 movdqa %xmm7,%xmm2 | 738 movdqa %xmm7,%xmm2 |
757 movups %xmm6,(%edi) | 739 movups %xmm6,(%edi) |
758 leal 16(%edi),%edi | 740 leal 16(%edi),%edi |
759 .byte 102,15,56,0,213 | 741 .byte 102,15,56,0,213 |
760 subl $1,%eax | 742 subl $1,%eax |
761 jz .L034ccm64_dec_break | 743 jz .L032ccm64_dec_break |
762 movups (%ebp),%xmm0 | 744 movups (%ebp),%xmm0 |
763 movl %ebx,%ecx | 745 movl %ebx,%ecx |
764 movups 16(%ebp),%xmm1 | 746 movups 16(%ebp),%xmm1 |
765 xorps %xmm0,%xmm6 | 747 xorps %xmm0,%xmm6 |
766 xorps %xmm0,%xmm2 | 748 xorps %xmm0,%xmm2 |
767 xorps %xmm6,%xmm3 | 749 xorps %xmm6,%xmm3 |
768 movups 32(%ebp),%xmm0 | 750 movups 32(%ebp),%xmm0 |
769 .L035ccm64_dec2_loop: | 751 .L033ccm64_dec2_loop: |
770 .byte 102,15,56,220,209 | 752 .byte 102,15,56,220,209 |
771 .byte 102,15,56,220,217 | 753 .byte 102,15,56,220,217 |
772 movups (%edx,%ecx,1),%xmm1 | 754 movups (%edx,%ecx,1),%xmm1 |
773 addl $32,%ecx | 755 addl $32,%ecx |
774 .byte 102,15,56,220,208 | 756 .byte 102,15,56,220,208 |
775 .byte 102,15,56,220,216 | 757 .byte 102,15,56,220,216 |
776 movups -16(%edx,%ecx,1),%xmm0 | 758 movups -16(%edx,%ecx,1),%xmm0 |
777 jnz .L035ccm64_dec2_loop | 759 jnz .L033ccm64_dec2_loop |
778 movups (%esi),%xmm6 | 760 movups (%esi),%xmm6 |
779 paddq 16(%esp),%xmm7 | 761 paddq 16(%esp),%xmm7 |
780 .byte 102,15,56,220,209 | 762 .byte 102,15,56,220,209 |
781 .byte 102,15,56,220,217 | 763 .byte 102,15,56,220,217 |
782 .byte 102,15,56,221,208 | 764 .byte 102,15,56,221,208 |
783 .byte 102,15,56,221,216 | 765 .byte 102,15,56,221,216 |
784 leal 16(%esi),%esi | 766 leal 16(%esi),%esi |
785 jmp .L033ccm64_dec_outer | 767 jmp .L031ccm64_dec_outer |
786 .align 16 | 768 .align 16 |
787 .L034ccm64_dec_break: | 769 .L032ccm64_dec_break: |
788 movl 240(%ebp),%ecx | 770 movl 240(%ebp),%ecx |
789 movl %ebp,%edx | 771 movl %ebp,%edx |
790 movups (%edx),%xmm0 | 772 movups (%edx),%xmm0 |
791 movups 16(%edx),%xmm1 | 773 movups 16(%edx),%xmm1 |
792 xorps %xmm0,%xmm6 | 774 xorps %xmm0,%xmm6 |
793 leal 32(%edx),%edx | 775 leal 32(%edx),%edx |
794 xorps %xmm6,%xmm3 | 776 xorps %xmm6,%xmm3 |
795 .L036enc1_loop_6: | 777 .L034enc1_loop_6: |
796 .byte 102,15,56,220,217 | 778 .byte 102,15,56,220,217 |
797 decl %ecx | 779 decl %ecx |
798 movups (%edx),%xmm1 | 780 movups (%edx),%xmm1 |
799 leal 16(%edx),%edx | 781 leal 16(%edx),%edx |
800 jnz .L036enc1_loop_6 | 782 jnz .L034enc1_loop_6 |
801 .byte 102,15,56,221,217 | 783 .byte 102,15,56,221,217 |
802 movl 48(%esp),%esp | 784 movl 48(%esp),%esp |
803 movl 40(%esp),%edi | 785 movl 40(%esp),%edi |
804 movups %xmm3,(%edi) | 786 movups %xmm3,(%edi) |
805 pxor %xmm0,%xmm0 | |
806 pxor %xmm1,%xmm1 | |
807 pxor %xmm2,%xmm2 | |
808 pxor %xmm3,%xmm3 | |
809 pxor %xmm4,%xmm4 | |
810 pxor %xmm5,%xmm5 | |
811 pxor %xmm6,%xmm6 | |
812 pxor %xmm7,%xmm7 | |
813 popl %edi | 787 popl %edi |
814 popl %esi | 788 popl %esi |
815 popl %ebx | 789 popl %ebx |
816 popl %ebp | 790 popl %ebp |
817 ret | 791 ret |
818 .size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin | 792 .size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin |
819 .globl aesni_ctr32_encrypt_blocks | 793 .globl aesni_ctr32_encrypt_blocks |
820 .hidden aesni_ctr32_encrypt_blocks | 794 .hidden aesni_ctr32_encrypt_blocks |
821 .type aesni_ctr32_encrypt_blocks,@function | 795 .type aesni_ctr32_encrypt_blocks,@function |
822 .align 16 | 796 .align 16 |
823 aesni_ctr32_encrypt_blocks: | 797 aesni_ctr32_encrypt_blocks: |
824 .L_aesni_ctr32_encrypt_blocks_begin: | 798 .L_aesni_ctr32_encrypt_blocks_begin: |
825 pushl %ebp | 799 pushl %ebp |
826 pushl %ebx | 800 pushl %ebx |
827 pushl %esi | 801 pushl %esi |
828 pushl %edi | 802 pushl %edi |
829 movl 20(%esp),%esi | 803 movl 20(%esp),%esi |
830 movl 24(%esp),%edi | 804 movl 24(%esp),%edi |
831 movl 28(%esp),%eax | 805 movl 28(%esp),%eax |
832 movl 32(%esp),%edx | 806 movl 32(%esp),%edx |
833 movl 36(%esp),%ebx | 807 movl 36(%esp),%ebx |
834 movl %esp,%ebp | 808 movl %esp,%ebp |
835 subl $88,%esp | 809 subl $88,%esp |
836 andl $-16,%esp | 810 andl $-16,%esp |
837 movl %ebp,80(%esp) | 811 movl %ebp,80(%esp) |
838 cmpl $1,%eax | 812 cmpl $1,%eax |
839 je .L037ctr32_one_shortcut | 813 je .L035ctr32_one_shortcut |
840 movdqu (%ebx),%xmm7 | 814 movdqu (%ebx),%xmm7 |
841 movl $202182159,(%esp) | 815 movl $202182159,(%esp) |
842 movl $134810123,4(%esp) | 816 movl $134810123,4(%esp) |
843 movl $67438087,8(%esp) | 817 movl $67438087,8(%esp) |
844 movl $66051,12(%esp) | 818 movl $66051,12(%esp) |
845 movl $6,%ecx | 819 movl $6,%ecx |
846 xorl %ebp,%ebp | 820 xorl %ebp,%ebp |
847 movl %ecx,16(%esp) | 821 movl %ecx,16(%esp) |
848 movl %ecx,20(%esp) | 822 movl %ecx,20(%esp) |
849 movl %ecx,24(%esp) | 823 movl %ecx,24(%esp) |
(...skipping 17 matching lines...) |
867 incl %ebp | 841 incl %ebp |
868 .byte 102,15,58,34,205,2 | 842 .byte 102,15,58,34,205,2 |
869 movdqa %xmm0,48(%esp) | 843 movdqa %xmm0,48(%esp) |
870 .byte 102,15,56,0,194 | 844 .byte 102,15,56,0,194 |
871 movdqu (%edx),%xmm6 | 845 movdqu (%edx),%xmm6 |
872 movdqa %xmm1,64(%esp) | 846 movdqa %xmm1,64(%esp) |
873 .byte 102,15,56,0,202 | 847 .byte 102,15,56,0,202 |
874 pshufd $192,%xmm0,%xmm2 | 848 pshufd $192,%xmm0,%xmm2 |
875 pshufd $128,%xmm0,%xmm3 | 849 pshufd $128,%xmm0,%xmm3 |
876 cmpl $6,%eax | 850 cmpl $6,%eax |
877 jb .L038ctr32_tail | 851 jb .L036ctr32_tail |
878 pxor %xmm6,%xmm7 | 852 pxor %xmm6,%xmm7 |
879 shll $4,%ecx | 853 shll $4,%ecx |
880 movl $16,%ebx | 854 movl $16,%ebx |
881 movdqa %xmm7,32(%esp) | 855 movdqa %xmm7,32(%esp) |
882 movl %edx,%ebp | 856 movl %edx,%ebp |
883 subl %ecx,%ebx | 857 subl %ecx,%ebx |
884 leal 32(%edx,%ecx,1),%edx | 858 leal 32(%edx,%ecx,1),%edx |
885 subl $6,%eax | 859 subl $6,%eax |
886 jmp .L039ctr32_loop6 | 860 jmp .L037ctr32_loop6 |
887 .align 16 | 861 .align 16 |
888 .L039ctr32_loop6: | 862 .L037ctr32_loop6: |
889 pshufd $64,%xmm0,%xmm4 | 863 pshufd $64,%xmm0,%xmm4 |
890 movdqa 32(%esp),%xmm0 | 864 movdqa 32(%esp),%xmm0 |
891 pshufd $192,%xmm1,%xmm5 | 865 pshufd $192,%xmm1,%xmm5 |
892 pxor %xmm0,%xmm2 | 866 pxor %xmm0,%xmm2 |
893 pshufd $128,%xmm1,%xmm6 | 867 pshufd $128,%xmm1,%xmm6 |
894 pxor %xmm0,%xmm3 | 868 pxor %xmm0,%xmm3 |
895 pshufd $64,%xmm1,%xmm7 | 869 pshufd $64,%xmm1,%xmm7 |
896 movups 16(%ebp),%xmm1 | 870 movups 16(%ebp),%xmm1 |
897 pxor %xmm0,%xmm4 | 871 pxor %xmm0,%xmm4 |
898 pxor %xmm0,%xmm5 | 872 pxor %xmm0,%xmm5 |
(...skipping 33 matching lines...) |
932 movups %xmm5,48(%edi) | 906 movups %xmm5,48(%edi) |
933 xorps %xmm3,%xmm7 | 907 xorps %xmm3,%xmm7 |
934 movdqa %xmm1,64(%esp) | 908 movdqa %xmm1,64(%esp) |
935 .byte 102,15,56,0,202 | 909 .byte 102,15,56,0,202 |
936 movups %xmm6,64(%edi) | 910 movups %xmm6,64(%edi) |
937 pshufd $192,%xmm0,%xmm2 | 911 pshufd $192,%xmm0,%xmm2 |
938 movups %xmm7,80(%edi) | 912 movups %xmm7,80(%edi) |
939 leal 96(%edi),%edi | 913 leal 96(%edi),%edi |
940 pshufd $128,%xmm0,%xmm3 | 914 pshufd $128,%xmm0,%xmm3 |
941 subl $6,%eax | 915 subl $6,%eax |
942 jnc .L039ctr32_loop6 | 916 jnc .L037ctr32_loop6 |
943 addl $6,%eax | 917 addl $6,%eax |
944 jz .L040ctr32_ret | 918 jz .L038ctr32_ret |
945 movdqu (%ebp),%xmm7 | 919 movdqu (%ebp),%xmm7 |
946 movl %ebp,%edx | 920 movl %ebp,%edx |
947 pxor 32(%esp),%xmm7 | 921 pxor 32(%esp),%xmm7 |
948 movl 240(%ebp),%ecx | 922 movl 240(%ebp),%ecx |
949 .L038ctr32_tail: | 923 .L036ctr32_tail: |
950 por %xmm7,%xmm2 | 924 por %xmm7,%xmm2 |
951 cmpl $2,%eax | 925 cmpl $2,%eax |
952 jb .L041ctr32_one | 926 jb .L039ctr32_one |
953 pshufd $64,%xmm0,%xmm4 | 927 pshufd $64,%xmm0,%xmm4 |
954 por %xmm7,%xmm3 | 928 por %xmm7,%xmm3 |
955 je .L042ctr32_two | 929 je .L040ctr32_two |
956 pshufd $192,%xmm1,%xmm5 | 930 pshufd $192,%xmm1,%xmm5 |
957 por %xmm7,%xmm4 | 931 por %xmm7,%xmm4 |
958 cmpl $4,%eax | 932 cmpl $4,%eax |
959 jb .L043ctr32_three | 933 jb .L041ctr32_three |
960 pshufd $128,%xmm1,%xmm6 | 934 pshufd $128,%xmm1,%xmm6 |
961 por %xmm7,%xmm5 | 935 por %xmm7,%xmm5 |
962 je .L044ctr32_four | 936 je .L042ctr32_four |
963 por %xmm7,%xmm6 | 937 por %xmm7,%xmm6 |
964 call _aesni_encrypt6 | 938 call _aesni_encrypt6 |
965 movups (%esi),%xmm1 | 939 movups (%esi),%xmm1 |
966 movups 16(%esi),%xmm0 | 940 movups 16(%esi),%xmm0 |
967 xorps %xmm1,%xmm2 | 941 xorps %xmm1,%xmm2 |
968 movups 32(%esi),%xmm1 | 942 movups 32(%esi),%xmm1 |
969 xorps %xmm0,%xmm3 | 943 xorps %xmm0,%xmm3 |
970 movups 48(%esi),%xmm0 | 944 movups 48(%esi),%xmm0 |
971 xorps %xmm1,%xmm4 | 945 xorps %xmm1,%xmm4 |
972 movups 64(%esi),%xmm1 | 946 movups 64(%esi),%xmm1 |
973 xorps %xmm0,%xmm5 | 947 xorps %xmm0,%xmm5 |
974 movups %xmm2,(%edi) | 948 movups %xmm2,(%edi) |
975 xorps %xmm1,%xmm6 | 949 xorps %xmm1,%xmm6 |
976 movups %xmm3,16(%edi) | 950 movups %xmm3,16(%edi) |
977 movups %xmm4,32(%edi) | 951 movups %xmm4,32(%edi) |
978 movups %xmm5,48(%edi) | 952 movups %xmm5,48(%edi) |
979 movups %xmm6,64(%edi) | 953 movups %xmm6,64(%edi) |
980 jmp .L040ctr32_ret | 954 jmp .L038ctr32_ret |
981 .align 16 | 955 .align 16 |
982 .L037ctr32_one_shortcut: | 956 .L035ctr32_one_shortcut: |
983 movups (%ebx),%xmm2 | 957 movups (%ebx),%xmm2 |
984 movl 240(%edx),%ecx | 958 movl 240(%edx),%ecx |
985 .L041ctr32_one: | 959 .L039ctr32_one: |
986 movups (%edx),%xmm0 | 960 movups (%edx),%xmm0 |
987 movups 16(%edx),%xmm1 | 961 movups 16(%edx),%xmm1 |
988 leal 32(%edx),%edx | 962 leal 32(%edx),%edx |
989 xorps %xmm0,%xmm2 | 963 xorps %xmm0,%xmm2 |
990 .L045enc1_loop_7: | 964 .L043enc1_loop_7: |
991 .byte 102,15,56,220,209 | 965 .byte 102,15,56,220,209 |
992 decl %ecx | 966 decl %ecx |
993 movups (%edx),%xmm1 | 967 movups (%edx),%xmm1 |
994 leal 16(%edx),%edx | 968 leal 16(%edx),%edx |
995 jnz .L045enc1_loop_7 | 969 jnz .L043enc1_loop_7 |
996 .byte 102,15,56,221,209 | 970 .byte 102,15,56,221,209 |
997 movups (%esi),%xmm6 | 971 movups (%esi),%xmm6 |
998 xorps %xmm2,%xmm6 | 972 xorps %xmm2,%xmm6 |
999 movups %xmm6,(%edi) | 973 movups %xmm6,(%edi) |
1000 jmp .L040ctr32_ret | 974 jmp .L038ctr32_ret |
1001 .align 16 | 975 .align 16 |
1002 .L042ctr32_two: | 976 .L040ctr32_two: |
1003 call _aesni_encrypt2 | 977 call _aesni_encrypt2 |
1004 movups (%esi),%xmm5 | 978 movups (%esi),%xmm5 |
1005 movups 16(%esi),%xmm6 | 979 movups 16(%esi),%xmm6 |
1006 xorps %xmm5,%xmm2 | 980 xorps %xmm5,%xmm2 |
1007 xorps %xmm6,%xmm3 | 981 xorps %xmm6,%xmm3 |
1008 movups %xmm2,(%edi) | 982 movups %xmm2,(%edi) |
1009 movups %xmm3,16(%edi) | 983 movups %xmm3,16(%edi) |
1010 jmp .L040ctr32_ret | 984 jmp .L038ctr32_ret |
1011 .align 16 | 985 .align 16 |
1012 .L043ctr32_three: | 986 .L041ctr32_three: |
1013 call _aesni_encrypt3 | 987 call _aesni_encrypt3 |
1014 movups (%esi),%xmm5 | 988 movups (%esi),%xmm5 |
1015 movups 16(%esi),%xmm6 | 989 movups 16(%esi),%xmm6 |
1016 xorps %xmm5,%xmm2 | 990 xorps %xmm5,%xmm2 |
1017 movups 32(%esi),%xmm7 | 991 movups 32(%esi),%xmm7 |
1018 xorps %xmm6,%xmm3 | 992 xorps %xmm6,%xmm3 |
1019 movups %xmm2,(%edi) | 993 movups %xmm2,(%edi) |
1020 xorps %xmm7,%xmm4 | 994 xorps %xmm7,%xmm4 |
1021 movups %xmm3,16(%edi) | 995 movups %xmm3,16(%edi) |
1022 movups %xmm4,32(%edi) | 996 movups %xmm4,32(%edi) |
1023 jmp .L040ctr32_ret | 997 jmp .L038ctr32_ret |
1024 .align 16 | 998 .align 16 |
1025 .L044ctr32_four: | 999 .L042ctr32_four: |
1026 call _aesni_encrypt4 | 1000 call _aesni_encrypt4 |
1027 movups (%esi),%xmm6 | 1001 movups (%esi),%xmm6 |
1028 movups 16(%esi),%xmm7 | 1002 movups 16(%esi),%xmm7 |
1029 movups 32(%esi),%xmm1 | 1003 movups 32(%esi),%xmm1 |
1030 xorps %xmm6,%xmm2 | 1004 xorps %xmm6,%xmm2 |
1031 movups 48(%esi),%xmm0 | 1005 movups 48(%esi),%xmm0 |
1032 xorps %xmm7,%xmm3 | 1006 xorps %xmm7,%xmm3 |
1033 movups %xmm2,(%edi) | 1007 movups %xmm2,(%edi) |
1034 xorps %xmm1,%xmm4 | 1008 xorps %xmm1,%xmm4 |
1035 movups %xmm3,16(%edi) | 1009 movups %xmm3,16(%edi) |
1036 xorps %xmm0,%xmm5 | 1010 xorps %xmm0,%xmm5 |
1037 movups %xmm4,32(%edi) | 1011 movups %xmm4,32(%edi) |
1038 movups %xmm5,48(%edi) | 1012 movups %xmm5,48(%edi) |
1039 .L040ctr32_ret: | 1013 .L038ctr32_ret: |
1040 pxor %xmm0,%xmm0 | |
1041 pxor %xmm1,%xmm1 | |
1042 pxor %xmm2,%xmm2 | |
1043 pxor %xmm3,%xmm3 | |
1044 pxor %xmm4,%xmm4 | |
1045 movdqa %xmm0,32(%esp) | |
1046 pxor %xmm5,%xmm5 | |
1047 movdqa %xmm0,48(%esp) | |
1048 pxor %xmm6,%xmm6 | |
1049 movdqa %xmm0,64(%esp) | |
1050 pxor %xmm7,%xmm7 | |
1051 movl 80(%esp),%esp | 1014 movl 80(%esp),%esp |
1052 popl %edi | 1015 popl %edi |
1053 popl %esi | 1016 popl %esi |
1054 popl %ebx | 1017 popl %ebx |
1055 popl %ebp | 1018 popl %ebp |
1056 ret | 1019 ret |
1057 .size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin | 1020 .size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin |
1058 .globl aesni_xts_encrypt | 1021 .globl aesni_xts_encrypt |
1059 .hidden aesni_xts_encrypt | 1022 .hidden aesni_xts_encrypt |
1060 .type aesni_xts_encrypt,@function | 1023 .type aesni_xts_encrypt,@function |
1061 .align 16 | 1024 .align 16 |
1062 aesni_xts_encrypt: | 1025 aesni_xts_encrypt: |
1063 .L_aesni_xts_encrypt_begin: | 1026 .L_aesni_xts_encrypt_begin: |
1064 pushl %ebp | 1027 pushl %ebp |
1065 pushl %ebx | 1028 pushl %ebx |
1066 pushl %esi | 1029 pushl %esi |
1067 pushl %edi | 1030 pushl %edi |
1068 movl 36(%esp),%edx | 1031 movl 36(%esp),%edx |
1069 movl 40(%esp),%esi | 1032 movl 40(%esp),%esi |
1070 movl 240(%edx),%ecx | 1033 movl 240(%edx),%ecx |
1071 movups (%esi),%xmm2 | 1034 movups (%esi),%xmm2 |
1072 movups (%edx),%xmm0 | 1035 movups (%edx),%xmm0 |
1073 movups 16(%edx),%xmm1 | 1036 movups 16(%edx),%xmm1 |
1074 leal 32(%edx),%edx | 1037 leal 32(%edx),%edx |
1075 xorps %xmm0,%xmm2 | 1038 xorps %xmm0,%xmm2 |
1076 .L046enc1_loop_8: | 1039 .L044enc1_loop_8: |
1077 .byte 102,15,56,220,209 | 1040 .byte 102,15,56,220,209 |
1078 decl %ecx | 1041 decl %ecx |
1079 movups (%edx),%xmm1 | 1042 movups (%edx),%xmm1 |
1080 leal 16(%edx),%edx | 1043 leal 16(%edx),%edx |
1081 jnz .L046enc1_loop_8 | 1044 jnz .L044enc1_loop_8 |
1082 .byte 102,15,56,221,209 | 1045 .byte 102,15,56,221,209 |
1083 movl 20(%esp),%esi | 1046 movl 20(%esp),%esi |
1084 movl 24(%esp),%edi | 1047 movl 24(%esp),%edi |
1085 movl 28(%esp),%eax | 1048 movl 28(%esp),%eax |
1086 movl 32(%esp),%edx | 1049 movl 32(%esp),%edx |
1087 movl %esp,%ebp | 1050 movl %esp,%ebp |
1088 subl $120,%esp | 1051 subl $120,%esp |
1089 movl 240(%edx),%ecx | 1052 movl 240(%edx),%ecx |
1090 andl $-16,%esp | 1053 andl $-16,%esp |
1091 movl $135,96(%esp) | 1054 movl $135,96(%esp) |
1092 movl $0,100(%esp) | 1055 movl $0,100(%esp) |
1093 movl $1,104(%esp) | 1056 movl $1,104(%esp) |
1094 movl $0,108(%esp) | 1057 movl $0,108(%esp) |
1095 movl %eax,112(%esp) | 1058 movl %eax,112(%esp) |
1096 movl %ebp,116(%esp) | 1059 movl %ebp,116(%esp) |
1097 movdqa %xmm2,%xmm1 | 1060 movdqa %xmm2,%xmm1 |
1098 pxor %xmm0,%xmm0 | 1061 pxor %xmm0,%xmm0 |
1099 movdqa 96(%esp),%xmm3 | 1062 movdqa 96(%esp),%xmm3 |
1100 pcmpgtd %xmm1,%xmm0 | 1063 pcmpgtd %xmm1,%xmm0 |
1101 andl $-16,%eax | 1064 andl $-16,%eax |
1102 movl %edx,%ebp | 1065 movl %edx,%ebp |
1103 movl %ecx,%ebx | 1066 movl %ecx,%ebx |
1104 subl $96,%eax | 1067 subl $96,%eax |
1105 jc .L047xts_enc_short | 1068 jc .L045xts_enc_short |
1106 shll $4,%ecx | 1069 shll $4,%ecx |
1107 movl $16,%ebx | 1070 movl $16,%ebx |
1108 subl %ecx,%ebx | 1071 subl %ecx,%ebx |
1109 leal 32(%edx,%ecx,1),%edx | 1072 leal 32(%edx,%ecx,1),%edx |
1110 jmp .L048xts_enc_loop6 | 1073 jmp .L046xts_enc_loop6 |
1111 .align 16 | 1074 .align 16 |
1112 .L048xts_enc_loop6: | 1075 .L046xts_enc_loop6: |
1113 pshufd $19,%xmm0,%xmm2 | 1076 pshufd $19,%xmm0,%xmm2 |
1114 pxor %xmm0,%xmm0 | 1077 pxor %xmm0,%xmm0 |
1115 movdqa %xmm1,(%esp) | 1078 movdqa %xmm1,(%esp) |
1116 paddq %xmm1,%xmm1 | 1079 paddq %xmm1,%xmm1 |
1117 pand %xmm3,%xmm2 | 1080 pand %xmm3,%xmm2 |
1118 pcmpgtd %xmm1,%xmm0 | 1081 pcmpgtd %xmm1,%xmm0 |
1119 pxor %xmm2,%xmm1 | 1082 pxor %xmm2,%xmm1 |
1120 pshufd $19,%xmm0,%xmm2 | 1083 pshufd $19,%xmm0,%xmm2 |
1121 pxor %xmm0,%xmm0 | 1084 pxor %xmm0,%xmm0 |
1122 movdqa %xmm1,16(%esp) | 1085 movdqa %xmm1,16(%esp) |
(...skipping 68 matching lines...) |
1191 pshufd $19,%xmm0,%xmm2 | 1154 pshufd $19,%xmm0,%xmm2 |
1192 movups %xmm7,80(%edi) | 1155 movups %xmm7,80(%edi) |
1193 leal 96(%edi),%edi | 1156 leal 96(%edi),%edi |
1194 movdqa 96(%esp),%xmm3 | 1157 movdqa 96(%esp),%xmm3 |
1195 pxor %xmm0,%xmm0 | 1158 pxor %xmm0,%xmm0 |
1196 paddq %xmm1,%xmm1 | 1159 paddq %xmm1,%xmm1 |
1197 pand %xmm3,%xmm2 | 1160 pand %xmm3,%xmm2 |
1198 pcmpgtd %xmm1,%xmm0 | 1161 pcmpgtd %xmm1,%xmm0 |
1199 pxor %xmm2,%xmm1 | 1162 pxor %xmm2,%xmm1 |
1200 subl $96,%eax | 1163 subl $96,%eax |
1201 jnc .L048xts_enc_loop6 | 1164 jnc .L046xts_enc_loop6 |
1202 movl 240(%ebp),%ecx | 1165 movl 240(%ebp),%ecx |
1203 movl %ebp,%edx | 1166 movl %ebp,%edx |
1204 movl %ecx,%ebx | 1167 movl %ecx,%ebx |
1205 .L047xts_enc_short: | 1168 .L045xts_enc_short: |
1206 addl $96,%eax | 1169 addl $96,%eax |
1207 jz .L049xts_enc_done6x | 1170 jz .L047xts_enc_done6x |
1208 movdqa %xmm1,%xmm5 | 1171 movdqa %xmm1,%xmm5 |
1209 cmpl $32,%eax | 1172 cmpl $32,%eax |
1210 jb .L050xts_enc_one | 1173 jb .L048xts_enc_one |
1211 pshufd $19,%xmm0,%xmm2 | 1174 pshufd $19,%xmm0,%xmm2 |
1212 pxor %xmm0,%xmm0 | 1175 pxor %xmm0,%xmm0 |
1213 paddq %xmm1,%xmm1 | 1176 paddq %xmm1,%xmm1 |
1214 pand %xmm3,%xmm2 | 1177 pand %xmm3,%xmm2 |
1215 pcmpgtd %xmm1,%xmm0 | 1178 pcmpgtd %xmm1,%xmm0 |
1216 pxor %xmm2,%xmm1 | 1179 pxor %xmm2,%xmm1 |
1217 je .L051xts_enc_two | 1180 je .L049xts_enc_two |
1218 pshufd $19,%xmm0,%xmm2 | 1181 pshufd $19,%xmm0,%xmm2 |
1219 pxor %xmm0,%xmm0 | 1182 pxor %xmm0,%xmm0 |
1220 movdqa %xmm1,%xmm6 | 1183 movdqa %xmm1,%xmm6 |
1221 paddq %xmm1,%xmm1 | 1184 paddq %xmm1,%xmm1 |
1222 pand %xmm3,%xmm2 | 1185 pand %xmm3,%xmm2 |
1223 pcmpgtd %xmm1,%xmm0 | 1186 pcmpgtd %xmm1,%xmm0 |
1224 pxor %xmm2,%xmm1 | 1187 pxor %xmm2,%xmm1 |
1225 cmpl $64,%eax | 1188 cmpl $64,%eax |
1226 jb .L052xts_enc_three | 1189 jb .L050xts_enc_three |
1227 pshufd $19,%xmm0,%xmm2 | 1190 pshufd $19,%xmm0,%xmm2 |
1228 pxor %xmm0,%xmm0 | 1191 pxor %xmm0,%xmm0 |
1229 movdqa %xmm1,%xmm7 | 1192 movdqa %xmm1,%xmm7 |
1230 paddq %xmm1,%xmm1 | 1193 paddq %xmm1,%xmm1 |
1231 pand %xmm3,%xmm2 | 1194 pand %xmm3,%xmm2 |
1232 pcmpgtd %xmm1,%xmm0 | 1195 pcmpgtd %xmm1,%xmm0 |
1233 pxor %xmm2,%xmm1 | 1196 pxor %xmm2,%xmm1 |
1234 movdqa %xmm5,(%esp) | 1197 movdqa %xmm5,(%esp) |
1235 movdqa %xmm6,16(%esp) | 1198 movdqa %xmm6,16(%esp) |
1236 je .L053xts_enc_four | 1199 je .L051xts_enc_four |
1237 movdqa %xmm7,32(%esp) | 1200 movdqa %xmm7,32(%esp) |
1238 pshufd $19,%xmm0,%xmm7 | 1201 pshufd $19,%xmm0,%xmm7 |
1239 movdqa %xmm1,48(%esp) | 1202 movdqa %xmm1,48(%esp) |
1240 paddq %xmm1,%xmm1 | 1203 paddq %xmm1,%xmm1 |
1241 pand %xmm3,%xmm7 | 1204 pand %xmm3,%xmm7 |
1242 pxor %xmm1,%xmm7 | 1205 pxor %xmm1,%xmm7 |
1243 movdqu (%esi),%xmm2 | 1206 movdqu (%esi),%xmm2 |
1244 movdqu 16(%esi),%xmm3 | 1207 movdqu 16(%esi),%xmm3 |
1245 movdqu 32(%esi),%xmm4 | 1208 movdqu 32(%esi),%xmm4 |
1246 pxor (%esp),%xmm2 | 1209 pxor (%esp),%xmm2 |
(...skipping 11 matching lines...) |
1258 xorps 16(%esp),%xmm3 | 1221 xorps 16(%esp),%xmm3 |
1259 xorps 32(%esp),%xmm4 | 1222 xorps 32(%esp),%xmm4 |
1260 movups %xmm2,(%edi) | 1223 movups %xmm2,(%edi) |
1261 xorps 48(%esp),%xmm5 | 1224 xorps 48(%esp),%xmm5 |
1262 movups %xmm3,16(%edi) | 1225 movups %xmm3,16(%edi) |
1263 xorps %xmm1,%xmm6 | 1226 xorps %xmm1,%xmm6 |
1264 movups %xmm4,32(%edi) | 1227 movups %xmm4,32(%edi) |
1265 movups %xmm5,48(%edi) | 1228 movups %xmm5,48(%edi) |
1266 movups %xmm6,64(%edi) | 1229 movups %xmm6,64(%edi) |
1267 leal 80(%edi),%edi | 1230 leal 80(%edi),%edi |
1268 jmp .L054xts_enc_done | 1231 jmp .L052xts_enc_done |
1269 .align 16 | 1232 .align 16 |
1270 .L050xts_enc_one: | 1233 .L048xts_enc_one: |
1271 movups (%esi),%xmm2 | 1234 movups (%esi),%xmm2 |
1272 leal 16(%esi),%esi | 1235 leal 16(%esi),%esi |
1273 xorps %xmm5,%xmm2 | 1236 xorps %xmm5,%xmm2 |
1274 movups (%edx),%xmm0 | 1237 movups (%edx),%xmm0 |
1275 movups 16(%edx),%xmm1 | 1238 movups 16(%edx),%xmm1 |
1276 leal 32(%edx),%edx | 1239 leal 32(%edx),%edx |
1277 xorps %xmm0,%xmm2 | 1240 xorps %xmm0,%xmm2 |
1278 .L055enc1_loop_9: | 1241 .L053enc1_loop_9: |
1279 .byte 102,15,56,220,209 | 1242 .byte 102,15,56,220,209 |
1280 decl %ecx | 1243 decl %ecx |
1281 movups (%edx),%xmm1 | 1244 movups (%edx),%xmm1 |
1282 leal 16(%edx),%edx | 1245 leal 16(%edx),%edx |
1283 jnz .L055enc1_loop_9 | 1246 jnz .L053enc1_loop_9 |
1284 .byte 102,15,56,221,209 | 1247 .byte 102,15,56,221,209 |
1285 xorps %xmm5,%xmm2 | 1248 xorps %xmm5,%xmm2 |
1286 movups %xmm2,(%edi) | 1249 movups %xmm2,(%edi) |
1287 leal 16(%edi),%edi | 1250 leal 16(%edi),%edi |
1288 movdqa %xmm5,%xmm1 | 1251 movdqa %xmm5,%xmm1 |
1289 jmp .L054xts_enc_done | 1252 jmp .L052xts_enc_done |
1290 .align 16 | 1253 .align 16 |
1291 .L051xts_enc_two: | 1254 .L049xts_enc_two: |
1292 movaps %xmm1,%xmm6 | 1255 movaps %xmm1,%xmm6 |
1293 movups (%esi),%xmm2 | 1256 movups (%esi),%xmm2 |
1294 movups 16(%esi),%xmm3 | 1257 movups 16(%esi),%xmm3 |
1295 leal 32(%esi),%esi | 1258 leal 32(%esi),%esi |
1296 xorps %xmm5,%xmm2 | 1259 xorps %xmm5,%xmm2 |
1297 xorps %xmm6,%xmm3 | 1260 xorps %xmm6,%xmm3 |
1298 call _aesni_encrypt2 | 1261 call _aesni_encrypt2 |
1299 xorps %xmm5,%xmm2 | 1262 xorps %xmm5,%xmm2 |
1300 xorps %xmm6,%xmm3 | 1263 xorps %xmm6,%xmm3 |
1301 movups %xmm2,(%edi) | 1264 movups %xmm2,(%edi) |
1302 movups %xmm3,16(%edi) | 1265 movups %xmm3,16(%edi) |
1303 leal 32(%edi),%edi | 1266 leal 32(%edi),%edi |
1304 movdqa %xmm6,%xmm1 | 1267 movdqa %xmm6,%xmm1 |
1305 jmp .L054xts_enc_done | 1268 jmp .L052xts_enc_done |
1306 .align 16 | 1269 .align 16 |
1307 .L052xts_enc_three: | 1270 .L050xts_enc_three: |
1308 movaps %xmm1,%xmm7 | 1271 movaps %xmm1,%xmm7 |
1309 movups (%esi),%xmm2 | 1272 movups (%esi),%xmm2 |
1310 movups 16(%esi),%xmm3 | 1273 movups 16(%esi),%xmm3 |
1311 movups 32(%esi),%xmm4 | 1274 movups 32(%esi),%xmm4 |
1312 leal 48(%esi),%esi | 1275 leal 48(%esi),%esi |
1313 xorps %xmm5,%xmm2 | 1276 xorps %xmm5,%xmm2 |
1314 xorps %xmm6,%xmm3 | 1277 xorps %xmm6,%xmm3 |
1315 xorps %xmm7,%xmm4 | 1278 xorps %xmm7,%xmm4 |
1316 call _aesni_encrypt3 | 1279 call _aesni_encrypt3 |
1317 xorps %xmm5,%xmm2 | 1280 xorps %xmm5,%xmm2 |
1318 xorps %xmm6,%xmm3 | 1281 xorps %xmm6,%xmm3 |
1319 xorps %xmm7,%xmm4 | 1282 xorps %xmm7,%xmm4 |
1320 movups %xmm2,(%edi) | 1283 movups %xmm2,(%edi) |
1321 movups %xmm3,16(%edi) | 1284 movups %xmm3,16(%edi) |
1322 movups %xmm4,32(%edi) | 1285 movups %xmm4,32(%edi) |
1323 leal 48(%edi),%edi | 1286 leal 48(%edi),%edi |
1324 movdqa %xmm7,%xmm1 | 1287 movdqa %xmm7,%xmm1 |
1325 jmp .L054xts_enc_done | 1288 jmp .L052xts_enc_done |
1326 .align 16 | 1289 .align 16 |
1327 .L053xts_enc_four: | 1290 .L051xts_enc_four: |
1328 movaps %xmm1,%xmm6 | 1291 movaps %xmm1,%xmm6 |
1329 movups (%esi),%xmm2 | 1292 movups (%esi),%xmm2 |
1330 movups 16(%esi),%xmm3 | 1293 movups 16(%esi),%xmm3 |
1331 movups 32(%esi),%xmm4 | 1294 movups 32(%esi),%xmm4 |
1332 xorps (%esp),%xmm2 | 1295 xorps (%esp),%xmm2 |
1333 movups 48(%esi),%xmm5 | 1296 movups 48(%esi),%xmm5 |
1334 leal 64(%esi),%esi | 1297 leal 64(%esi),%esi |
1335 xorps 16(%esp),%xmm3 | 1298 xorps 16(%esp),%xmm3 |
1336 xorps %xmm7,%xmm4 | 1299 xorps %xmm7,%xmm4 |
1337 xorps %xmm6,%xmm5 | 1300 xorps %xmm6,%xmm5 |
1338 call _aesni_encrypt4 | 1301 call _aesni_encrypt4 |
1339 xorps (%esp),%xmm2 | 1302 xorps (%esp),%xmm2 |
1340 xorps 16(%esp),%xmm3 | 1303 xorps 16(%esp),%xmm3 |
1341 xorps %xmm7,%xmm4 | 1304 xorps %xmm7,%xmm4 |
1342 movups %xmm2,(%edi) | 1305 movups %xmm2,(%edi) |
1343 xorps %xmm6,%xmm5 | 1306 xorps %xmm6,%xmm5 |
1344 movups %xmm3,16(%edi) | 1307 movups %xmm3,16(%edi) |
1345 movups %xmm4,32(%edi) | 1308 movups %xmm4,32(%edi) |
1346 movups %xmm5,48(%edi) | 1309 movups %xmm5,48(%edi) |
1347 leal 64(%edi),%edi | 1310 leal 64(%edi),%edi |
1348 movdqa %xmm6,%xmm1 | 1311 movdqa %xmm6,%xmm1 |
1349 jmp .L054xts_enc_done | 1312 jmp .L052xts_enc_done |
1350 .align 16 | 1313 .align 16 |
1351 .L049xts_enc_done6x: | 1314 .L047xts_enc_done6x: |
1352 movl 112(%esp),%eax | 1315 movl 112(%esp),%eax |
1353 andl $15,%eax | 1316 andl $15,%eax |
1354 jz .L056xts_enc_ret | 1317 jz .L054xts_enc_ret |
1355 movdqa %xmm1,%xmm5 | 1318 movdqa %xmm1,%xmm5 |
1356 movl %eax,112(%esp) | 1319 movl %eax,112(%esp) |
1357 jmp .L057xts_enc_steal | 1320 jmp .L055xts_enc_steal |
1358 .align 16 | 1321 .align 16 |
1359 .L054xts_enc_done: | 1322 .L052xts_enc_done: |
1360 movl 112(%esp),%eax | 1323 movl 112(%esp),%eax |
1361 pxor %xmm0,%xmm0 | 1324 pxor %xmm0,%xmm0 |
1362 andl $15,%eax | 1325 andl $15,%eax |
1363 jz .L056xts_enc_ret | 1326 jz .L054xts_enc_ret |
1364 pcmpgtd %xmm1,%xmm0 | 1327 pcmpgtd %xmm1,%xmm0 |
1365 movl %eax,112(%esp) | 1328 movl %eax,112(%esp) |
1366 pshufd $19,%xmm0,%xmm5 | 1329 pshufd $19,%xmm0,%xmm5 |
1367 paddq %xmm1,%xmm1 | 1330 paddq %xmm1,%xmm1 |
1368 pand 96(%esp),%xmm5 | 1331 pand 96(%esp),%xmm5 |
1369 pxor %xmm1,%xmm5 | 1332 pxor %xmm1,%xmm5 |
1370 .L057xts_enc_steal: | 1333 .L055xts_enc_steal: |
1371 movzbl (%esi),%ecx | 1334 movzbl (%esi),%ecx |
1372 movzbl -16(%edi),%edx | 1335 movzbl -16(%edi),%edx |
1373 leal 1(%esi),%esi | 1336 leal 1(%esi),%esi |
1374 movb %cl,-16(%edi) | 1337 movb %cl,-16(%edi) |
1375 movb %dl,(%edi) | 1338 movb %dl,(%edi) |
1376 leal 1(%edi),%edi | 1339 leal 1(%edi),%edi |
1377 subl $1,%eax | 1340 subl $1,%eax |
1378 jnz .L057xts_enc_steal | 1341 jnz .L055xts_enc_steal |
1379 subl 112(%esp),%edi | 1342 subl 112(%esp),%edi |
1380 movl %ebp,%edx | 1343 movl %ebp,%edx |
1381 movl %ebx,%ecx | 1344 movl %ebx,%ecx |
1382 movups -16(%edi),%xmm2 | 1345 movups -16(%edi),%xmm2 |
1383 xorps %xmm5,%xmm2 | 1346 xorps %xmm5,%xmm2 |
1384 movups (%edx),%xmm0 | 1347 movups (%edx),%xmm0 |
1385 movups 16(%edx),%xmm1 | 1348 movups 16(%edx),%xmm1 |
1386 leal 32(%edx),%edx | 1349 leal 32(%edx),%edx |
1387 xorps %xmm0,%xmm2 | 1350 xorps %xmm0,%xmm2 |
1388 .L058enc1_loop_10: | 1351 .L056enc1_loop_10: |
1389 .byte 102,15,56,220,209 | 1352 .byte 102,15,56,220,209 |
1390 decl %ecx | 1353 decl %ecx |
1391 movups (%edx),%xmm1 | 1354 movups (%edx),%xmm1 |
1392 leal 16(%edx),%edx | 1355 leal 16(%edx),%edx |
1393 jnz .L058enc1_loop_10 | 1356 jnz .L056enc1_loop_10 |
1394 .byte 102,15,56,221,209 | 1357 .byte 102,15,56,221,209 |
1395 xorps %xmm5,%xmm2 | 1358 xorps %xmm5,%xmm2 |
1396 movups %xmm2,-16(%edi) | 1359 movups %xmm2,-16(%edi) |
1397 .L056xts_enc_ret: | 1360 .L054xts_enc_ret: |
1398 pxor %xmm0,%xmm0 | |
1399 pxor %xmm1,%xmm1 | |
1400 pxor %xmm2,%xmm2 | |
1401 movdqa %xmm0,(%esp) | |
1402 pxor %xmm3,%xmm3 | |
1403 movdqa %xmm0,16(%esp) | |
1404 pxor %xmm4,%xmm4 | |
1405 movdqa %xmm0,32(%esp) | |
1406 pxor %xmm5,%xmm5 | |
1407 movdqa %xmm0,48(%esp) | |
1408 pxor %xmm6,%xmm6 | |
1409 movdqa %xmm0,64(%esp) | |
1410 pxor %xmm7,%xmm7 | |
1411 movdqa %xmm0,80(%esp) | |
1412 movl 116(%esp),%esp | 1361 movl 116(%esp),%esp |
1413 popl %edi | 1362 popl %edi |
1414 popl %esi | 1363 popl %esi |
1415 popl %ebx | 1364 popl %ebx |
1416 popl %ebp | 1365 popl %ebp |
1417 ret | 1366 ret |
1418 .size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin | 1367 .size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin |
1419 .globl aesni_xts_decrypt | 1368 .globl aesni_xts_decrypt |
1420 .hidden aesni_xts_decrypt | 1369 .hidden aesni_xts_decrypt |
1421 .type aesni_xts_decrypt,@function | 1370 .type aesni_xts_decrypt,@function |
1422 .align 16 | 1371 .align 16 |
1423 aesni_xts_decrypt: | 1372 aesni_xts_decrypt: |
1424 .L_aesni_xts_decrypt_begin: | 1373 .L_aesni_xts_decrypt_begin: |
1425 pushl %ebp | 1374 pushl %ebp |
1426 pushl %ebx | 1375 pushl %ebx |
1427 pushl %esi | 1376 pushl %esi |
1428 pushl %edi | 1377 pushl %edi |
1429 movl 36(%esp),%edx | 1378 movl 36(%esp),%edx |
1430 movl 40(%esp),%esi | 1379 movl 40(%esp),%esi |
1431 movl 240(%edx),%ecx | 1380 movl 240(%edx),%ecx |
1432 movups (%esi),%xmm2 | 1381 movups (%esi),%xmm2 |
1433 movups (%edx),%xmm0 | 1382 movups (%edx),%xmm0 |
1434 movups 16(%edx),%xmm1 | 1383 movups 16(%edx),%xmm1 |
1435 leal 32(%edx),%edx | 1384 leal 32(%edx),%edx |
1436 xorps %xmm0,%xmm2 | 1385 xorps %xmm0,%xmm2 |
1437 .L059enc1_loop_11: | 1386 .L057enc1_loop_11: |
1438 .byte 102,15,56,220,209 | 1387 .byte 102,15,56,220,209 |
1439 decl %ecx | 1388 decl %ecx |
1440 movups (%edx),%xmm1 | 1389 movups (%edx),%xmm1 |
1441 leal 16(%edx),%edx | 1390 leal 16(%edx),%edx |
1442 jnz .L059enc1_loop_11 | 1391 jnz .L057enc1_loop_11 |
1443 .byte 102,15,56,221,209 | 1392 .byte 102,15,56,221,209 |
1444 movl 20(%esp),%esi | 1393 movl 20(%esp),%esi |
1445 movl 24(%esp),%edi | 1394 movl 24(%esp),%edi |
1446 movl 28(%esp),%eax | 1395 movl 28(%esp),%eax |
1447 movl 32(%esp),%edx | 1396 movl 32(%esp),%edx |
1448 movl %esp,%ebp | 1397 movl %esp,%ebp |
1449 subl $120,%esp | 1398 subl $120,%esp |
1450 andl $-16,%esp | 1399 andl $-16,%esp |
1451 xorl %ebx,%ebx | 1400 xorl %ebx,%ebx |
1452 testl $15,%eax | 1401 testl $15,%eax |
1453 setnz %bl | 1402 setnz %bl |
1454 shll $4,%ebx | 1403 shll $4,%ebx |
1455 subl %ebx,%eax | 1404 subl %ebx,%eax |
1456 movl $135,96(%esp) | 1405 movl $135,96(%esp) |
1457 movl $0,100(%esp) | 1406 movl $0,100(%esp) |
1458 movl $1,104(%esp) | 1407 movl $1,104(%esp) |
1459 movl $0,108(%esp) | 1408 movl $0,108(%esp) |
1460 movl %eax,112(%esp) | 1409 movl %eax,112(%esp) |
1461 movl %ebp,116(%esp) | 1410 movl %ebp,116(%esp) |
1462 movl 240(%edx),%ecx | 1411 movl 240(%edx),%ecx |
1463 movl %edx,%ebp | 1412 movl %edx,%ebp |
1464 movl %ecx,%ebx | 1413 movl %ecx,%ebx |
1465 movdqa %xmm2,%xmm1 | 1414 movdqa %xmm2,%xmm1 |
1466 pxor %xmm0,%xmm0 | 1415 pxor %xmm0,%xmm0 |
1467 movdqa 96(%esp),%xmm3 | 1416 movdqa 96(%esp),%xmm3 |
1468 pcmpgtd %xmm1,%xmm0 | 1417 pcmpgtd %xmm1,%xmm0 |
1469 andl $-16,%eax | 1418 andl $-16,%eax |
1470 subl $96,%eax | 1419 subl $96,%eax |
1471 jc .L060xts_dec_short | 1420 jc .L058xts_dec_short |
1472 shll $4,%ecx | 1421 shll $4,%ecx |
1473 movl $16,%ebx | 1422 movl $16,%ebx |
1474 subl %ecx,%ebx | 1423 subl %ecx,%ebx |
1475 leal 32(%edx,%ecx,1),%edx | 1424 leal 32(%edx,%ecx,1),%edx |
1476 jmp .L061xts_dec_loop6 | 1425 jmp .L059xts_dec_loop6 |
1477 .align 16 | 1426 .align 16 |
1478 .L061xts_dec_loop6: | 1427 .L059xts_dec_loop6: |
1479 pshufd $19,%xmm0,%xmm2 | 1428 pshufd $19,%xmm0,%xmm2 |
1480 pxor %xmm0,%xmm0 | 1429 pxor %xmm0,%xmm0 |
1481 movdqa %xmm1,(%esp) | 1430 movdqa %xmm1,(%esp) |
1482 paddq %xmm1,%xmm1 | 1431 paddq %xmm1,%xmm1 |
1483 pand %xmm3,%xmm2 | 1432 pand %xmm3,%xmm2 |
1484 pcmpgtd %xmm1,%xmm0 | 1433 pcmpgtd %xmm1,%xmm0 |
1485 pxor %xmm2,%xmm1 | 1434 pxor %xmm2,%xmm1 |
1486 pshufd $19,%xmm0,%xmm2 | 1435 pshufd $19,%xmm0,%xmm2 |
1487 pxor %xmm0,%xmm0 | 1436 pxor %xmm0,%xmm0 |
1488 movdqa %xmm1,16(%esp) | 1437 movdqa %xmm1,16(%esp) |
(...skipping 68 matching lines...) |
1557 pshufd $19,%xmm0,%xmm2 | 1506 pshufd $19,%xmm0,%xmm2 |
1558 movups %xmm7,80(%edi) | 1507 movups %xmm7,80(%edi) |
1559 leal 96(%edi),%edi | 1508 leal 96(%edi),%edi |
1560 movdqa 96(%esp),%xmm3 | 1509 movdqa 96(%esp),%xmm3 |
1561 pxor %xmm0,%xmm0 | 1510 pxor %xmm0,%xmm0 |
1562 paddq %xmm1,%xmm1 | 1511 paddq %xmm1,%xmm1 |
1563 pand %xmm3,%xmm2 | 1512 pand %xmm3,%xmm2 |
1564 pcmpgtd %xmm1,%xmm0 | 1513 pcmpgtd %xmm1,%xmm0 |
1565 pxor %xmm2,%xmm1 | 1514 pxor %xmm2,%xmm1 |
1566 subl $96,%eax | 1515 subl $96,%eax |
1567 jnc .L061xts_dec_loop6 | 1516 jnc .L059xts_dec_loop6 |
1568 movl 240(%ebp),%ecx | 1517 movl 240(%ebp),%ecx |
1569 movl %ebp,%edx | 1518 movl %ebp,%edx |
1570 movl %ecx,%ebx | 1519 movl %ecx,%ebx |
1571 .L060xts_dec_short: | 1520 .L058xts_dec_short: |
1572 addl $96,%eax | 1521 addl $96,%eax |
1573 jz .L062xts_dec_done6x | 1522 jz .L060xts_dec_done6x |
1574 movdqa %xmm1,%xmm5 | 1523 movdqa %xmm1,%xmm5 |
1575 cmpl $32,%eax | 1524 cmpl $32,%eax |
1576 jb .L063xts_dec_one | 1525 jb .L061xts_dec_one |
1577 pshufd $19,%xmm0,%xmm2 | 1526 pshufd $19,%xmm0,%xmm2 |
1578 pxor %xmm0,%xmm0 | 1527 pxor %xmm0,%xmm0 |
1579 paddq %xmm1,%xmm1 | 1528 paddq %xmm1,%xmm1 |
1580 pand %xmm3,%xmm2 | 1529 pand %xmm3,%xmm2 |
1581 pcmpgtd %xmm1,%xmm0 | 1530 pcmpgtd %xmm1,%xmm0 |
1582 pxor %xmm2,%xmm1 | 1531 pxor %xmm2,%xmm1 |
1583 je .L064xts_dec_two | 1532 je .L062xts_dec_two |
1584 pshufd $19,%xmm0,%xmm2 | 1533 pshufd $19,%xmm0,%xmm2 |
1585 pxor %xmm0,%xmm0 | 1534 pxor %xmm0,%xmm0 |
1586 movdqa %xmm1,%xmm6 | 1535 movdqa %xmm1,%xmm6 |
1587 paddq %xmm1,%xmm1 | 1536 paddq %xmm1,%xmm1 |
1588 pand %xmm3,%xmm2 | 1537 pand %xmm3,%xmm2 |
1589 pcmpgtd %xmm1,%xmm0 | 1538 pcmpgtd %xmm1,%xmm0 |
1590 pxor %xmm2,%xmm1 | 1539 pxor %xmm2,%xmm1 |
1591 cmpl $64,%eax | 1540 cmpl $64,%eax |
1592 jb .L065xts_dec_three | 1541 jb .L063xts_dec_three |
1593 pshufd $19,%xmm0,%xmm2 | 1542 pshufd $19,%xmm0,%xmm2 |
1594 pxor %xmm0,%xmm0 | 1543 pxor %xmm0,%xmm0 |
1595 movdqa %xmm1,%xmm7 | 1544 movdqa %xmm1,%xmm7 |
1596 paddq %xmm1,%xmm1 | 1545 paddq %xmm1,%xmm1 |
1597 pand %xmm3,%xmm2 | 1546 pand %xmm3,%xmm2 |
1598 pcmpgtd %xmm1,%xmm0 | 1547 pcmpgtd %xmm1,%xmm0 |
1599 pxor %xmm2,%xmm1 | 1548 pxor %xmm2,%xmm1 |
1600 movdqa %xmm5,(%esp) | 1549 movdqa %xmm5,(%esp) |
1601 movdqa %xmm6,16(%esp) | 1550 movdqa %xmm6,16(%esp) |
1602 je .L066xts_dec_four | 1551 je .L064xts_dec_four |
1603 movdqa %xmm7,32(%esp) | 1552 movdqa %xmm7,32(%esp) |
1604 pshufd $19,%xmm0,%xmm7 | 1553 pshufd $19,%xmm0,%xmm7 |
1605 movdqa %xmm1,48(%esp) | 1554 movdqa %xmm1,48(%esp) |
1606 paddq %xmm1,%xmm1 | 1555 paddq %xmm1,%xmm1 |
1607 pand %xmm3,%xmm7 | 1556 pand %xmm3,%xmm7 |
1608 pxor %xmm1,%xmm7 | 1557 pxor %xmm1,%xmm7 |
1609 movdqu (%esi),%xmm2 | 1558 movdqu (%esi),%xmm2 |
1610 movdqu 16(%esi),%xmm3 | 1559 movdqu 16(%esi),%xmm3 |
1611 movdqu 32(%esi),%xmm4 | 1560 movdqu 32(%esi),%xmm4 |
1612 pxor (%esp),%xmm2 | 1561 pxor (%esp),%xmm2 |
(...skipping 11 matching lines...)
1624 xorps 16(%esp),%xmm3 | 1573 xorps 16(%esp),%xmm3 |
1625 xorps 32(%esp),%xmm4 | 1574 xorps 32(%esp),%xmm4 |
1626 movups %xmm2,(%edi) | 1575 movups %xmm2,(%edi) |
1627 xorps 48(%esp),%xmm5 | 1576 xorps 48(%esp),%xmm5 |
1628 movups %xmm3,16(%edi) | 1577 movups %xmm3,16(%edi) |
1629 xorps %xmm1,%xmm6 | 1578 xorps %xmm1,%xmm6 |
1630 movups %xmm4,32(%edi) | 1579 movups %xmm4,32(%edi) |
1631 movups %xmm5,48(%edi) | 1580 movups %xmm5,48(%edi) |
1632 movups %xmm6,64(%edi) | 1581 movups %xmm6,64(%edi) |
1633 leal 80(%edi),%edi | 1582 leal 80(%edi),%edi |
1634 jmp .L067xts_dec_done | 1583 jmp .L065xts_dec_done |
1635 .align 16 | 1584 .align 16 |
1636 .L063xts_dec_one: | 1585 .L061xts_dec_one: |
1637 movups (%esi),%xmm2 | 1586 movups (%esi),%xmm2 |
1638 leal 16(%esi),%esi | 1587 leal 16(%esi),%esi |
1639 xorps %xmm5,%xmm2 | 1588 xorps %xmm5,%xmm2 |
1640 movups (%edx),%xmm0 | 1589 movups (%edx),%xmm0 |
1641 movups 16(%edx),%xmm1 | 1590 movups 16(%edx),%xmm1 |
1642 leal 32(%edx),%edx | 1591 leal 32(%edx),%edx |
1643 xorps %xmm0,%xmm2 | 1592 xorps %xmm0,%xmm2 |
1644 .L068dec1_loop_12: | 1593 .L066dec1_loop_12: |
1645 .byte 102,15,56,222,209 | 1594 .byte 102,15,56,222,209 |
1646 decl %ecx | 1595 decl %ecx |
1647 movups (%edx),%xmm1 | 1596 movups (%edx),%xmm1 |
1648 leal 16(%edx),%edx | 1597 leal 16(%edx),%edx |
1649 jnz .L068dec1_loop_12 | 1598 jnz .L066dec1_loop_12 |
1650 .byte 102,15,56,223,209 | 1599 .byte 102,15,56,223,209 |
1651 xorps %xmm5,%xmm2 | 1600 xorps %xmm5,%xmm2 |
1652 movups %xmm2,(%edi) | 1601 movups %xmm2,(%edi) |
1653 leal 16(%edi),%edi | 1602 leal 16(%edi),%edi |
1654 movdqa %xmm5,%xmm1 | 1603 movdqa %xmm5,%xmm1 |
1655 jmp .L067xts_dec_done | 1604 jmp .L065xts_dec_done |
1656 .align 16 | 1605 .align 16 |
1657 .L064xts_dec_two: | 1606 .L062xts_dec_two: |
1658 movaps %xmm1,%xmm6 | 1607 movaps %xmm1,%xmm6 |
1659 movups (%esi),%xmm2 | 1608 movups (%esi),%xmm2 |
1660 movups 16(%esi),%xmm3 | 1609 movups 16(%esi),%xmm3 |
1661 leal 32(%esi),%esi | 1610 leal 32(%esi),%esi |
1662 xorps %xmm5,%xmm2 | 1611 xorps %xmm5,%xmm2 |
1663 xorps %xmm6,%xmm3 | 1612 xorps %xmm6,%xmm3 |
1664 call _aesni_decrypt2 | 1613 call _aesni_decrypt2 |
1665 xorps %xmm5,%xmm2 | 1614 xorps %xmm5,%xmm2 |
1666 xorps %xmm6,%xmm3 | 1615 xorps %xmm6,%xmm3 |
1667 movups %xmm2,(%edi) | 1616 movups %xmm2,(%edi) |
1668 movups %xmm3,16(%edi) | 1617 movups %xmm3,16(%edi) |
1669 leal 32(%edi),%edi | 1618 leal 32(%edi),%edi |
1670 movdqa %xmm6,%xmm1 | 1619 movdqa %xmm6,%xmm1 |
1671 jmp .L067xts_dec_done | 1620 jmp .L065xts_dec_done |
1672 .align 16 | 1621 .align 16 |
1673 .L065xts_dec_three: | 1622 .L063xts_dec_three: |
1674 movaps %xmm1,%xmm7 | 1623 movaps %xmm1,%xmm7 |
1675 movups (%esi),%xmm2 | 1624 movups (%esi),%xmm2 |
1676 movups 16(%esi),%xmm3 | 1625 movups 16(%esi),%xmm3 |
1677 movups 32(%esi),%xmm4 | 1626 movups 32(%esi),%xmm4 |
1678 leal 48(%esi),%esi | 1627 leal 48(%esi),%esi |
1679 xorps %xmm5,%xmm2 | 1628 xorps %xmm5,%xmm2 |
1680 xorps %xmm6,%xmm3 | 1629 xorps %xmm6,%xmm3 |
1681 xorps %xmm7,%xmm4 | 1630 xorps %xmm7,%xmm4 |
1682 call _aesni_decrypt3 | 1631 call _aesni_decrypt3 |
1683 xorps %xmm5,%xmm2 | 1632 xorps %xmm5,%xmm2 |
1684 xorps %xmm6,%xmm3 | 1633 xorps %xmm6,%xmm3 |
1685 xorps %xmm7,%xmm4 | 1634 xorps %xmm7,%xmm4 |
1686 movups %xmm2,(%edi) | 1635 movups %xmm2,(%edi) |
1687 movups %xmm3,16(%edi) | 1636 movups %xmm3,16(%edi) |
1688 movups %xmm4,32(%edi) | 1637 movups %xmm4,32(%edi) |
1689 leal 48(%edi),%edi | 1638 leal 48(%edi),%edi |
1690 movdqa %xmm7,%xmm1 | 1639 movdqa %xmm7,%xmm1 |
1691 jmp .L067xts_dec_done | 1640 jmp .L065xts_dec_done |
1692 .align 16 | 1641 .align 16 |
1693 .L066xts_dec_four: | 1642 .L064xts_dec_four: |
1694 movaps %xmm1,%xmm6 | 1643 movaps %xmm1,%xmm6 |
1695 movups (%esi),%xmm2 | 1644 movups (%esi),%xmm2 |
1696 movups 16(%esi),%xmm3 | 1645 movups 16(%esi),%xmm3 |
1697 movups 32(%esi),%xmm4 | 1646 movups 32(%esi),%xmm4 |
1698 xorps (%esp),%xmm2 | 1647 xorps (%esp),%xmm2 |
1699 movups 48(%esi),%xmm5 | 1648 movups 48(%esi),%xmm5 |
1700 leal 64(%esi),%esi | 1649 leal 64(%esi),%esi |
1701 xorps 16(%esp),%xmm3 | 1650 xorps 16(%esp),%xmm3 |
1702 xorps %xmm7,%xmm4 | 1651 xorps %xmm7,%xmm4 |
1703 xorps %xmm6,%xmm5 | 1652 xorps %xmm6,%xmm5 |
1704 call _aesni_decrypt4 | 1653 call _aesni_decrypt4 |
1705 xorps (%esp),%xmm2 | 1654 xorps (%esp),%xmm2 |
1706 xorps 16(%esp),%xmm3 | 1655 xorps 16(%esp),%xmm3 |
1707 xorps %xmm7,%xmm4 | 1656 xorps %xmm7,%xmm4 |
1708 movups %xmm2,(%edi) | 1657 movups %xmm2,(%edi) |
1709 xorps %xmm6,%xmm5 | 1658 xorps %xmm6,%xmm5 |
1710 movups %xmm3,16(%edi) | 1659 movups %xmm3,16(%edi) |
1711 movups %xmm4,32(%edi) | 1660 movups %xmm4,32(%edi) |
1712 movups %xmm5,48(%edi) | 1661 movups %xmm5,48(%edi) |
1713 leal 64(%edi),%edi | 1662 leal 64(%edi),%edi |
1714 movdqa %xmm6,%xmm1 | 1663 movdqa %xmm6,%xmm1 |
1715 jmp .L067xts_dec_done | 1664 jmp .L065xts_dec_done |
1716 .align 16 | 1665 .align 16 |
1717 .L062xts_dec_done6x: | 1666 .L060xts_dec_done6x: |
1718 movl 112(%esp),%eax | 1667 movl 112(%esp),%eax |
1719 andl $15,%eax | 1668 andl $15,%eax |
1720 jz .L069xts_dec_ret | 1669 jz .L067xts_dec_ret |
1721 movl %eax,112(%esp) | 1670 movl %eax,112(%esp) |
1722 jmp .L070xts_dec_only_one_more | 1671 jmp .L068xts_dec_only_one_more |
1723 .align 16 | 1672 .align 16 |
1724 .L067xts_dec_done: | 1673 .L065xts_dec_done: |
1725 movl 112(%esp),%eax | 1674 movl 112(%esp),%eax |
1726 pxor %xmm0,%xmm0 | 1675 pxor %xmm0,%xmm0 |
1727 andl $15,%eax | 1676 andl $15,%eax |
1728 jz .L069xts_dec_ret | 1677 jz .L067xts_dec_ret |
1729 pcmpgtd %xmm1,%xmm0 | 1678 pcmpgtd %xmm1,%xmm0 |
1730 movl %eax,112(%esp) | 1679 movl %eax,112(%esp) |
1731 pshufd $19,%xmm0,%xmm2 | 1680 pshufd $19,%xmm0,%xmm2 |
1732 pxor %xmm0,%xmm0 | 1681 pxor %xmm0,%xmm0 |
1733 movdqa 96(%esp),%xmm3 | 1682 movdqa 96(%esp),%xmm3 |
1734 paddq %xmm1,%xmm1 | 1683 paddq %xmm1,%xmm1 |
1735 pand %xmm3,%xmm2 | 1684 pand %xmm3,%xmm2 |
1736 pcmpgtd %xmm1,%xmm0 | 1685 pcmpgtd %xmm1,%xmm0 |
1737 pxor %xmm2,%xmm1 | 1686 pxor %xmm2,%xmm1 |
1738 .L070xts_dec_only_one_more: | 1687 .L068xts_dec_only_one_more: |
1739 pshufd $19,%xmm0,%xmm5 | 1688 pshufd $19,%xmm0,%xmm5 |
1740 movdqa %xmm1,%xmm6 | 1689 movdqa %xmm1,%xmm6 |
1741 paddq %xmm1,%xmm1 | 1690 paddq %xmm1,%xmm1 |
1742 pand %xmm3,%xmm5 | 1691 pand %xmm3,%xmm5 |
1743 pxor %xmm1,%xmm5 | 1692 pxor %xmm1,%xmm5 |
1744 movl %ebp,%edx | 1693 movl %ebp,%edx |
1745 movl %ebx,%ecx | 1694 movl %ebx,%ecx |
1746 movups (%esi),%xmm2 | 1695 movups (%esi),%xmm2 |
1747 xorps %xmm5,%xmm2 | 1696 xorps %xmm5,%xmm2 |
1748 movups (%edx),%xmm0 | 1697 movups (%edx),%xmm0 |
1749 movups 16(%edx),%xmm1 | 1698 movups 16(%edx),%xmm1 |
1750 leal 32(%edx),%edx | 1699 leal 32(%edx),%edx |
1751 xorps %xmm0,%xmm2 | 1700 xorps %xmm0,%xmm2 |
1752 .L071dec1_loop_13: | 1701 .L069dec1_loop_13: |
1753 .byte 102,15,56,222,209 | 1702 .byte 102,15,56,222,209 |
1754 decl %ecx | 1703 decl %ecx |
1755 movups (%edx),%xmm1 | 1704 movups (%edx),%xmm1 |
1756 leal 16(%edx),%edx | 1705 leal 16(%edx),%edx |
1757 jnz .L071dec1_loop_13 | 1706 jnz .L069dec1_loop_13 |
1758 .byte 102,15,56,223,209 | 1707 .byte 102,15,56,223,209 |
1759 xorps %xmm5,%xmm2 | 1708 xorps %xmm5,%xmm2 |
1760 movups %xmm2,(%edi) | 1709 movups %xmm2,(%edi) |
1761 .L072xts_dec_steal: | 1710 .L070xts_dec_steal: |
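# [editor's note] This byte loop appears to implement standard XTS
# ciphertext stealing: %eax holds the tail length, and each iteration swaps
# a tail input byte with the corresponding byte of the block just written,
# before the rebuilt block is decrypted with the saved tweak in %xmm6.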
1762 movzbl 16(%esi),%ecx | 1711 movzbl 16(%esi),%ecx |
1763 movzbl (%edi),%edx | 1712 movzbl (%edi),%edx |
1764 leal 1(%esi),%esi | 1713 leal 1(%esi),%esi |
1765 movb %cl,(%edi) | 1714 movb %cl,(%edi) |
1766 movb %dl,16(%edi) | 1715 movb %dl,16(%edi) |
1767 leal 1(%edi),%edi | 1716 leal 1(%edi),%edi |
1768 subl $1,%eax | 1717 subl $1,%eax |
1769 jnz .L072xts_dec_steal | 1718 jnz .L070xts_dec_steal |
1770 subl 112(%esp),%edi | 1719 subl 112(%esp),%edi |
1771 movl %ebp,%edx | 1720 movl %ebp,%edx |
1772 movl %ebx,%ecx | 1721 movl %ebx,%ecx |
1773 movups (%edi),%xmm2 | 1722 movups (%edi),%xmm2 |
1774 xorps %xmm6,%xmm2 | 1723 xorps %xmm6,%xmm2 |
1775 movups (%edx),%xmm0 | 1724 movups (%edx),%xmm0 |
1776 movups 16(%edx),%xmm1 | 1725 movups 16(%edx),%xmm1 |
1777 leal 32(%edx),%edx | 1726 leal 32(%edx),%edx |
1778 xorps %xmm0,%xmm2 | 1727 xorps %xmm0,%xmm2 |
1779 .L073dec1_loop_14: | 1728 .L071dec1_loop_14: |
1780 .byte 102,15,56,222,209 | 1729 .byte 102,15,56,222,209 |
1781 decl %ecx | 1730 decl %ecx |
1782 movups (%edx),%xmm1 | 1731 movups (%edx),%xmm1 |
1783 leal 16(%edx),%edx | 1732 leal 16(%edx),%edx |
1784 jnz .L073dec1_loop_14 | 1733 jnz .L071dec1_loop_14 |
1785 .byte 102,15,56,223,209 | 1734 .byte 102,15,56,223,209 |
1786 xorps %xmm6,%xmm2 | 1735 xorps %xmm6,%xmm2 |
1787 movups %xmm2,(%edi) | 1736 movups %xmm2,(%edi) |
1788 .L069xts_dec_ret: | 1737 .L067xts_dec_ret: |
1789 pxor %xmm0,%xmm0 | |
1790 pxor %xmm1,%xmm1 | |
1791 pxor %xmm2,%xmm2 | |
1792 movdqa %xmm0,(%esp) | |
1793 pxor %xmm3,%xmm3 | |
1794 movdqa %xmm0,16(%esp) | |
1795 pxor %xmm4,%xmm4 | |
1796 movdqa %xmm0,32(%esp) | |
1797 pxor %xmm5,%xmm5 | |
1798 movdqa %xmm0,48(%esp) | |
1799 pxor %xmm6,%xmm6 | |
1800 movdqa %xmm0,64(%esp) | |
1801 pxor %xmm7,%xmm7 | |
1802 movdqa %xmm0,80(%esp) | |
1803 movl 116(%esp),%esp | 1738 movl 116(%esp),%esp |
1804 popl %edi | 1739 popl %edi |
1805 popl %esi | 1740 popl %esi |
1806 popl %ebx | 1741 popl %ebx |
1807 popl %ebp | 1742 popl %ebp |
1808 ret | 1743 ret |
1809 .size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin | 1744 .size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin |
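# [editor's note] For reference, the C prototype this entry point implements
# in the OpenSSL/BoringSSL sources (argument names assumed from there) is:
# void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
#                        size_t length, const AES_KEY *key,
#                        unsigned char *ivec, int enc);
# After the four pushes below, those arguments sit at 20..40(%esp).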
1810 .globl aesni_cbc_encrypt | 1745 .globl aesni_cbc_encrypt |
1811 .hidden aesni_cbc_encrypt | 1746 .hidden aesni_cbc_encrypt |
1812 .type aesni_cbc_encrypt,@function | 1747 .type aesni_cbc_encrypt,@function |
1813 .align 16 | 1748 .align 16 |
1814 aesni_cbc_encrypt: | 1749 aesni_cbc_encrypt: |
1815 .L_aesni_cbc_encrypt_begin: | 1750 .L_aesni_cbc_encrypt_begin: |
1816 pushl %ebp | 1751 pushl %ebp |
1817 pushl %ebx | 1752 pushl %ebx |
1818 pushl %esi | 1753 pushl %esi |
1819 pushl %edi | 1754 pushl %edi |
1820 movl 20(%esp),%esi | 1755 movl 20(%esp),%esi |
1821 movl %esp,%ebx | 1756 movl %esp,%ebx |
1822 movl 24(%esp),%edi | 1757 movl 24(%esp),%edi |
1823 subl $24,%ebx | 1758 subl $24,%ebx |
1824 movl 28(%esp),%eax | 1759 movl 28(%esp),%eax |
1825 andl $-16,%ebx | 1760 andl $-16,%ebx |
1826 movl 32(%esp),%edx | 1761 movl 32(%esp),%edx |
1827 movl 36(%esp),%ebp | 1762 movl 36(%esp),%ebp |
1828 testl %eax,%eax | 1763 testl %eax,%eax |
1829 jz .L074cbc_abort | 1764 jz .L072cbc_abort |
1830 cmpl $0,40(%esp) | 1765 cmpl $0,40(%esp) |
1831 xchgl %esp,%ebx | 1766 xchgl %esp,%ebx |
1832 movups (%ebp),%xmm7 | 1767 movups (%ebp),%xmm7 |
1833 movl 240(%edx),%ecx | 1768 movl 240(%edx),%ecx |
1834 movl %edx,%ebp | 1769 movl %edx,%ebp |
1835 movl %ebx,16(%esp) | 1770 movl %ebx,16(%esp) |
1836 movl %ecx,%ebx | 1771 movl %ecx,%ebx |
1837 je .L075cbc_decrypt | 1772 je .L073cbc_decrypt |
1838 movaps %xmm7,%xmm2 | 1773 movaps %xmm7,%xmm2 |
1839 cmpl $16,%eax | 1774 cmpl $16,%eax |
1840 jb .L076cbc_enc_tail | 1775 jb .L074cbc_enc_tail |
1841 subl $16,%eax | 1776 subl $16,%eax |
1842 jmp .L077cbc_enc_loop | 1777 jmp .L075cbc_enc_loop |
1843 .align 16 | 1778 .align 16 |
1844 .L077cbc_enc_loop: | 1779 .L075cbc_enc_loop: |
1845 movups (%esi),%xmm7 | 1780 movups (%esi),%xmm7 |
1846 leal 16(%esi),%esi | 1781 leal 16(%esi),%esi |
1847 movups (%edx),%xmm0 | 1782 movups (%edx),%xmm0 |
1848 movups 16(%edx),%xmm1 | 1783 movups 16(%edx),%xmm1 |
1849 xorps %xmm0,%xmm7 | 1784 xorps %xmm0,%xmm7 |
1850 leal 32(%edx),%edx | 1785 leal 32(%edx),%edx |
1851 xorps %xmm7,%xmm2 | 1786 xorps %xmm7,%xmm2 |
1852 .L078enc1_loop_15: | 1787 .L076enc1_loop_15: |
1853 .byte 102,15,56,220,209 | 1788 .byte 102,15,56,220,209 |
1854 decl %ecx | 1789 decl %ecx |
1855 movups (%edx),%xmm1 | 1790 movups (%edx),%xmm1 |
1856 leal 16(%edx),%edx | 1791 leal 16(%edx),%edx |
1857 jnz .L078enc1_loop_15 | 1792 jnz .L076enc1_loop_15 |
1858 .byte 102,15,56,221,209 | 1793 .byte 102,15,56,221,209 |
1859 movl %ebx,%ecx | 1794 movl %ebx,%ecx |
1860 movl %ebp,%edx | 1795 movl %ebp,%edx |
1861 movups %xmm2,(%edi) | 1796 movups %xmm2,(%edi) |
1862 leal 16(%edi),%edi | 1797 leal 16(%edi),%edi |
1863 subl $16,%eax | 1798 subl $16,%eax |
1864 jnc .L077cbc_enc_loop | 1799 jnc .L075cbc_enc_loop |
1865 addl $16,%eax | 1800 addl $16,%eax |
1866 jnz .L076cbc_enc_tail | 1801 jnz .L074cbc_enc_tail |
1867 movaps %xmm2,%xmm7 | 1802 movaps %xmm2,%xmm7 |
1868 pxor %xmm2,%xmm2 | 1803 jmp .L077cbc_ret |
1869 jmp .L079cbc_ret | 1804 .L074cbc_enc_tail: |
1870 .L076cbc_enc_tail: | |
1871 movl %eax,%ecx | 1805 movl %eax,%ecx |
1872 .long 2767451785 | 1806 .long 2767451785 |
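# [editor's note] 2767451785 = 0xA4F3F689, little-endian for the bytes
# 89 F6 F3 A4: "mov %esi,%esi; rep movsb" emitted as data (an old OpenSSL
# idiom) — copies the %ecx-byte tail from (%esi) to (%edi).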
1873 movl $16,%ecx | 1807 movl $16,%ecx |
1874 subl %eax,%ecx | 1808 subl %eax,%ecx |
1875 xorl %eax,%eax | 1809 xorl %eax,%eax |
1876 .long 2868115081 | 1810 .long 2868115081 |
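# [editor's note] Likewise 2868115081 = 0xAAF3F689 encodes "mov %esi,%esi;
# rep stosb", padding the rest of the block with the zero byte in %al.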
1877 leal -16(%edi),%edi | 1811 leal -16(%edi),%edi |
1878 movl %ebx,%ecx | 1812 movl %ebx,%ecx |
1879 movl %edi,%esi | 1813 movl %edi,%esi |
1880 movl %ebp,%edx | 1814 movl %ebp,%edx |
1881 jmp .L077cbc_enc_loop | 1815 jmp .L075cbc_enc_loop |
1882 .align 16 | 1816 .align 16 |
1883 .L075cbc_decrypt: | 1817 .L073cbc_decrypt: |
1884 cmpl $80,%eax | 1818 cmpl $80,%eax |
1885 jbe .L080cbc_dec_tail | 1819 jbe .L078cbc_dec_tail |
1886 movaps %xmm7,(%esp) | 1820 movaps %xmm7,(%esp) |
1887 subl $80,%eax | 1821 subl $80,%eax |
1888 jmp .L081cbc_dec_loop6_enter | 1822 jmp .L079cbc_dec_loop6_enter |
1889 .align 16 | 1823 .align 16 |
1890 .L082cbc_dec_loop6: | 1824 .L080cbc_dec_loop6: |
1891 movaps %xmm0,(%esp) | 1825 movaps %xmm0,(%esp) |
1892 movups %xmm7,(%edi) | 1826 movups %xmm7,(%edi) |
1893 leal 16(%edi),%edi | 1827 leal 16(%edi),%edi |
1894 .L081cbc_dec_loop6_enter: | 1828 .L079cbc_dec_loop6_enter: |
1895 movdqu (%esi),%xmm2 | 1829 movdqu (%esi),%xmm2 |
1896 movdqu 16(%esi),%xmm3 | 1830 movdqu 16(%esi),%xmm3 |
1897 movdqu 32(%esi),%xmm4 | 1831 movdqu 32(%esi),%xmm4 |
1898 movdqu 48(%esi),%xmm5 | 1832 movdqu 48(%esi),%xmm5 |
1899 movdqu 64(%esi),%xmm6 | 1833 movdqu 64(%esi),%xmm6 |
1900 movdqu 80(%esi),%xmm7 | 1834 movdqu 80(%esi),%xmm7 |
1901 call _aesni_decrypt6 | 1835 call _aesni_decrypt6 |
1902 movups (%esi),%xmm1 | 1836 movups (%esi),%xmm1 |
1903 movups 16(%esi),%xmm0 | 1837 movups 16(%esi),%xmm0 |
1904 xorps (%esp),%xmm2 | 1838 xorps (%esp),%xmm2 |
1905 xorps %xmm1,%xmm3 | 1839 xorps %xmm1,%xmm3 |
1906 movups 32(%esi),%xmm1 | 1840 movups 32(%esi),%xmm1 |
1907 xorps %xmm0,%xmm4 | 1841 xorps %xmm0,%xmm4 |
1908 movups 48(%esi),%xmm0 | 1842 movups 48(%esi),%xmm0 |
1909 xorps %xmm1,%xmm5 | 1843 xorps %xmm1,%xmm5 |
1910 movups 64(%esi),%xmm1 | 1844 movups 64(%esi),%xmm1 |
1911 xorps %xmm0,%xmm6 | 1845 xorps %xmm0,%xmm6 |
1912 movups 80(%esi),%xmm0 | 1846 movups 80(%esi),%xmm0 |
1913 xorps %xmm1,%xmm7 | 1847 xorps %xmm1,%xmm7 |
1914 movups %xmm2,(%edi) | 1848 movups %xmm2,(%edi) |
1915 movups %xmm3,16(%edi) | 1849 movups %xmm3,16(%edi) |
1916 leal 96(%esi),%esi | 1850 leal 96(%esi),%esi |
1917 movups %xmm4,32(%edi) | 1851 movups %xmm4,32(%edi) |
1918 movl %ebx,%ecx | 1852 movl %ebx,%ecx |
1919 movups %xmm5,48(%edi) | 1853 movups %xmm5,48(%edi) |
1920 movl %ebp,%edx | 1854 movl %ebp,%edx |
1921 movups %xmm6,64(%edi) | 1855 movups %xmm6,64(%edi) |
1922 leal 80(%edi),%edi | 1856 leal 80(%edi),%edi |
1923 subl $96,%eax | 1857 subl $96,%eax |
1924 ja .L082cbc_dec_loop6 | 1858 ja .L080cbc_dec_loop6 |
1925 movaps %xmm7,%xmm2 | 1859 movaps %xmm7,%xmm2 |
1926 movaps %xmm0,%xmm7 | 1860 movaps %xmm0,%xmm7 |
1927 addl $80,%eax | 1861 addl $80,%eax |
1928 jle .L083cbc_dec_clear_tail_collected | 1862 jle .L081cbc_dec_tail_collected |
1929 movups %xmm2,(%edi) | 1863 movups %xmm2,(%edi) |
1930 leal 16(%edi),%edi | 1864 leal 16(%edi),%edi |
1931 .L080cbc_dec_tail: | 1865 .L078cbc_dec_tail: |
1932 movups (%esi),%xmm2 | 1866 movups (%esi),%xmm2 |
1933 movaps %xmm2,%xmm6 | 1867 movaps %xmm2,%xmm6 |
1934 cmpl $16,%eax | 1868 cmpl $16,%eax |
1935 jbe .L084cbc_dec_one | 1869 jbe .L082cbc_dec_one |
1936 movups 16(%esi),%xmm3 | 1870 movups 16(%esi),%xmm3 |
1937 movaps %xmm3,%xmm5 | 1871 movaps %xmm3,%xmm5 |
1938 cmpl $32,%eax | 1872 cmpl $32,%eax |
1939 jbe .L085cbc_dec_two | 1873 jbe .L083cbc_dec_two |
1940 movups 32(%esi),%xmm4 | 1874 movups 32(%esi),%xmm4 |
1941 cmpl $48,%eax | 1875 cmpl $48,%eax |
1942 jbe .L086cbc_dec_three | 1876 jbe .L084cbc_dec_three |
1943 movups 48(%esi),%xmm5 | 1877 movups 48(%esi),%xmm5 |
1944 cmpl $64,%eax | 1878 cmpl $64,%eax |
1945 jbe .L087cbc_dec_four | 1879 jbe .L085cbc_dec_four |
1946 movups 64(%esi),%xmm6 | 1880 movups 64(%esi),%xmm6 |
1947 movaps %xmm7,(%esp) | 1881 movaps %xmm7,(%esp) |
1948 movups (%esi),%xmm2 | 1882 movups (%esi),%xmm2 |
1949 xorps %xmm7,%xmm7 | 1883 xorps %xmm7,%xmm7 |
1950 call _aesni_decrypt6 | 1884 call _aesni_decrypt6 |
1951 movups (%esi),%xmm1 | 1885 movups (%esi),%xmm1 |
1952 movups 16(%esi),%xmm0 | 1886 movups 16(%esi),%xmm0 |
1953 xorps (%esp),%xmm2 | 1887 xorps (%esp),%xmm2 |
1954 xorps %xmm1,%xmm3 | 1888 xorps %xmm1,%xmm3 |
1955 movups 32(%esi),%xmm1 | 1889 movups 32(%esi),%xmm1 |
1956 xorps %xmm0,%xmm4 | 1890 xorps %xmm0,%xmm4 |
1957 movups 48(%esi),%xmm0 | 1891 movups 48(%esi),%xmm0 |
1958 xorps %xmm1,%xmm5 | 1892 xorps %xmm1,%xmm5 |
1959 movups 64(%esi),%xmm7 | 1893 movups 64(%esi),%xmm7 |
1960 xorps %xmm0,%xmm6 | 1894 xorps %xmm0,%xmm6 |
1961 movups %xmm2,(%edi) | 1895 movups %xmm2,(%edi) |
1962 movups %xmm3,16(%edi) | 1896 movups %xmm3,16(%edi) |
1963 pxor %xmm3,%xmm3 | |
1964 movups %xmm4,32(%edi) | 1897 movups %xmm4,32(%edi) |
1965 pxor %xmm4,%xmm4 | |
1966 movups %xmm5,48(%edi) | 1898 movups %xmm5,48(%edi) |
1967 pxor %xmm5,%xmm5 | |
1968 leal 64(%edi),%edi | 1899 leal 64(%edi),%edi |
1969 movaps %xmm6,%xmm2 | 1900 movaps %xmm6,%xmm2 |
1970 pxor %xmm6,%xmm6 | |
1971 subl $80,%eax | 1901 subl $80,%eax |
1972 jmp .L088cbc_dec_tail_collected | 1902 jmp .L081cbc_dec_tail_collected |
1973 .align 16 | 1903 .align 16 |
1974 .L084cbc_dec_one: | 1904 .L082cbc_dec_one: |
1975 movups (%edx),%xmm0 | 1905 movups (%edx),%xmm0 |
1976 movups 16(%edx),%xmm1 | 1906 movups 16(%edx),%xmm1 |
1977 leal 32(%edx),%edx | 1907 leal 32(%edx),%edx |
1978 xorps %xmm0,%xmm2 | 1908 xorps %xmm0,%xmm2 |
1979 .L089dec1_loop_16: | 1909 .L086dec1_loop_16: |
1980 .byte 102,15,56,222,209 | 1910 .byte 102,15,56,222,209 |
1981 decl %ecx | 1911 decl %ecx |
1982 movups (%edx),%xmm1 | 1912 movups (%edx),%xmm1 |
1983 leal 16(%edx),%edx | 1913 leal 16(%edx),%edx |
1984 jnz .L089dec1_loop_16 | 1914 jnz .L086dec1_loop_16 |
1985 .byte 102,15,56,223,209 | 1915 .byte 102,15,56,223,209 |
1986 xorps %xmm7,%xmm2 | 1916 xorps %xmm7,%xmm2 |
1987 movaps %xmm6,%xmm7 | 1917 movaps %xmm6,%xmm7 |
1988 subl $16,%eax | 1918 subl $16,%eax |
1989 jmp .L088cbc_dec_tail_collected | 1919 jmp .L081cbc_dec_tail_collected |
1990 .align 16 | 1920 .align 16 |
1991 .L085cbc_dec_two: | 1921 .L083cbc_dec_two: |
1992 call _aesni_decrypt2 | 1922 call _aesni_decrypt2 |
1993 xorps %xmm7,%xmm2 | 1923 xorps %xmm7,%xmm2 |
1994 xorps %xmm6,%xmm3 | 1924 xorps %xmm6,%xmm3 |
1995 movups %xmm2,(%edi) | 1925 movups %xmm2,(%edi) |
1996 movaps %xmm3,%xmm2 | 1926 movaps %xmm3,%xmm2 |
1997 pxor %xmm3,%xmm3 | |
1998 leal 16(%edi),%edi | 1927 leal 16(%edi),%edi |
1999 movaps %xmm5,%xmm7 | 1928 movaps %xmm5,%xmm7 |
2000 subl $32,%eax | 1929 subl $32,%eax |
2001 jmp .L088cbc_dec_tail_collected | 1930 jmp .L081cbc_dec_tail_collected |
2002 .align 16 | 1931 .align 16 |
2003 .L086cbc_dec_three: | 1932 .L084cbc_dec_three: |
2004 call _aesni_decrypt3 | 1933 call _aesni_decrypt3 |
2005 xorps %xmm7,%xmm2 | 1934 xorps %xmm7,%xmm2 |
2006 xorps %xmm6,%xmm3 | 1935 xorps %xmm6,%xmm3 |
2007 xorps %xmm5,%xmm4 | 1936 xorps %xmm5,%xmm4 |
2008 movups %xmm2,(%edi) | 1937 movups %xmm2,(%edi) |
2009 movaps %xmm4,%xmm2 | 1938 movaps %xmm4,%xmm2 |
2010 pxor %xmm4,%xmm4 | |
2011 movups %xmm3,16(%edi) | 1939 movups %xmm3,16(%edi) |
2012 pxor %xmm3,%xmm3 | |
2013 leal 32(%edi),%edi | 1940 leal 32(%edi),%edi |
2014 movups 32(%esi),%xmm7 | 1941 movups 32(%esi),%xmm7 |
2015 subl $48,%eax | 1942 subl $48,%eax |
2016 jmp .L088cbc_dec_tail_collected | 1943 jmp .L081cbc_dec_tail_collected |
2017 .align 16 | 1944 .align 16 |
2018 .L087cbc_dec_four: | 1945 .L085cbc_dec_four: |
2019 call _aesni_decrypt4 | 1946 call _aesni_decrypt4 |
2020 movups 16(%esi),%xmm1 | 1947 movups 16(%esi),%xmm1 |
2021 movups 32(%esi),%xmm0 | 1948 movups 32(%esi),%xmm0 |
2022 xorps %xmm7,%xmm2 | 1949 xorps %xmm7,%xmm2 |
2023 movups 48(%esi),%xmm7 | 1950 movups 48(%esi),%xmm7 |
2024 xorps %xmm6,%xmm3 | 1951 xorps %xmm6,%xmm3 |
2025 movups %xmm2,(%edi) | 1952 movups %xmm2,(%edi) |
2026 xorps %xmm1,%xmm4 | 1953 xorps %xmm1,%xmm4 |
2027 movups %xmm3,16(%edi) | 1954 movups %xmm3,16(%edi) |
2028 pxor %xmm3,%xmm3 | |
2029 xorps %xmm0,%xmm5 | 1955 xorps %xmm0,%xmm5 |
2030 movups %xmm4,32(%edi) | 1956 movups %xmm4,32(%edi) |
2031 pxor %xmm4,%xmm4 | |
2032 leal 48(%edi),%edi | 1957 leal 48(%edi),%edi |
2033 movaps %xmm5,%xmm2 | 1958 movaps %xmm5,%xmm2 |
2034 pxor %xmm5,%xmm5 | |
2035 subl $64,%eax | 1959 subl $64,%eax |
2036 jmp .L088cbc_dec_tail_collected | 1960 .L081cbc_dec_tail_collected: |
| 1961 andl $15,%eax |
| 1962 jnz .L087cbc_dec_tail_partial |
| 1963 movups %xmm2,(%edi) |
| 1964 jmp .L077cbc_ret |
2037 .align 16 | 1965 .align 16 |
2038 .L083cbc_dec_clear_tail_collected: | 1966 .L087cbc_dec_tail_partial: |
2039 pxor %xmm3,%xmm3 | |
2040 pxor %xmm4,%xmm4 | |
2041 pxor %xmm5,%xmm5 | |
2042 pxor %xmm6,%xmm6 | |
2043 .L088cbc_dec_tail_collected: | |
2044 andl $15,%eax | |
2045 jnz .L090cbc_dec_tail_partial | |
2046 movups %xmm2,(%edi) | |
2047 pxor %xmm0,%xmm0 | |
2048 jmp .L079cbc_ret | |
2049 .align 16 | |
2050 .L090cbc_dec_tail_partial: | |
2051 movaps %xmm2,(%esp) | 1967 movaps %xmm2,(%esp) |
2052 pxor %xmm0,%xmm0 | |
2053 movl $16,%ecx | 1968 movl $16,%ecx |
2054 movl %esp,%esi | 1969 movl %esp,%esi |
2055 subl %eax,%ecx | 1970 subl %eax,%ecx |
2056 .long 2767451785 | 1971 .long 2767451785 |
2057 movdqa %xmm2,(%esp) | 1972 .L077cbc_ret: |
2058 .L079cbc_ret: | |
2059 movl 16(%esp),%esp | 1973 movl 16(%esp),%esp |
2060 movl 36(%esp),%ebp | 1974 movl 36(%esp),%ebp |
2061 pxor %xmm2,%xmm2 | |
2062 pxor %xmm1,%xmm1 | |
2063 movups %xmm7,(%ebp) | 1975 movups %xmm7,(%ebp) |
2064 pxor %xmm7,%xmm7 | 1976 .L072cbc_abort: |
2065 .L074cbc_abort: | |
2066 popl %edi | 1977 popl %edi |
2067 popl %esi | 1978 popl %esi |
2068 popl %ebx | 1979 popl %ebx |
2069 popl %ebp | 1980 popl %ebp |
2070 ret | 1981 ret |
2071 .size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin | 1982 .size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin |
2072 .hidden _aesni_set_encrypt_key | 1983 .hidden _aesni_set_encrypt_key |
2073 .type _aesni_set_encrypt_key,@function | 1984 .type _aesni_set_encrypt_key,@function |
2074 .align 16 | 1985 .align 16 |
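# [editor's note] The schedule below is built with aeskeygenassist (the
# .byte 102,15,58,223,... sequences), one round constant per call;
# 128/192/256-bit keys yield 10/12/14 rounds, with the round count stored
# at the end of the schedule.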
2075 _aesni_set_encrypt_key: | 1986 _aesni_set_encrypt_key: |
2076 pushl %ebp | |
2077 pushl %ebx | |
2078 testl %eax,%eax | 1987 testl %eax,%eax |
2079 jz .L091bad_pointer | 1988 jz .L088bad_pointer |
2080 testl %edx,%edx | 1989 testl %edx,%edx |
2081 jz .L091bad_pointer | 1990 jz .L088bad_pointer |
2082 call .L092pic | |
2083 .L092pic: | |
2084 popl %ebx | |
2085 leal .Lkey_const-.L092pic(%ebx),%ebx | |
2086 leal OPENSSL_ia32cap_P-.Lkey_const(%ebx),%ebp | |
2087 movups (%eax),%xmm0 | 1991 movups (%eax),%xmm0 |
2088 xorps %xmm4,%xmm4 | 1992 xorps %xmm4,%xmm4 |
2089 movl 4(%ebp),%ebp | |
2090 leal 16(%edx),%edx | 1993 leal 16(%edx),%edx |
2091 andl $268437504,%ebp | |
2092 cmpl $256,%ecx | 1994 cmpl $256,%ecx |
2093 je .L09314rounds | 1995 je .L08914rounds |
2094 cmpl $192,%ecx | 1996 cmpl $192,%ecx |
2095 je .L09412rounds | 1997 je .L09012rounds |
2096 cmpl $128,%ecx | 1998 cmpl $128,%ecx |
2097 jne .L095bad_keybits | 1999 jne .L091bad_keybits |
2098 .align 16 | 2000 .align 16 |
2099 .L09610rounds: | 2001 .L09210rounds: |
2100 cmpl $268435456,%ebp | |
2101 je .L09710rounds_alt | |
2102 movl $9,%ecx | 2002 movl $9,%ecx |
2103 movups %xmm0,-16(%edx) | 2003 movups %xmm0,-16(%edx) |
2104 .byte 102,15,58,223,200,1 | 2004 .byte 102,15,58,223,200,1 |
2105 call .L098key_128_cold | 2005 call .L093key_128_cold |
2106 .byte 102,15,58,223,200,2 | 2006 .byte 102,15,58,223,200,2 |
2107 call .L099key_128 | 2007 call .L094key_128 |
2108 .byte 102,15,58,223,200,4 | 2008 .byte 102,15,58,223,200,4 |
2109 call .L099key_128 | 2009 call .L094key_128 |
2110 .byte 102,15,58,223,200,8 | 2010 .byte 102,15,58,223,200,8 |
2111 call .L099key_128 | 2011 call .L094key_128 |
2112 .byte 102,15,58,223,200,16 | 2012 .byte 102,15,58,223,200,16 |
2113 call .L099key_128 | 2013 call .L094key_128 |
2114 .byte 102,15,58,223,200,32 | 2014 .byte 102,15,58,223,200,32 |
2115 call .L099key_128 | 2015 call .L094key_128 |
2116 .byte 102,15,58,223,200,64 | 2016 .byte 102,15,58,223,200,64 |
2117 call .L099key_128 | 2017 call .L094key_128 |
2118 .byte 102,15,58,223,200,128 | 2018 .byte 102,15,58,223,200,128 |
2119 call .L099key_128 | 2019 call .L094key_128 |
2120 .byte 102,15,58,223,200,27 | 2020 .byte 102,15,58,223,200,27 |
2121 call .L099key_128 | 2021 call .L094key_128 |
2122 .byte 102,15,58,223,200,54 | 2022 .byte 102,15,58,223,200,54 |
2123 call .L099key_128 | 2023 call .L094key_128 |
2124 movups %xmm0,(%edx) | 2024 movups %xmm0,(%edx) |
2125 movl %ecx,80(%edx) | 2025 movl %ecx,80(%edx) |
2126 jmp .L100good_key | 2026 xorl %eax,%eax |
| 2027 ret |
2127 .align 16 | 2028 .align 16 |
2128 .L099key_128: | 2029 .L094key_128: |
2129 movups %xmm0,(%edx) | 2030 movups %xmm0,(%edx) |
2130 leal 16(%edx),%edx | 2031 leal 16(%edx),%edx |
2131 .L098key_128_cold: | 2032 .L093key_128_cold: |
2132 shufps $16,%xmm0,%xmm4 | 2033 shufps $16,%xmm0,%xmm4 |
2133 xorps %xmm4,%xmm0 | 2034 xorps %xmm4,%xmm0 |
2134 shufps $140,%xmm0,%xmm4 | 2035 shufps $140,%xmm0,%xmm4 |
2135 xorps %xmm4,%xmm0 | 2036 xorps %xmm4,%xmm0 |
2136 shufps $255,%xmm1,%xmm1 | 2037 shufps $255,%xmm1,%xmm1 |
2137 xorps %xmm1,%xmm0 | 2038 xorps %xmm1,%xmm0 |
2138 ret | 2039 ret |
2139 .align 16 | 2040 .align 16 |
2140 .L09710rounds_alt: | 2041 .L09012rounds: |
2141 movdqa (%ebx),%xmm5 | |
2142 movl $8,%ecx | |
2143 movdqa 32(%ebx),%xmm4 | |
2144 movdqa %xmm0,%xmm2 | |
2145 movdqu %xmm0,-16(%edx) | |
2146 .L101loop_key128: | |
2147 .byte 102,15,56,0,197 | |
2148 .byte 102,15,56,221,196 | |
2149 pslld $1,%xmm4 | |
2150 leal 16(%edx),%edx | |
2151 movdqa %xmm2,%xmm3 | |
2152 pslldq $4,%xmm2 | |
2153 pxor %xmm2,%xmm3 | |
2154 pslldq $4,%xmm2 | |
2155 pxor %xmm2,%xmm3 | |
2156 pslldq $4,%xmm2 | |
2157 pxor %xmm3,%xmm2 | |
2158 pxor %xmm2,%xmm0 | |
2159 movdqu %xmm0,-16(%edx) | |
2160 movdqa %xmm0,%xmm2 | |
2161 decl %ecx | |
2162 jnz .L101loop_key128 | |
2163 movdqa 48(%ebx),%xmm4 | |
2164 .byte 102,15,56,0,197 | |
2165 .byte 102,15,56,221,196 | |
2166 pslld $1,%xmm4 | |
2167 movdqa %xmm2,%xmm3 | |
2168 pslldq $4,%xmm2 | |
2169 pxor %xmm2,%xmm3 | |
2170 pslldq $4,%xmm2 | |
2171 pxor %xmm2,%xmm3 | |
2172 pslldq $4,%xmm2 | |
2173 pxor %xmm3,%xmm2 | |
2174 pxor %xmm2,%xmm0 | |
2175 movdqu %xmm0,(%edx) | |
2176 movdqa %xmm0,%xmm2 | |
2177 .byte 102,15,56,0,197 | |
2178 .byte 102,15,56,221,196 | |
2179 movdqa %xmm2,%xmm3 | |
2180 pslldq $4,%xmm2 | |
2181 pxor %xmm2,%xmm3 | |
2182 pslldq $4,%xmm2 | |
2183 pxor %xmm2,%xmm3 | |
2184 pslldq $4,%xmm2 | |
2185 pxor %xmm3,%xmm2 | |
2186 pxor %xmm2,%xmm0 | |
2187 movdqu %xmm0,16(%edx) | |
2188 movl $9,%ecx | |
2189 movl %ecx,96(%edx) | |
2190 jmp .L100good_key | |
2191 .align 16 | |
2192 .L09412rounds: | |
2193 movq 16(%eax),%xmm2 | 2042 movq 16(%eax),%xmm2 |
2194 cmpl $268435456,%ebp | |
2195 je .L10212rounds_alt | |
2196 movl $11,%ecx | 2043 movl $11,%ecx |
2197 movups %xmm0,-16(%edx) | 2044 movups %xmm0,-16(%edx) |
2198 .byte 102,15,58,223,202,1 | 2045 .byte 102,15,58,223,202,1 |
2199 call .L103key_192a_cold | 2046 call .L095key_192a_cold |
2200 .byte 102,15,58,223,202,2 | 2047 .byte 102,15,58,223,202,2 |
2201 call .L104key_192b | 2048 call .L096key_192b |
2202 .byte 102,15,58,223,202,4 | 2049 .byte 102,15,58,223,202,4 |
2203 call .L105key_192a | 2050 call .L097key_192a |
2204 .byte 102,15,58,223,202,8 | 2051 .byte 102,15,58,223,202,8 |
2205 call .L104key_192b | 2052 call .L096key_192b |
2206 .byte 102,15,58,223,202,16 | 2053 .byte 102,15,58,223,202,16 |
2207 call .L105key_192a | 2054 call .L097key_192a |
2208 .byte 102,15,58,223,202,32 | 2055 .byte 102,15,58,223,202,32 |
2209 call .L104key_192b | 2056 call .L096key_192b |
2210 .byte 102,15,58,223,202,64 | 2057 .byte 102,15,58,223,202,64 |
2211 call .L105key_192a | 2058 call .L097key_192a |
2212 .byte 102,15,58,223,202,128 | 2059 .byte 102,15,58,223,202,128 |
2213 call .L104key_192b | 2060 call .L096key_192b |
2214 movups %xmm0,(%edx) | 2061 movups %xmm0,(%edx) |
2215 movl %ecx,48(%edx) | 2062 movl %ecx,48(%edx) |
2216 jmp .L100good_key | 2063 xorl %eax,%eax |
| 2064 ret |
2217 .align 16 | 2065 .align 16 |
2218 .L105key_192a: | 2066 .L097key_192a: |
2219 movups %xmm0,(%edx) | 2067 movups %xmm0,(%edx) |
2220 leal 16(%edx),%edx | 2068 leal 16(%edx),%edx |
2221 .align 16 | 2069 .align 16 |
2222 .L103key_192a_cold: | 2070 .L095key_192a_cold: |
2223 movaps %xmm2,%xmm5 | 2071 movaps %xmm2,%xmm5 |
2224 .L106key_192b_warm: | 2072 .L098key_192b_warm: |
2225 shufps $16,%xmm0,%xmm4 | 2073 shufps $16,%xmm0,%xmm4 |
2226 movdqa %xmm2,%xmm3 | 2074 movdqa %xmm2,%xmm3 |
2227 xorps %xmm4,%xmm0 | 2075 xorps %xmm4,%xmm0 |
2228 shufps $140,%xmm0,%xmm4 | 2076 shufps $140,%xmm0,%xmm4 |
2229 pslldq $4,%xmm3 | 2077 pslldq $4,%xmm3 |
2230 xorps %xmm4,%xmm0 | 2078 xorps %xmm4,%xmm0 |
2231 pshufd $85,%xmm1,%xmm1 | 2079 pshufd $85,%xmm1,%xmm1 |
2232 pxor %xmm3,%xmm2 | 2080 pxor %xmm3,%xmm2 |
2233 pxor %xmm1,%xmm0 | 2081 pxor %xmm1,%xmm0 |
2234 pshufd $255,%xmm0,%xmm3 | 2082 pshufd $255,%xmm0,%xmm3 |
2235 pxor %xmm3,%xmm2 | 2083 pxor %xmm3,%xmm2 |
2236 ret | 2084 ret |
2237 .align 16 | 2085 .align 16 |
2238 .L104key_192b: | 2086 .L096key_192b: |
2239 movaps %xmm0,%xmm3 | 2087 movaps %xmm0,%xmm3 |
2240 shufps $68,%xmm0,%xmm5 | 2088 shufps $68,%xmm0,%xmm5 |
2241 movups %xmm5,(%edx) | 2089 movups %xmm5,(%edx) |
2242 shufps $78,%xmm2,%xmm3 | 2090 shufps $78,%xmm2,%xmm3 |
2243 movups %xmm3,16(%edx) | 2091 movups %xmm3,16(%edx) |
2244 leal 32(%edx),%edx | 2092 leal 32(%edx),%edx |
2245 jmp .L106key_192b_warm | 2093 jmp .L098key_192b_warm |
2246 .align 16 | 2094 .align 16 |
2247 .L10212rounds_alt: | 2095 .L08914rounds: |
2248 movdqa 16(%ebx),%xmm5 | |
2249 movdqa 32(%ebx),%xmm4 | |
2250 movl $8,%ecx | |
2251 movdqu %xmm0,-16(%edx) | |
2252 .L107loop_key192: | |
2253 movq %xmm2,(%edx) | |
2254 movdqa %xmm2,%xmm1 | |
2255 .byte 102,15,56,0,213 | |
2256 .byte 102,15,56,221,212 | |
2257 pslld $1,%xmm4 | |
2258 leal 24(%edx),%edx | |
2259 movdqa %xmm0,%xmm3 | |
2260 pslldq $4,%xmm0 | |
2261 pxor %xmm0,%xmm3 | |
2262 pslldq $4,%xmm0 | |
2263 pxor %xmm0,%xmm3 | |
2264 pslldq $4,%xmm0 | |
2265 pxor %xmm3,%xmm0 | |
2266 pshufd $255,%xmm0,%xmm3 | |
2267 pxor %xmm1,%xmm3 | |
2268 pslldq $4,%xmm1 | |
2269 pxor %xmm1,%xmm3 | |
2270 pxor %xmm2,%xmm0 | |
2271 pxor %xmm3,%xmm2 | |
2272 movdqu %xmm0,-16(%edx) | |
2273 decl %ecx | |
2274 jnz .L107loop_key192 | |
2275 movl $11,%ecx | |
2276 movl %ecx,32(%edx) | |
2277 jmp .L100good_key | |
2278 .align 16 | |
2279 .L09314rounds: | |
2280 movups 16(%eax),%xmm2 | 2096 movups 16(%eax),%xmm2 |
| 2097 movl $13,%ecx |
2281 leal 16(%edx),%edx | 2098 leal 16(%edx),%edx |
2282 cmpl $268435456,%ebp | |
2283 je .L10814rounds_alt | |
2284 movl $13,%ecx | |
2285 movups %xmm0,-32(%edx) | 2099 movups %xmm0,-32(%edx) |
2286 movups %xmm2,-16(%edx) | 2100 movups %xmm2,-16(%edx) |
2287 .byte 102,15,58,223,202,1 | 2101 .byte 102,15,58,223,202,1 |
2288 call .L109key_256a_cold | 2102 call .L099key_256a_cold |
2289 .byte 102,15,58,223,200,1 | 2103 .byte 102,15,58,223,200,1 |
2290 call .L110key_256b | 2104 call .L100key_256b |
2291 .byte 102,15,58,223,202,2 | 2105 .byte 102,15,58,223,202,2 |
2292 call .L111key_256a | 2106 call .L101key_256a |
2293 .byte 102,15,58,223,200,2 | 2107 .byte 102,15,58,223,200,2 |
2294 call .L110key_256b | 2108 call .L100key_256b |
2295 .byte 102,15,58,223,202,4 | 2109 .byte 102,15,58,223,202,4 |
2296 call .L111key_256a | 2110 call .L101key_256a |
2297 .byte 102,15,58,223,200,4 | 2111 .byte 102,15,58,223,200,4 |
2298 call .L110key_256b | 2112 call .L100key_256b |
2299 .byte 102,15,58,223,202,8 | 2113 .byte 102,15,58,223,202,8 |
2300 call .L111key_256a | 2114 call .L101key_256a |
2301 .byte 102,15,58,223,200,8 | 2115 .byte 102,15,58,223,200,8 |
2302 call .L110key_256b | 2116 call .L100key_256b |
2303 .byte 102,15,58,223,202,16 | 2117 .byte 102,15,58,223,202,16 |
2304 call .L111key_256a | 2118 call .L101key_256a |
2305 .byte 102,15,58,223,200,16 | 2119 .byte 102,15,58,223,200,16 |
2306 call .L110key_256b | 2120 call .L100key_256b |
2307 .byte 102,15,58,223,202,32 | 2121 .byte 102,15,58,223,202,32 |
2308 call .L111key_256a | 2122 call .L101key_256a |
2309 .byte 102,15,58,223,200,32 | 2123 .byte 102,15,58,223,200,32 |
2310 call .L110key_256b | 2124 call .L100key_256b |
2311 .byte 102,15,58,223,202,64 | 2125 .byte 102,15,58,223,202,64 |
2312 call .L111key_256a | 2126 call .L101key_256a |
2313 movups %xmm0,(%edx) | 2127 movups %xmm0,(%edx) |
2314 movl %ecx,16(%edx) | 2128 movl %ecx,16(%edx) |
2315 xorl %eax,%eax | 2129 xorl %eax,%eax |
2316 jmp .L100good_key | 2130 ret |
2317 .align 16 | 2131 .align 16 |
2318 .L111key_256a: | 2132 .L101key_256a: |
2319 movups %xmm2,(%edx) | 2133 movups %xmm2,(%edx) |
2320 leal 16(%edx),%edx | 2134 leal 16(%edx),%edx |
2321 .L109key_256a_cold: | 2135 .L099key_256a_cold: |
2322 shufps $16,%xmm0,%xmm4 | 2136 shufps $16,%xmm0,%xmm4 |
2323 xorps %xmm4,%xmm0 | 2137 xorps %xmm4,%xmm0 |
2324 shufps $140,%xmm0,%xmm4 | 2138 shufps $140,%xmm0,%xmm4 |
2325 xorps %xmm4,%xmm0 | 2139 xorps %xmm4,%xmm0 |
2326 shufps $255,%xmm1,%xmm1 | 2140 shufps $255,%xmm1,%xmm1 |
2327 xorps %xmm1,%xmm0 | 2141 xorps %xmm1,%xmm0 |
2328 ret | 2142 ret |
2329 .align 16 | 2143 .align 16 |
2330 .L110key_256b: | 2144 .L100key_256b: |
2331 movups %xmm0,(%edx) | 2145 movups %xmm0,(%edx) |
2332 leal 16(%edx),%edx | 2146 leal 16(%edx),%edx |
2333 shufps $16,%xmm2,%xmm4 | 2147 shufps $16,%xmm2,%xmm4 |
2334 xorps %xmm4,%xmm2 | 2148 xorps %xmm4,%xmm2 |
2335 shufps $140,%xmm2,%xmm4 | 2149 shufps $140,%xmm2,%xmm4 |
2336 xorps %xmm4,%xmm2 | 2150 xorps %xmm4,%xmm2 |
2337 shufps $170,%xmm1,%xmm1 | 2151 shufps $170,%xmm1,%xmm1 |
2338 xorps %xmm1,%xmm2 | 2152 xorps %xmm1,%xmm2 |
2339 ret | 2153 ret |
2340 .align 16 | 2154 .align 4 |
2341 .L10814rounds_alt: | 2155 .L088bad_pointer: |
2342 movdqa (%ebx),%xmm5 | 2156 movl $-1,%eax |
2343 movdqa 32(%ebx),%xmm4 | |
2344 movl $7,%ecx | |
2345 movdqu %xmm0,-32(%edx) | |
2346 movdqa %xmm2,%xmm1 | |
2347 movdqu %xmm2,-16(%edx) | |
2348 .L112loop_key256: | |
2349 .byte 102,15,56,0,213 | |
2350 .byte 102,15,56,221,212 | |
2351 movdqa %xmm0,%xmm3 | |
2352 pslldq $4,%xmm0 | |
2353 pxor %xmm0,%xmm3 | |
2354 pslldq $4,%xmm0 | |
2355 pxor %xmm0,%xmm3 | |
2356 pslldq $4,%xmm0 | |
2357 pxor %xmm3,%xmm0 | |
2358 pslld $1,%xmm4 | |
2359 pxor %xmm2,%xmm0 | |
2360 movdqu %xmm0,(%edx) | |
2361 decl %ecx | |
2362 jz .L113done_key256 | |
2363 pshufd $255,%xmm0,%xmm2 | |
2364 pxor %xmm3,%xmm3 | |
2365 .byte 102,15,56,221,211 | |
2366 movdqa %xmm1,%xmm3 | |
2367 pslldq $4,%xmm1 | |
2368 pxor %xmm1,%xmm3 | |
2369 pslldq $4,%xmm1 | |
2370 pxor %xmm1,%xmm3 | |
2371 pslldq $4,%xmm1 | |
2372 pxor %xmm3,%xmm1 | |
2373 pxor %xmm1,%xmm2 | |
2374 movdqu %xmm2,16(%edx) | |
2375 leal 32(%edx),%edx | |
2376 movdqa %xmm2,%xmm1 | |
2377 jmp .L112loop_key256 | |
2378 .L113done_key256: | |
2379 movl $13,%ecx | |
2380 movl %ecx,16(%edx) | |
2381 .L100good_key: | |
2382 pxor %xmm0,%xmm0 | |
2383 pxor %xmm1,%xmm1 | |
2384 pxor %xmm2,%xmm2 | |
2385 pxor %xmm3,%xmm3 | |
2386 pxor %xmm4,%xmm4 | |
2387 pxor %xmm5,%xmm5 | |
2388 xorl %eax,%eax | |
2389 popl %ebx | |
2390 popl %ebp | |
2391 ret | 2157 ret |
2392 .align 4 | 2158 .align 4 |
2393 .L091bad_pointer: | 2159 .L091bad_keybits: |
2394 movl $-1,%eax | |
2395 popl %ebx | |
2396 popl %ebp | |
2397 ret | |
2398 .align 4 | |
2399 .L095bad_keybits: | |
2400 pxor %xmm0,%xmm0 | |
2401 movl $-2,%eax | 2160 movl $-2,%eax |
2402 popl %ebx | |
2403 popl %ebp | |
2404 ret | 2161 ret |
2405 .size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key | 2162 .size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key |
2406 .globl aesni_set_encrypt_key | 2163 .globl aesni_set_encrypt_key |
2407 .hidden aesni_set_encrypt_key | 2164 .hidden aesni_set_encrypt_key |
2408 .type aesni_set_encrypt_key,@function | 2165 .type aesni_set_encrypt_key,@function |
2409 .align 16 | 2166 .align 16 |
2410 aesni_set_encrypt_key: | 2167 aesni_set_encrypt_key: |
2411 .L_aesni_set_encrypt_key_begin: | 2168 .L_aesni_set_encrypt_key_begin: |
2412 movl 4(%esp),%eax | 2169 movl 4(%esp),%eax |
2413 movl 8(%esp),%ecx | 2170 movl 8(%esp),%ecx |
2414 movl 12(%esp),%edx | 2171 movl 12(%esp),%edx |
2415 call _aesni_set_encrypt_key | 2172 call _aesni_set_encrypt_key |
2416 ret | 2173 ret |
2417 .size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin | 2174 .size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin |
2418 .globl aesni_set_decrypt_key | 2175 .globl aesni_set_decrypt_key |
2419 .hidden aesni_set_decrypt_key | 2176 .hidden aesni_set_decrypt_key |
2420 .type aesni_set_decrypt_key,@function | 2177 .type aesni_set_decrypt_key,@function |
2421 .align 16 | 2178 .align 16 |
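# [editor's note] The decrypt schedule below is the encrypt schedule with
# the round keys reversed and the inner keys run through aesimc
# (.byte 102,15,56,219), per AES's equivalent inverse cipher construction.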
2422 aesni_set_decrypt_key: | 2179 aesni_set_decrypt_key: |
2423 .L_aesni_set_decrypt_key_begin: | 2180 .L_aesni_set_decrypt_key_begin: |
2424 movl 4(%esp),%eax | 2181 movl 4(%esp),%eax |
2425 movl 8(%esp),%ecx | 2182 movl 8(%esp),%ecx |
2426 movl 12(%esp),%edx | 2183 movl 12(%esp),%edx |
2427 call _aesni_set_encrypt_key | 2184 call _aesni_set_encrypt_key |
2428 movl 12(%esp),%edx | 2185 movl 12(%esp),%edx |
2429 shll $4,%ecx | 2186 shll $4,%ecx |
2430 testl %eax,%eax | 2187 testl %eax,%eax |
2431 jnz .L114dec_key_ret | 2188 jnz .L102dec_key_ret |
2432 leal 16(%edx,%ecx,1),%eax | 2189 leal 16(%edx,%ecx,1),%eax |
2433 movups (%edx),%xmm0 | 2190 movups (%edx),%xmm0 |
2434 movups (%eax),%xmm1 | 2191 movups (%eax),%xmm1 |
2435 movups %xmm0,(%eax) | 2192 movups %xmm0,(%eax) |
2436 movups %xmm1,(%edx) | 2193 movups %xmm1,(%edx) |
2437 leal 16(%edx),%edx | 2194 leal 16(%edx),%edx |
2438 leal -16(%eax),%eax | 2195 leal -16(%eax),%eax |
2439 .L115dec_key_inverse: | 2196 .L103dec_key_inverse: |
2440 movups (%edx),%xmm0 | 2197 movups (%edx),%xmm0 |
2441 movups (%eax),%xmm1 | 2198 movups (%eax),%xmm1 |
2442 .byte 102,15,56,219,192 | 2199 .byte 102,15,56,219,192 |
2443 .byte 102,15,56,219,201 | 2200 .byte 102,15,56,219,201 |
2444 leal 16(%edx),%edx | 2201 leal 16(%edx),%edx |
2445 leal -16(%eax),%eax | 2202 leal -16(%eax),%eax |
2446 movups %xmm0,16(%eax) | 2203 movups %xmm0,16(%eax) |
2447 movups %xmm1,-16(%edx) | 2204 movups %xmm1,-16(%edx) |
2448 cmpl %edx,%eax | 2205 cmpl %edx,%eax |
2449 ja .L115dec_key_inverse | 2206 ja .L103dec_key_inverse |
2450 movups (%edx),%xmm0 | 2207 movups (%edx),%xmm0 |
2451 .byte 102,15,56,219,192 | 2208 .byte 102,15,56,219,192 |
2452 movups %xmm0,(%edx) | 2209 movups %xmm0,(%edx) |
2453 pxor %xmm0,%xmm0 | |
2454 pxor %xmm1,%xmm1 | |
2455 xorl %eax,%eax | 2210 xorl %eax,%eax |
2456 .L114dec_key_ret: | 2211 .L102dec_key_ret: |
2457 ret | 2212 ret |
2458 .size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin | 2213 .size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin |
2459 .align 64 | |
2460 .Lkey_const: | |
2461 .long 202313229,202313229,202313229,202313229 | |
2462 .long 67569157,67569157,67569157,67569157 | |
2463 .long 1,1,1,1 | |
2464 .long 27,27,27,27 | |
2465 .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 | 2214 .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 |
2466 .byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 | 2215 .byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 |
2467 .byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 | 2216 .byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 |
2468 .byte 115,108,46,111,114,103,62,0 | 2217 .byte 115,108,46,111,114,103,62,0 |
2469 #endif | 2218 #endif |