OLD | NEW |
1 #if defined(__i386__) | 1 #if defined(__i386__) |
2 .file "src/crypto/aes/asm/aesni-x86.S" | 2 .file "src/crypto/aes/asm/aesni-x86.S" |
3 .text | 3 .text |
4 .globl aesni_encrypt | 4 .globl aesni_encrypt |
5 .hidden aesni_encrypt | 5 .hidden aesni_encrypt |
6 .type aesni_encrypt,@function | 6 .type aesni_encrypt,@function |
7 .align 16 | 7 .align 16 |
8 aesni_encrypt: | 8 aesni_encrypt: |
9 .L_aesni_encrypt_begin: | 9 .L_aesni_encrypt_begin: |
10 movl 4(%esp),%eax | 10 movl 4(%esp),%eax |
11 movl 12(%esp),%edx | 11 movl 12(%esp),%edx |
12 movups (%eax),%xmm2 | 12 movups (%eax),%xmm2 |
13 movl 240(%edx),%ecx | 13 movl 240(%edx),%ecx |
14 movl 8(%esp),%eax | 14 movl 8(%esp),%eax |
15 movups (%edx),%xmm0 | 15 movups (%edx),%xmm0 |
16 movups 16(%edx),%xmm1 | 16 movups 16(%edx),%xmm1 |
17 leal 32(%edx),%edx | 17 leal 32(%edx),%edx |
18 xorps %xmm0,%xmm2 | 18 xorps %xmm0,%xmm2 |
19 .L000enc1_loop_1: | 19 .L000enc1_loop_1: |
20 .byte 102,15,56,220,209 | 20 .byte 102,15,56,220,209 |
21 decl %ecx | 21 decl %ecx |
22 movups (%edx),%xmm1 | 22 movups (%edx),%xmm1 |
23 leal 16(%edx),%edx | 23 leal 16(%edx),%edx |
24 jnz .L000enc1_loop_1 | 24 jnz .L000enc1_loop_1 |
25 .byte 102,15,56,221,209 | 25 .byte 102,15,56,221,209 |
| 26 pxor %xmm0,%xmm0 |
| 27 pxor %xmm1,%xmm1 |
26 movups %xmm2,(%eax) | 28 movups %xmm2,(%eax) |
| 29 pxor %xmm2,%xmm2 |
27 ret | 30 ret |
28 .size aesni_encrypt,.-.L_aesni_encrypt_begin | 31 .size aesni_encrypt,.-.L_aesni_encrypt_begin |
29 .globl aesni_decrypt | 32 .globl aesni_decrypt |
30 .hidden aesni_decrypt | 33 .hidden aesni_decrypt |
31 .type aesni_decrypt,@function | 34 .type aesni_decrypt,@function |
32 .align 16 | 35 .align 16 |
33 aesni_decrypt: | 36 aesni_decrypt: |
34 .L_aesni_decrypt_begin: | 37 .L_aesni_decrypt_begin: |
35 movl 4(%esp),%eax | 38 movl 4(%esp),%eax |
36 movl 12(%esp),%edx | 39 movl 12(%esp),%edx |
37 movups (%eax),%xmm2 | 40 movups (%eax),%xmm2 |
38 movl 240(%edx),%ecx | 41 movl 240(%edx),%ecx |
39 movl 8(%esp),%eax | 42 movl 8(%esp),%eax |
40 movups (%edx),%xmm0 | 43 movups (%edx),%xmm0 |
41 movups 16(%edx),%xmm1 | 44 movups 16(%edx),%xmm1 |
42 leal 32(%edx),%edx | 45 leal 32(%edx),%edx |
43 xorps %xmm0,%xmm2 | 46 xorps %xmm0,%xmm2 |
44 .L001dec1_loop_2: | 47 .L001dec1_loop_2: |
45 .byte 102,15,56,222,209 | 48 .byte 102,15,56,222,209 |
46 decl %ecx | 49 decl %ecx |
47 movups (%edx),%xmm1 | 50 movups (%edx),%xmm1 |
48 leal 16(%edx),%edx | 51 leal 16(%edx),%edx |
49 jnz .L001dec1_loop_2 | 52 jnz .L001dec1_loop_2 |
50 .byte 102,15,56,223,209 | 53 .byte 102,15,56,223,209 |
| 54 pxor %xmm0,%xmm0 |
| 55 pxor %xmm1,%xmm1 |
51 movups %xmm2,(%eax) | 56 movups %xmm2,(%eax) |
| 57 pxor %xmm2,%xmm2 |
52 ret | 58 ret |
53 .size aesni_decrypt,.-.L_aesni_decrypt_begin | 59 .size aesni_decrypt,.-.L_aesni_decrypt_begin |
54 .hidden _aesni_encrypt2 | 60 .hidden _aesni_encrypt2 |
55 .type _aesni_encrypt2,@function | 61 .type _aesni_encrypt2,@function |
56 .align 16 | 62 .align 16 |
57 _aesni_encrypt2: | 63 _aesni_encrypt2: |
58 movups (%edx),%xmm0 | 64 movups (%edx),%xmm0 |
59 shll $4,%ecx | 65 shll $4,%ecx |
60 movups 16(%edx),%xmm1 | 66 movups 16(%edx),%xmm1 |
61 xorps %xmm0,%xmm2 | 67 xorps %xmm0,%xmm2 |
(...skipping 200 matching lines...)
262 pxor %xmm0,%xmm3 | 268 pxor %xmm0,%xmm3 |
263 pxor %xmm0,%xmm4 | 269 pxor %xmm0,%xmm4 |
264 .byte 102,15,56,220,209 | 270 .byte 102,15,56,220,209 |
265 pxor %xmm0,%xmm5 | 271 pxor %xmm0,%xmm5 |
266 pxor %xmm0,%xmm6 | 272 pxor %xmm0,%xmm6 |
267 .byte 102,15,56,220,217 | 273 .byte 102,15,56,220,217 |
268 leal 32(%edx,%ecx,1),%edx | 274 leal 32(%edx,%ecx,1),%edx |
269 negl %ecx | 275 negl %ecx |
270 .byte 102,15,56,220,225 | 276 .byte 102,15,56,220,225 |
271 pxor %xmm0,%xmm7 | 277 pxor %xmm0,%xmm7 |
| 278 movups (%edx,%ecx,1),%xmm0 |
272 addl $16,%ecx | 279 addl $16,%ecx |
| 280 jmp .L008_aesni_encrypt6_inner |
| 281 .align 16 |
| 282 .L009enc6_loop: |
| 283 .byte 102,15,56,220,209 |
| 284 .byte 102,15,56,220,217 |
| 285 .byte 102,15,56,220,225 |
| 286 .L008_aesni_encrypt6_inner: |
273 .byte 102,15,56,220,233 | 287 .byte 102,15,56,220,233 |
274 .byte 102,15,56,220,241 | 288 .byte 102,15,56,220,241 |
275 .byte 102,15,56,220,249 | 289 .byte 102,15,56,220,249 |
276 movups -16(%edx,%ecx,1),%xmm0 | |
277 jmp .L_aesni_encrypt6_enter | |
278 .align 16 | |
279 .L008enc6_loop: | |
280 .byte 102,15,56,220,209 | |
281 .byte 102,15,56,220,217 | |
282 .byte 102,15,56,220,225 | |
283 .byte 102,15,56,220,233 | |
284 .byte 102,15,56,220,241 | |
285 .byte 102,15,56,220,249 | |
286 .L_aesni_encrypt6_enter: | 290 .L_aesni_encrypt6_enter: |
287 movups (%edx,%ecx,1),%xmm1 | 291 movups (%edx,%ecx,1),%xmm1 |
288 addl $32,%ecx | 292 addl $32,%ecx |
289 .byte 102,15,56,220,208 | 293 .byte 102,15,56,220,208 |
290 .byte 102,15,56,220,216 | 294 .byte 102,15,56,220,216 |
291 .byte 102,15,56,220,224 | 295 .byte 102,15,56,220,224 |
292 .byte 102,15,56,220,232 | 296 .byte 102,15,56,220,232 |
293 .byte 102,15,56,220,240 | 297 .byte 102,15,56,220,240 |
294 .byte 102,15,56,220,248 | 298 .byte 102,15,56,220,248 |
295 movups -16(%edx,%ecx,1),%xmm0 | 299 movups -16(%edx,%ecx,1),%xmm0 |
296 jnz .L008enc6_loop | 300 jnz .L009enc6_loop |
297 .byte 102,15,56,220,209 | 301 .byte 102,15,56,220,209 |
298 .byte 102,15,56,220,217 | 302 .byte 102,15,56,220,217 |
299 .byte 102,15,56,220,225 | 303 .byte 102,15,56,220,225 |
300 .byte 102,15,56,220,233 | 304 .byte 102,15,56,220,233 |
301 .byte 102,15,56,220,241 | 305 .byte 102,15,56,220,241 |
302 .byte 102,15,56,220,249 | 306 .byte 102,15,56,220,249 |
303 .byte 102,15,56,221,208 | 307 .byte 102,15,56,221,208 |
304 .byte 102,15,56,221,216 | 308 .byte 102,15,56,221,216 |
305 .byte 102,15,56,221,224 | 309 .byte 102,15,56,221,224 |
306 .byte 102,15,56,221,232 | 310 .byte 102,15,56,221,232 |
(...skipping 12 matching lines...)
319 pxor %xmm0,%xmm3 | 323 pxor %xmm0,%xmm3 |
320 pxor %xmm0,%xmm4 | 324 pxor %xmm0,%xmm4 |
321 .byte 102,15,56,222,209 | 325 .byte 102,15,56,222,209 |
322 pxor %xmm0,%xmm5 | 326 pxor %xmm0,%xmm5 |
323 pxor %xmm0,%xmm6 | 327 pxor %xmm0,%xmm6 |
324 .byte 102,15,56,222,217 | 328 .byte 102,15,56,222,217 |
325 leal 32(%edx,%ecx,1),%edx | 329 leal 32(%edx,%ecx,1),%edx |
326 negl %ecx | 330 negl %ecx |
327 .byte 102,15,56,222,225 | 331 .byte 102,15,56,222,225 |
328 pxor %xmm0,%xmm7 | 332 pxor %xmm0,%xmm7 |
| 333 movups (%edx,%ecx,1),%xmm0 |
329 addl $16,%ecx | 334 addl $16,%ecx |
| 335 jmp .L010_aesni_decrypt6_inner |
| 336 .align 16 |
| 337 .L011dec6_loop: |
| 338 .byte 102,15,56,222,209 |
| 339 .byte 102,15,56,222,217 |
| 340 .byte 102,15,56,222,225 |
| 341 .L010_aesni_decrypt6_inner: |
330 .byte 102,15,56,222,233 | 342 .byte 102,15,56,222,233 |
331 .byte 102,15,56,222,241 | 343 .byte 102,15,56,222,241 |
332 .byte 102,15,56,222,249 | 344 .byte 102,15,56,222,249 |
333 movups -16(%edx,%ecx,1),%xmm0 | |
334 jmp .L_aesni_decrypt6_enter | |
335 .align 16 | |
336 .L009dec6_loop: | |
337 .byte 102,15,56,222,209 | |
338 .byte 102,15,56,222,217 | |
339 .byte 102,15,56,222,225 | |
340 .byte 102,15,56,222,233 | |
341 .byte 102,15,56,222,241 | |
342 .byte 102,15,56,222,249 | |
343 .L_aesni_decrypt6_enter: | 345 .L_aesni_decrypt6_enter: |
344 movups (%edx,%ecx,1),%xmm1 | 346 movups (%edx,%ecx,1),%xmm1 |
345 addl $32,%ecx | 347 addl $32,%ecx |
346 .byte 102,15,56,222,208 | 348 .byte 102,15,56,222,208 |
347 .byte 102,15,56,222,216 | 349 .byte 102,15,56,222,216 |
348 .byte 102,15,56,222,224 | 350 .byte 102,15,56,222,224 |
349 .byte 102,15,56,222,232 | 351 .byte 102,15,56,222,232 |
350 .byte 102,15,56,222,240 | 352 .byte 102,15,56,222,240 |
351 .byte 102,15,56,222,248 | 353 .byte 102,15,56,222,248 |
352 movups -16(%edx,%ecx,1),%xmm0 | 354 movups -16(%edx,%ecx,1),%xmm0 |
353 jnz .L009dec6_loop | 355 jnz .L011dec6_loop |
354 .byte 102,15,56,222,209 | 356 .byte 102,15,56,222,209 |
355 .byte 102,15,56,222,217 | 357 .byte 102,15,56,222,217 |
356 .byte 102,15,56,222,225 | 358 .byte 102,15,56,222,225 |
357 .byte 102,15,56,222,233 | 359 .byte 102,15,56,222,233 |
358 .byte 102,15,56,222,241 | 360 .byte 102,15,56,222,241 |
359 .byte 102,15,56,222,249 | 361 .byte 102,15,56,222,249 |
360 .byte 102,15,56,223,208 | 362 .byte 102,15,56,223,208 |
361 .byte 102,15,56,223,216 | 363 .byte 102,15,56,223,216 |
362 .byte 102,15,56,223,224 | 364 .byte 102,15,56,223,224 |
363 .byte 102,15,56,223,232 | 365 .byte 102,15,56,223,232 |
(...skipping 10 matching lines...)
374 pushl %ebp | 376 pushl %ebp |
375 pushl %ebx | 377 pushl %ebx |
376 pushl %esi | 378 pushl %esi |
377 pushl %edi | 379 pushl %edi |
378 movl 20(%esp),%esi | 380 movl 20(%esp),%esi |
379 movl 24(%esp),%edi | 381 movl 24(%esp),%edi |
380 movl 28(%esp),%eax | 382 movl 28(%esp),%eax |
381 movl 32(%esp),%edx | 383 movl 32(%esp),%edx |
382 movl 36(%esp),%ebx | 384 movl 36(%esp),%ebx |
383 andl $-16,%eax | 385 andl $-16,%eax |
384 jz .L010ecb_ret | 386 jz .L012ecb_ret |
385 movl 240(%edx),%ecx | 387 movl 240(%edx),%ecx |
386 testl %ebx,%ebx | 388 testl %ebx,%ebx |
387 jz .L011ecb_decrypt | 389 jz .L013ecb_decrypt |
388 movl %edx,%ebp | 390 movl %edx,%ebp |
389 movl %ecx,%ebx | 391 movl %ecx,%ebx |
390 cmpl $96,%eax | 392 cmpl $96,%eax |
391 jb .L012ecb_enc_tail | 393 jb .L014ecb_enc_tail |
392 movdqu (%esi),%xmm2 | 394 movdqu (%esi),%xmm2 |
393 movdqu 16(%esi),%xmm3 | 395 movdqu 16(%esi),%xmm3 |
394 movdqu 32(%esi),%xmm4 | 396 movdqu 32(%esi),%xmm4 |
395 movdqu 48(%esi),%xmm5 | 397 movdqu 48(%esi),%xmm5 |
396 movdqu 64(%esi),%xmm6 | 398 movdqu 64(%esi),%xmm6 |
397 movdqu 80(%esi),%xmm7 | 399 movdqu 80(%esi),%xmm7 |
398 leal 96(%esi),%esi | 400 leal 96(%esi),%esi |
399 subl $96,%eax | 401 subl $96,%eax |
400 jmp .L013ecb_enc_loop6_enter | 402 jmp .L015ecb_enc_loop6_enter |
401 .align 16 | 403 .align 16 |
402 .L014ecb_enc_loop6: | 404 .L016ecb_enc_loop6: |
403 movups %xmm2,(%edi) | 405 movups %xmm2,(%edi) |
404 movdqu (%esi),%xmm2 | 406 movdqu (%esi),%xmm2 |
405 movups %xmm3,16(%edi) | 407 movups %xmm3,16(%edi) |
406 movdqu 16(%esi),%xmm3 | 408 movdqu 16(%esi),%xmm3 |
407 movups %xmm4,32(%edi) | 409 movups %xmm4,32(%edi) |
408 movdqu 32(%esi),%xmm4 | 410 movdqu 32(%esi),%xmm4 |
409 movups %xmm5,48(%edi) | 411 movups %xmm5,48(%edi) |
410 movdqu 48(%esi),%xmm5 | 412 movdqu 48(%esi),%xmm5 |
411 movups %xmm6,64(%edi) | 413 movups %xmm6,64(%edi) |
412 movdqu 64(%esi),%xmm6 | 414 movdqu 64(%esi),%xmm6 |
413 movups %xmm7,80(%edi) | 415 movups %xmm7,80(%edi) |
414 leal 96(%edi),%edi | 416 leal 96(%edi),%edi |
415 movdqu 80(%esi),%xmm7 | 417 movdqu 80(%esi),%xmm7 |
416 leal 96(%esi),%esi | 418 leal 96(%esi),%esi |
417 .L013ecb_enc_loop6_enter: | 419 .L015ecb_enc_loop6_enter: |
418 call _aesni_encrypt6 | 420 call _aesni_encrypt6 |
419 movl %ebp,%edx | 421 movl %ebp,%edx |
420 movl %ebx,%ecx | 422 movl %ebx,%ecx |
421 subl $96,%eax | 423 subl $96,%eax |
422 jnc .L014ecb_enc_loop6 | 424 jnc .L016ecb_enc_loop6 |
423 movups %xmm2,(%edi) | 425 movups %xmm2,(%edi) |
424 movups %xmm3,16(%edi) | 426 movups %xmm3,16(%edi) |
425 movups %xmm4,32(%edi) | 427 movups %xmm4,32(%edi) |
426 movups %xmm5,48(%edi) | 428 movups %xmm5,48(%edi) |
427 movups %xmm6,64(%edi) | 429 movups %xmm6,64(%edi) |
428 movups %xmm7,80(%edi) | 430 movups %xmm7,80(%edi) |
429 leal 96(%edi),%edi | 431 leal 96(%edi),%edi |
430 addl $96,%eax | 432 addl $96,%eax |
431 jz .L010ecb_ret | 433 jz .L012ecb_ret |
432 .L012ecb_enc_tail: | 434 .L014ecb_enc_tail: |
433 movups (%esi),%xmm2 | 435 movups (%esi),%xmm2 |
434 cmpl $32,%eax | 436 cmpl $32,%eax |
435 jb .L015ecb_enc_one | 437 jb .L017ecb_enc_one |
436 movups 16(%esi),%xmm3 | 438 movups 16(%esi),%xmm3 |
437 je .L016ecb_enc_two | 439 je .L018ecb_enc_two |
438 movups 32(%esi),%xmm4 | 440 movups 32(%esi),%xmm4 |
439 cmpl $64,%eax | 441 cmpl $64,%eax |
440 jb .L017ecb_enc_three | 442 jb .L019ecb_enc_three |
441 movups 48(%esi),%xmm5 | 443 movups 48(%esi),%xmm5 |
442 je .L018ecb_enc_four | 444 je .L020ecb_enc_four |
443 movups 64(%esi),%xmm6 | 445 movups 64(%esi),%xmm6 |
444 xorps %xmm7,%xmm7 | 446 xorps %xmm7,%xmm7 |
445 call _aesni_encrypt6 | 447 call _aesni_encrypt6 |
446 movups %xmm2,(%edi) | 448 movups %xmm2,(%edi) |
447 movups %xmm3,16(%edi) | 449 movups %xmm3,16(%edi) |
448 movups %xmm4,32(%edi) | 450 movups %xmm4,32(%edi) |
449 movups %xmm5,48(%edi) | 451 movups %xmm5,48(%edi) |
450 movups %xmm6,64(%edi) | 452 movups %xmm6,64(%edi) |
451 jmp .L010ecb_ret | 453 jmp .L012ecb_ret |
452 .align 16 | 454 .align 16 |
453 .L015ecb_enc_one: | 455 .L017ecb_enc_one: |
454 movups (%edx),%xmm0 | 456 movups (%edx),%xmm0 |
455 movups 16(%edx),%xmm1 | 457 movups 16(%edx),%xmm1 |
456 leal 32(%edx),%edx | 458 leal 32(%edx),%edx |
457 xorps %xmm0,%xmm2 | 459 xorps %xmm0,%xmm2 |
458 .L019enc1_loop_3: | 460 .L021enc1_loop_3: |
459 .byte 102,15,56,220,209 | 461 .byte 102,15,56,220,209 |
460 decl %ecx | 462 decl %ecx |
461 movups (%edx),%xmm1 | 463 movups (%edx),%xmm1 |
462 leal 16(%edx),%edx | 464 leal 16(%edx),%edx |
463 jnz .L019enc1_loop_3 | 465 jnz .L021enc1_loop_3 |
464 .byte 102,15,56,221,209 | 466 .byte 102,15,56,221,209 |
465 movups %xmm2,(%edi) | 467 movups %xmm2,(%edi) |
466 jmp .L010ecb_ret | 468 jmp .L012ecb_ret |
467 .align 16 | 469 .align 16 |
468 .L016ecb_enc_two: | 470 .L018ecb_enc_two: |
469 call _aesni_encrypt2 | 471 call _aesni_encrypt2 |
470 movups %xmm2,(%edi) | 472 movups %xmm2,(%edi) |
471 movups %xmm3,16(%edi) | 473 movups %xmm3,16(%edi) |
472 jmp .L010ecb_ret | 474 jmp .L012ecb_ret |
473 .align 16 | 475 .align 16 |
474 .L017ecb_enc_three: | 476 .L019ecb_enc_three: |
475 call _aesni_encrypt3 | 477 call _aesni_encrypt3 |
476 movups %xmm2,(%edi) | 478 movups %xmm2,(%edi) |
477 movups %xmm3,16(%edi) | 479 movups %xmm3,16(%edi) |
478 movups %xmm4,32(%edi) | 480 movups %xmm4,32(%edi) |
479 jmp .L010ecb_ret | 481 jmp .L012ecb_ret |
480 .align 16 | 482 .align 16 |
481 .L018ecb_enc_four: | 483 .L020ecb_enc_four: |
482 call _aesni_encrypt4 | 484 call _aesni_encrypt4 |
483 movups %xmm2,(%edi) | 485 movups %xmm2,(%edi) |
484 movups %xmm3,16(%edi) | 486 movups %xmm3,16(%edi) |
485 movups %xmm4,32(%edi) | 487 movups %xmm4,32(%edi) |
486 movups %xmm5,48(%edi) | 488 movups %xmm5,48(%edi) |
487 jmp .L010ecb_ret | 489 jmp .L012ecb_ret |
488 .align 16 | 490 .align 16 |
489 .L011ecb_decrypt: | 491 .L013ecb_decrypt: |
490 movl %edx,%ebp | 492 movl %edx,%ebp |
491 movl %ecx,%ebx | 493 movl %ecx,%ebx |
492 cmpl $96,%eax | 494 cmpl $96,%eax |
493 jb .L020ecb_dec_tail | 495 jb .L022ecb_dec_tail |
494 movdqu (%esi),%xmm2 | 496 movdqu (%esi),%xmm2 |
495 movdqu 16(%esi),%xmm3 | 497 movdqu 16(%esi),%xmm3 |
496 movdqu 32(%esi),%xmm4 | 498 movdqu 32(%esi),%xmm4 |
497 movdqu 48(%esi),%xmm5 | 499 movdqu 48(%esi),%xmm5 |
498 movdqu 64(%esi),%xmm6 | 500 movdqu 64(%esi),%xmm6 |
499 movdqu 80(%esi),%xmm7 | 501 movdqu 80(%esi),%xmm7 |
500 leal 96(%esi),%esi | 502 leal 96(%esi),%esi |
501 subl $96,%eax | 503 subl $96,%eax |
502 jmp .L021ecb_dec_loop6_enter | 504 jmp .L023ecb_dec_loop6_enter |
503 .align 16 | 505 .align 16 |
504 .L022ecb_dec_loop6: | 506 .L024ecb_dec_loop6: |
505 movups %xmm2,(%edi) | 507 movups %xmm2,(%edi) |
506 movdqu (%esi),%xmm2 | 508 movdqu (%esi),%xmm2 |
507 movups %xmm3,16(%edi) | 509 movups %xmm3,16(%edi) |
508 movdqu 16(%esi),%xmm3 | 510 movdqu 16(%esi),%xmm3 |
509 movups %xmm4,32(%edi) | 511 movups %xmm4,32(%edi) |
510 movdqu 32(%esi),%xmm4 | 512 movdqu 32(%esi),%xmm4 |
511 movups %xmm5,48(%edi) | 513 movups %xmm5,48(%edi) |
512 movdqu 48(%esi),%xmm5 | 514 movdqu 48(%esi),%xmm5 |
513 movups %xmm6,64(%edi) | 515 movups %xmm6,64(%edi) |
514 movdqu 64(%esi),%xmm6 | 516 movdqu 64(%esi),%xmm6 |
515 movups %xmm7,80(%edi) | 517 movups %xmm7,80(%edi) |
516 leal 96(%edi),%edi | 518 leal 96(%edi),%edi |
517 movdqu 80(%esi),%xmm7 | 519 movdqu 80(%esi),%xmm7 |
518 leal 96(%esi),%esi | 520 leal 96(%esi),%esi |
519 .L021ecb_dec_loop6_enter: | 521 .L023ecb_dec_loop6_enter: |
520 call _aesni_decrypt6 | 522 call _aesni_decrypt6 |
521 movl %ebp,%edx | 523 movl %ebp,%edx |
522 movl %ebx,%ecx | 524 movl %ebx,%ecx |
523 subl $96,%eax | 525 subl $96,%eax |
524 jnc .L022ecb_dec_loop6 | 526 jnc .L024ecb_dec_loop6 |
525 movups %xmm2,(%edi) | 527 movups %xmm2,(%edi) |
526 movups %xmm3,16(%edi) | 528 movups %xmm3,16(%edi) |
527 movups %xmm4,32(%edi) | 529 movups %xmm4,32(%edi) |
528 movups %xmm5,48(%edi) | 530 movups %xmm5,48(%edi) |
529 movups %xmm6,64(%edi) | 531 movups %xmm6,64(%edi) |
530 movups %xmm7,80(%edi) | 532 movups %xmm7,80(%edi) |
531 leal 96(%edi),%edi | 533 leal 96(%edi),%edi |
532 addl $96,%eax | 534 addl $96,%eax |
533 jz .L010ecb_ret | 535 jz .L012ecb_ret |
534 .L020ecb_dec_tail: | 536 .L022ecb_dec_tail: |
535 movups (%esi),%xmm2 | 537 movups (%esi),%xmm2 |
536 cmpl $32,%eax | 538 cmpl $32,%eax |
537 jb .L023ecb_dec_one | 539 jb .L025ecb_dec_one |
538 movups 16(%esi),%xmm3 | 540 movups 16(%esi),%xmm3 |
539 je .L024ecb_dec_two | 541 je .L026ecb_dec_two |
540 movups 32(%esi),%xmm4 | 542 movups 32(%esi),%xmm4 |
541 cmpl $64,%eax | 543 cmpl $64,%eax |
542 jb .L025ecb_dec_three | 544 jb .L027ecb_dec_three |
543 movups 48(%esi),%xmm5 | 545 movups 48(%esi),%xmm5 |
544 je .L026ecb_dec_four | 546 je .L028ecb_dec_four |
545 movups 64(%esi),%xmm6 | 547 movups 64(%esi),%xmm6 |
546 xorps %xmm7,%xmm7 | 548 xorps %xmm7,%xmm7 |
547 call _aesni_decrypt6 | 549 call _aesni_decrypt6 |
548 movups %xmm2,(%edi) | 550 movups %xmm2,(%edi) |
549 movups %xmm3,16(%edi) | 551 movups %xmm3,16(%edi) |
550 movups %xmm4,32(%edi) | 552 movups %xmm4,32(%edi) |
551 movups %xmm5,48(%edi) | 553 movups %xmm5,48(%edi) |
552 movups %xmm6,64(%edi) | 554 movups %xmm6,64(%edi) |
553 jmp .L010ecb_ret | 555 jmp .L012ecb_ret |
554 .align 16 | 556 .align 16 |
555 .L023ecb_dec_one: | 557 .L025ecb_dec_one: |
556 movups (%edx),%xmm0 | 558 movups (%edx),%xmm0 |
557 movups 16(%edx),%xmm1 | 559 movups 16(%edx),%xmm1 |
558 leal 32(%edx),%edx | 560 leal 32(%edx),%edx |
559 xorps %xmm0,%xmm2 | 561 xorps %xmm0,%xmm2 |
560 .L027dec1_loop_4: | 562 .L029dec1_loop_4: |
561 .byte 102,15,56,222,209 | 563 .byte 102,15,56,222,209 |
562 decl %ecx | 564 decl %ecx |
563 movups (%edx),%xmm1 | 565 movups (%edx),%xmm1 |
564 leal 16(%edx),%edx | 566 leal 16(%edx),%edx |
565 jnz .L027dec1_loop_4 | 567 jnz .L029dec1_loop_4 |
566 .byte 102,15,56,223,209 | 568 .byte 102,15,56,223,209 |
567 movups %xmm2,(%edi) | 569 movups %xmm2,(%edi) |
568 jmp .L010ecb_ret | 570 jmp .L012ecb_ret |
569 .align 16 | 571 .align 16 |
570 .L024ecb_dec_two: | 572 .L026ecb_dec_two: |
571 call _aesni_decrypt2 | 573 call _aesni_decrypt2 |
572 movups %xmm2,(%edi) | 574 movups %xmm2,(%edi) |
573 movups %xmm3,16(%edi) | 575 movups %xmm3,16(%edi) |
574 jmp .L010ecb_ret | 576 jmp .L012ecb_ret |
575 .align 16 | 577 .align 16 |
576 .L025ecb_dec_three: | 578 .L027ecb_dec_three: |
577 call _aesni_decrypt3 | 579 call _aesni_decrypt3 |
578 movups %xmm2,(%edi) | 580 movups %xmm2,(%edi) |
579 movups %xmm3,16(%edi) | 581 movups %xmm3,16(%edi) |
580 movups %xmm4,32(%edi) | 582 movups %xmm4,32(%edi) |
581 jmp .L010ecb_ret | 583 jmp .L012ecb_ret |
582 .align 16 | 584 .align 16 |
583 .L026ecb_dec_four: | 585 .L028ecb_dec_four: |
584 call _aesni_decrypt4 | 586 call _aesni_decrypt4 |
585 movups %xmm2,(%edi) | 587 movups %xmm2,(%edi) |
586 movups %xmm3,16(%edi) | 588 movups %xmm3,16(%edi) |
587 movups %xmm4,32(%edi) | 589 movups %xmm4,32(%edi) |
588 movups %xmm5,48(%edi) | 590 movups %xmm5,48(%edi) |
589 .L010ecb_ret: | 591 .L012ecb_ret: |
| 592 pxor %xmm0,%xmm0 |
| 593 pxor %xmm1,%xmm1 |
| 594 pxor %xmm2,%xmm2 |
| 595 pxor %xmm3,%xmm3 |
| 596 pxor %xmm4,%xmm4 |
| 597 pxor %xmm5,%xmm5 |
| 598 pxor %xmm6,%xmm6 |
| 599 pxor %xmm7,%xmm7 |
590 popl %edi | 600 popl %edi |
591 popl %esi | 601 popl %esi |
592 popl %ebx | 602 popl %ebx |
593 popl %ebp | 603 popl %ebp |
594 ret | 604 ret |
595 .size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin | 605 .size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin |
596 .globl aesni_ccm64_encrypt_blocks | 606 .globl aesni_ccm64_encrypt_blocks |
597 .hidden aesni_ccm64_encrypt_blocks | 607 .hidden aesni_ccm64_encrypt_blocks |
598 .type aesni_ccm64_encrypt_blocks,@function | 608 .type aesni_ccm64_encrypt_blocks,@function |
599 .align 16 | 609 .align 16 |
(...skipping 27 matching lines...)
627 movl %ebp,24(%esp) | 637 movl %ebp,24(%esp) |
628 movl %ebp,28(%esp) | 638 movl %ebp,28(%esp) |
629 shll $4,%ecx | 639 shll $4,%ecx |
630 movl $16,%ebx | 640 movl $16,%ebx |
631 leal (%edx),%ebp | 641 leal (%edx),%ebp |
632 movdqa (%esp),%xmm5 | 642 movdqa (%esp),%xmm5 |
633 movdqa %xmm7,%xmm2 | 643 movdqa %xmm7,%xmm2 |
634 leal 32(%edx,%ecx,1),%edx | 644 leal 32(%edx,%ecx,1),%edx |
635 subl %ecx,%ebx | 645 subl %ecx,%ebx |
636 .byte 102,15,56,0,253 | 646 .byte 102,15,56,0,253 |
637 .L028ccm64_enc_outer: | 647 .L030ccm64_enc_outer: |
638 movups (%ebp),%xmm0 | 648 movups (%ebp),%xmm0 |
639 movl %ebx,%ecx | 649 movl %ebx,%ecx |
640 movups (%esi),%xmm6 | 650 movups (%esi),%xmm6 |
641 xorps %xmm0,%xmm2 | 651 xorps %xmm0,%xmm2 |
642 movups 16(%ebp),%xmm1 | 652 movups 16(%ebp),%xmm1 |
643 xorps %xmm6,%xmm0 | 653 xorps %xmm6,%xmm0 |
644 xorps %xmm0,%xmm3 | 654 xorps %xmm0,%xmm3 |
645 movups 32(%ebp),%xmm0 | 655 movups 32(%ebp),%xmm0 |
646 .L029ccm64_enc2_loop: | 656 .L031ccm64_enc2_loop: |
647 .byte 102,15,56,220,209 | 657 .byte 102,15,56,220,209 |
648 .byte 102,15,56,220,217 | 658 .byte 102,15,56,220,217 |
649 movups (%edx,%ecx,1),%xmm1 | 659 movups (%edx,%ecx,1),%xmm1 |
650 addl $32,%ecx | 660 addl $32,%ecx |
651 .byte 102,15,56,220,208 | 661 .byte 102,15,56,220,208 |
652 .byte 102,15,56,220,216 | 662 .byte 102,15,56,220,216 |
653 movups -16(%edx,%ecx,1),%xmm0 | 663 movups -16(%edx,%ecx,1),%xmm0 |
654 jnz .L029ccm64_enc2_loop | 664 jnz .L031ccm64_enc2_loop |
655 .byte 102,15,56,220,209 | 665 .byte 102,15,56,220,209 |
656 .byte 102,15,56,220,217 | 666 .byte 102,15,56,220,217 |
657 paddq 16(%esp),%xmm7 | 667 paddq 16(%esp),%xmm7 |
658 decl %eax | 668 decl %eax |
659 .byte 102,15,56,221,208 | 669 .byte 102,15,56,221,208 |
660 .byte 102,15,56,221,216 | 670 .byte 102,15,56,221,216 |
661 leal 16(%esi),%esi | 671 leal 16(%esi),%esi |
662 xorps %xmm2,%xmm6 | 672 xorps %xmm2,%xmm6 |
663 movdqa %xmm7,%xmm2 | 673 movdqa %xmm7,%xmm2 |
664 movups %xmm6,(%edi) | 674 movups %xmm6,(%edi) |
665 .byte 102,15,56,0,213 | 675 .byte 102,15,56,0,213 |
666 leal 16(%edi),%edi | 676 leal 16(%edi),%edi |
667 jnz .L028ccm64_enc_outer | 677 jnz .L030ccm64_enc_outer |
668 movl 48(%esp),%esp | 678 movl 48(%esp),%esp |
669 movl 40(%esp),%edi | 679 movl 40(%esp),%edi |
670 movups %xmm3,(%edi) | 680 movups %xmm3,(%edi) |
| 681 pxor %xmm0,%xmm0 |
| 682 pxor %xmm1,%xmm1 |
| 683 pxor %xmm2,%xmm2 |
| 684 pxor %xmm3,%xmm3 |
| 685 pxor %xmm4,%xmm4 |
| 686 pxor %xmm5,%xmm5 |
| 687 pxor %xmm6,%xmm6 |
| 688 pxor %xmm7,%xmm7 |
671 popl %edi | 689 popl %edi |
672 popl %esi | 690 popl %esi |
673 popl %ebx | 691 popl %ebx |
674 popl %ebp | 692 popl %ebp |
675 ret | 693 ret |
676 .size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin | 694 .size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin |
677 .globl aesni_ccm64_decrypt_blocks | 695 .globl aesni_ccm64_decrypt_blocks |
678 .hidden aesni_ccm64_decrypt_blocks | 696 .hidden aesni_ccm64_decrypt_blocks |
679 .type aesni_ccm64_decrypt_blocks,@function | 697 .type aesni_ccm64_decrypt_blocks,@function |
680 .align 16 | 698 .align 16 |
(...skipping 28 matching lines...)
709 movl %ebp,28(%esp) | 727 movl %ebp,28(%esp) |
710 movdqa (%esp),%xmm5 | 728 movdqa (%esp),%xmm5 |
711 movdqa %xmm7,%xmm2 | 729 movdqa %xmm7,%xmm2 |
712 movl %edx,%ebp | 730 movl %edx,%ebp |
713 movl %ecx,%ebx | 731 movl %ecx,%ebx |
714 .byte 102,15,56,0,253 | 732 .byte 102,15,56,0,253 |
715 movups (%edx),%xmm0 | 733 movups (%edx),%xmm0 |
716 movups 16(%edx),%xmm1 | 734 movups 16(%edx),%xmm1 |
717 leal 32(%edx),%edx | 735 leal 32(%edx),%edx |
718 xorps %xmm0,%xmm2 | 736 xorps %xmm0,%xmm2 |
719 .L030enc1_loop_5: | 737 .L032enc1_loop_5: |
720 .byte 102,15,56,220,209 | 738 .byte 102,15,56,220,209 |
721 decl %ecx | 739 decl %ecx |
722 movups (%edx),%xmm1 | 740 movups (%edx),%xmm1 |
723 leal 16(%edx),%edx | 741 leal 16(%edx),%edx |
724 jnz .L030enc1_loop_5 | 742 jnz .L032enc1_loop_5 |
725 .byte 102,15,56,221,209 | 743 .byte 102,15,56,221,209 |
726 shll $4,%ebx | 744 shll $4,%ebx |
727 movl $16,%ecx | 745 movl $16,%ecx |
728 movups (%esi),%xmm6 | 746 movups (%esi),%xmm6 |
729 paddq 16(%esp),%xmm7 | 747 paddq 16(%esp),%xmm7 |
730 leal 16(%esi),%esi | 748 leal 16(%esi),%esi |
731 subl %ebx,%ecx | 749 subl %ebx,%ecx |
732 leal 32(%ebp,%ebx,1),%edx | 750 leal 32(%ebp,%ebx,1),%edx |
733 movl %ecx,%ebx | 751 movl %ecx,%ebx |
734 jmp .L031ccm64_dec_outer | 752 jmp .L033ccm64_dec_outer |
735 .align 16 | 753 .align 16 |
736 .L031ccm64_dec_outer: | 754 .L033ccm64_dec_outer: |
737 xorps %xmm2,%xmm6 | 755 xorps %xmm2,%xmm6 |
738 movdqa %xmm7,%xmm2 | 756 movdqa %xmm7,%xmm2 |
739 movups %xmm6,(%edi) | 757 movups %xmm6,(%edi) |
740 leal 16(%edi),%edi | 758 leal 16(%edi),%edi |
741 .byte 102,15,56,0,213 | 759 .byte 102,15,56,0,213 |
742 subl $1,%eax | 760 subl $1,%eax |
743 jz .L032ccm64_dec_break | 761 jz .L034ccm64_dec_break |
744 movups (%ebp),%xmm0 | 762 movups (%ebp),%xmm0 |
745 movl %ebx,%ecx | 763 movl %ebx,%ecx |
746 movups 16(%ebp),%xmm1 | 764 movups 16(%ebp),%xmm1 |
747 xorps %xmm0,%xmm6 | 765 xorps %xmm0,%xmm6 |
748 xorps %xmm0,%xmm2 | 766 xorps %xmm0,%xmm2 |
749 xorps %xmm6,%xmm3 | 767 xorps %xmm6,%xmm3 |
750 movups 32(%ebp),%xmm0 | 768 movups 32(%ebp),%xmm0 |
751 .L033ccm64_dec2_loop: | 769 .L035ccm64_dec2_loop: |
752 .byte 102,15,56,220,209 | 770 .byte 102,15,56,220,209 |
753 .byte 102,15,56,220,217 | 771 .byte 102,15,56,220,217 |
754 movups (%edx,%ecx,1),%xmm1 | 772 movups (%edx,%ecx,1),%xmm1 |
755 addl $32,%ecx | 773 addl $32,%ecx |
756 .byte 102,15,56,220,208 | 774 .byte 102,15,56,220,208 |
757 .byte 102,15,56,220,216 | 775 .byte 102,15,56,220,216 |
758 movups -16(%edx,%ecx,1),%xmm0 | 776 movups -16(%edx,%ecx,1),%xmm0 |
759 jnz .L033ccm64_dec2_loop | 777 jnz .L035ccm64_dec2_loop |
760 movups (%esi),%xmm6 | 778 movups (%esi),%xmm6 |
761 paddq 16(%esp),%xmm7 | 779 paddq 16(%esp),%xmm7 |
762 .byte 102,15,56,220,209 | 780 .byte 102,15,56,220,209 |
763 .byte 102,15,56,220,217 | 781 .byte 102,15,56,220,217 |
764 .byte 102,15,56,221,208 | 782 .byte 102,15,56,221,208 |
765 .byte 102,15,56,221,216 | 783 .byte 102,15,56,221,216 |
766 leal 16(%esi),%esi | 784 leal 16(%esi),%esi |
767 jmp .L031ccm64_dec_outer | 785 jmp .L033ccm64_dec_outer |
768 .align 16 | 786 .align 16 |
769 .L032ccm64_dec_break: | 787 .L034ccm64_dec_break: |
770 movl 240(%ebp),%ecx | 788 movl 240(%ebp),%ecx |
771 movl %ebp,%edx | 789 movl %ebp,%edx |
772 movups (%edx),%xmm0 | 790 movups (%edx),%xmm0 |
773 movups 16(%edx),%xmm1 | 791 movups 16(%edx),%xmm1 |
774 xorps %xmm0,%xmm6 | 792 xorps %xmm0,%xmm6 |
775 leal 32(%edx),%edx | 793 leal 32(%edx),%edx |
776 xorps %xmm6,%xmm3 | 794 xorps %xmm6,%xmm3 |
777 .L034enc1_loop_6: | 795 .L036enc1_loop_6: |
778 .byte 102,15,56,220,217 | 796 .byte 102,15,56,220,217 |
779 decl %ecx | 797 decl %ecx |
780 movups (%edx),%xmm1 | 798 movups (%edx),%xmm1 |
781 leal 16(%edx),%edx | 799 leal 16(%edx),%edx |
782 jnz .L034enc1_loop_6 | 800 jnz .L036enc1_loop_6 |
783 .byte 102,15,56,221,217 | 801 .byte 102,15,56,221,217 |
784 movl 48(%esp),%esp | 802 movl 48(%esp),%esp |
785 movl 40(%esp),%edi | 803 movl 40(%esp),%edi |
786 movups %xmm3,(%edi) | 804 movups %xmm3,(%edi) |
| 805 pxor %xmm0,%xmm0 |
| 806 pxor %xmm1,%xmm1 |
| 807 pxor %xmm2,%xmm2 |
| 808 pxor %xmm3,%xmm3 |
| 809 pxor %xmm4,%xmm4 |
| 810 pxor %xmm5,%xmm5 |
| 811 pxor %xmm6,%xmm6 |
| 812 pxor %xmm7,%xmm7 |
787 popl %edi | 813 popl %edi |
788 popl %esi | 814 popl %esi |
789 popl %ebx | 815 popl %ebx |
790 popl %ebp | 816 popl %ebp |
791 ret | 817 ret |
792 .size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin | 818 .size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin |
793 .globl aesni_ctr32_encrypt_blocks | 819 .globl aesni_ctr32_encrypt_blocks |
794 .hidden aesni_ctr32_encrypt_blocks | 820 .hidden aesni_ctr32_encrypt_blocks |
795 .type aesni_ctr32_encrypt_blocks,@function | 821 .type aesni_ctr32_encrypt_blocks,@function |
796 .align 16 | 822 .align 16 |
797 aesni_ctr32_encrypt_blocks: | 823 aesni_ctr32_encrypt_blocks: |
798 .L_aesni_ctr32_encrypt_blocks_begin: | 824 .L_aesni_ctr32_encrypt_blocks_begin: |
799 pushl %ebp | 825 pushl %ebp |
800 pushl %ebx | 826 pushl %ebx |
801 pushl %esi | 827 pushl %esi |
802 pushl %edi | 828 pushl %edi |
803 movl 20(%esp),%esi | 829 movl 20(%esp),%esi |
804 movl 24(%esp),%edi | 830 movl 24(%esp),%edi |
805 movl 28(%esp),%eax | 831 movl 28(%esp),%eax |
806 movl 32(%esp),%edx | 832 movl 32(%esp),%edx |
807 movl 36(%esp),%ebx | 833 movl 36(%esp),%ebx |
808 movl %esp,%ebp | 834 movl %esp,%ebp |
809 subl $88,%esp | 835 subl $88,%esp |
810 andl $-16,%esp | 836 andl $-16,%esp |
811 movl %ebp,80(%esp) | 837 movl %ebp,80(%esp) |
812 cmpl $1,%eax | 838 cmpl $1,%eax |
813 je .L035ctr32_one_shortcut | 839 je .L037ctr32_one_shortcut |
814 movdqu (%ebx),%xmm7 | 840 movdqu (%ebx),%xmm7 |
815 movl $202182159,(%esp) | 841 movl $202182159,(%esp) |
816 movl $134810123,4(%esp) | 842 movl $134810123,4(%esp) |
817 movl $67438087,8(%esp) | 843 movl $67438087,8(%esp) |
818 movl $66051,12(%esp) | 844 movl $66051,12(%esp) |
819 movl $6,%ecx | 845 movl $6,%ecx |
820 xorl %ebp,%ebp | 846 xorl %ebp,%ebp |
821 movl %ecx,16(%esp) | 847 movl %ecx,16(%esp) |
822 movl %ecx,20(%esp) | 848 movl %ecx,20(%esp) |
823 movl %ecx,24(%esp) | 849 movl %ecx,24(%esp) |
(...skipping 17 matching lines...)
841 incl %ebp | 867 incl %ebp |
842 .byte 102,15,58,34,205,2 | 868 .byte 102,15,58,34,205,2 |
843 movdqa %xmm0,48(%esp) | 869 movdqa %xmm0,48(%esp) |
844 .byte 102,15,56,0,194 | 870 .byte 102,15,56,0,194 |
845 movdqu (%edx),%xmm6 | 871 movdqu (%edx),%xmm6 |
846 movdqa %xmm1,64(%esp) | 872 movdqa %xmm1,64(%esp) |
847 .byte 102,15,56,0,202 | 873 .byte 102,15,56,0,202 |
848 pshufd $192,%xmm0,%xmm2 | 874 pshufd $192,%xmm0,%xmm2 |
849 pshufd $128,%xmm0,%xmm3 | 875 pshufd $128,%xmm0,%xmm3 |
850 cmpl $6,%eax | 876 cmpl $6,%eax |
851 jb .L036ctr32_tail | 877 jb .L038ctr32_tail |
852 pxor %xmm6,%xmm7 | 878 pxor %xmm6,%xmm7 |
853 shll $4,%ecx | 879 shll $4,%ecx |
854 movl $16,%ebx | 880 movl $16,%ebx |
855 movdqa %xmm7,32(%esp) | 881 movdqa %xmm7,32(%esp) |
856 movl %edx,%ebp | 882 movl %edx,%ebp |
857 subl %ecx,%ebx | 883 subl %ecx,%ebx |
858 leal 32(%edx,%ecx,1),%edx | 884 leal 32(%edx,%ecx,1),%edx |
859 subl $6,%eax | 885 subl $6,%eax |
860 jmp .L037ctr32_loop6 | 886 jmp .L039ctr32_loop6 |
861 .align 16 | 887 .align 16 |
862 .L037ctr32_loop6: | 888 .L039ctr32_loop6: |
863 pshufd $64,%xmm0,%xmm4 | 889 pshufd $64,%xmm0,%xmm4 |
864 movdqa 32(%esp),%xmm0 | 890 movdqa 32(%esp),%xmm0 |
865 pshufd $192,%xmm1,%xmm5 | 891 pshufd $192,%xmm1,%xmm5 |
866 pxor %xmm0,%xmm2 | 892 pxor %xmm0,%xmm2 |
867 pshufd $128,%xmm1,%xmm6 | 893 pshufd $128,%xmm1,%xmm6 |
868 pxor %xmm0,%xmm3 | 894 pxor %xmm0,%xmm3 |
869 pshufd $64,%xmm1,%xmm7 | 895 pshufd $64,%xmm1,%xmm7 |
870 movups 16(%ebp),%xmm1 | 896 movups 16(%ebp),%xmm1 |
871 pxor %xmm0,%xmm4 | 897 pxor %xmm0,%xmm4 |
872 pxor %xmm0,%xmm5 | 898 pxor %xmm0,%xmm5 |
(...skipping 33 matching lines...)
906 movups %xmm5,48(%edi) | 932 movups %xmm5,48(%edi) |
907 xorps %xmm3,%xmm7 | 933 xorps %xmm3,%xmm7 |
908 movdqa %xmm1,64(%esp) | 934 movdqa %xmm1,64(%esp) |
909 .byte 102,15,56,0,202 | 935 .byte 102,15,56,0,202 |
910 movups %xmm6,64(%edi) | 936 movups %xmm6,64(%edi) |
911 pshufd $192,%xmm0,%xmm2 | 937 pshufd $192,%xmm0,%xmm2 |
912 movups %xmm7,80(%edi) | 938 movups %xmm7,80(%edi) |
913 leal 96(%edi),%edi | 939 leal 96(%edi),%edi |
914 pshufd $128,%xmm0,%xmm3 | 940 pshufd $128,%xmm0,%xmm3 |
915 subl $6,%eax | 941 subl $6,%eax |
916 jnc .L037ctr32_loop6 | 942 jnc .L039ctr32_loop6 |
917 addl $6,%eax | 943 addl $6,%eax |
918 jz .L038ctr32_ret | 944 jz .L040ctr32_ret |
919 movdqu (%ebp),%xmm7 | 945 movdqu (%ebp),%xmm7 |
920 movl %ebp,%edx | 946 movl %ebp,%edx |
921 pxor 32(%esp),%xmm7 | 947 pxor 32(%esp),%xmm7 |
922 movl 240(%ebp),%ecx | 948 movl 240(%ebp),%ecx |
923 .L036ctr32_tail: | 949 .L038ctr32_tail: |
924 por %xmm7,%xmm2 | 950 por %xmm7,%xmm2 |
925 cmpl $2,%eax | 951 cmpl $2,%eax |
926 jb .L039ctr32_one | 952 jb .L041ctr32_one |
927 pshufd $64,%xmm0,%xmm4 | 953 pshufd $64,%xmm0,%xmm4 |
928 por %xmm7,%xmm3 | 954 por %xmm7,%xmm3 |
929 je .L040ctr32_two | 955 je .L042ctr32_two |
930 pshufd $192,%xmm1,%xmm5 | 956 pshufd $192,%xmm1,%xmm5 |
931 por %xmm7,%xmm4 | 957 por %xmm7,%xmm4 |
932 cmpl $4,%eax | 958 cmpl $4,%eax |
933 jb .L041ctr32_three | 959 jb .L043ctr32_three |
934 pshufd $128,%xmm1,%xmm6 | 960 pshufd $128,%xmm1,%xmm6 |
935 por %xmm7,%xmm5 | 961 por %xmm7,%xmm5 |
936 je .L042ctr32_four | 962 je .L044ctr32_four |
937 por %xmm7,%xmm6 | 963 por %xmm7,%xmm6 |
938 call _aesni_encrypt6 | 964 call _aesni_encrypt6 |
939 movups (%esi),%xmm1 | 965 movups (%esi),%xmm1 |
940 movups 16(%esi),%xmm0 | 966 movups 16(%esi),%xmm0 |
941 xorps %xmm1,%xmm2 | 967 xorps %xmm1,%xmm2 |
942 movups 32(%esi),%xmm1 | 968 movups 32(%esi),%xmm1 |
943 xorps %xmm0,%xmm3 | 969 xorps %xmm0,%xmm3 |
944 movups 48(%esi),%xmm0 | 970 movups 48(%esi),%xmm0 |
945 xorps %xmm1,%xmm4 | 971 xorps %xmm1,%xmm4 |
946 movups 64(%esi),%xmm1 | 972 movups 64(%esi),%xmm1 |
947 xorps %xmm0,%xmm5 | 973 xorps %xmm0,%xmm5 |
948 movups %xmm2,(%edi) | 974 movups %xmm2,(%edi) |
949 xorps %xmm1,%xmm6 | 975 xorps %xmm1,%xmm6 |
950 movups %xmm3,16(%edi) | 976 movups %xmm3,16(%edi) |
951 movups %xmm4,32(%edi) | 977 movups %xmm4,32(%edi) |
952 movups %xmm5,48(%edi) | 978 movups %xmm5,48(%edi) |
953 movups %xmm6,64(%edi) | 979 movups %xmm6,64(%edi) |
954 jmp .L038ctr32_ret | 980 jmp .L040ctr32_ret |
955 .align 16 | 981 .align 16 |
956 .L035ctr32_one_shortcut: | 982 .L037ctr32_one_shortcut: |
957 movups (%ebx),%xmm2 | 983 movups (%ebx),%xmm2 |
958 movl 240(%edx),%ecx | 984 movl 240(%edx),%ecx |
959 .L039ctr32_one: | 985 .L041ctr32_one: |
960 movups (%edx),%xmm0 | 986 movups (%edx),%xmm0 |
961 movups 16(%edx),%xmm1 | 987 movups 16(%edx),%xmm1 |
962 leal 32(%edx),%edx | 988 leal 32(%edx),%edx |
963 xorps %xmm0,%xmm2 | 989 xorps %xmm0,%xmm2 |
964 .L043enc1_loop_7: | 990 .L045enc1_loop_7: |
965 .byte 102,15,56,220,209 | 991 .byte 102,15,56,220,209 |
966 decl %ecx | 992 decl %ecx |
967 movups (%edx),%xmm1 | 993 movups (%edx),%xmm1 |
968 leal 16(%edx),%edx | 994 leal 16(%edx),%edx |
969 jnz .L043enc1_loop_7 | 995 jnz .L045enc1_loop_7 |
970 .byte 102,15,56,221,209 | 996 .byte 102,15,56,221,209 |
971 movups (%esi),%xmm6 | 997 movups (%esi),%xmm6 |
972 xorps %xmm2,%xmm6 | 998 xorps %xmm2,%xmm6 |
973 movups %xmm6,(%edi) | 999 movups %xmm6,(%edi) |
974 jmp .L038ctr32_ret | 1000 jmp .L040ctr32_ret |
975 .align 16 | 1001 .align 16 |
976 .L040ctr32_two: | 1002 .L042ctr32_two: |
977 call _aesni_encrypt2 | 1003 call _aesni_encrypt2 |
978 movups (%esi),%xmm5 | 1004 movups (%esi),%xmm5 |
979 movups 16(%esi),%xmm6 | 1005 movups 16(%esi),%xmm6 |
980 xorps %xmm5,%xmm2 | 1006 xorps %xmm5,%xmm2 |
981 xorps %xmm6,%xmm3 | 1007 xorps %xmm6,%xmm3 |
982 movups %xmm2,(%edi) | 1008 movups %xmm2,(%edi) |
983 movups %xmm3,16(%edi) | 1009 movups %xmm3,16(%edi) |
984 jmp .L038ctr32_ret | 1010 jmp .L040ctr32_ret |
985 .align 16 | 1011 .align 16 |
986 .L041ctr32_three: | 1012 .L043ctr32_three: |
987 call _aesni_encrypt3 | 1013 call _aesni_encrypt3 |
988 movups (%esi),%xmm5 | 1014 movups (%esi),%xmm5 |
989 movups 16(%esi),%xmm6 | 1015 movups 16(%esi),%xmm6 |
990 xorps %xmm5,%xmm2 | 1016 xorps %xmm5,%xmm2 |
991 movups 32(%esi),%xmm7 | 1017 movups 32(%esi),%xmm7 |
992 xorps %xmm6,%xmm3 | 1018 xorps %xmm6,%xmm3 |
993 movups %xmm2,(%edi) | 1019 movups %xmm2,(%edi) |
994 xorps %xmm7,%xmm4 | 1020 xorps %xmm7,%xmm4 |
995 movups %xmm3,16(%edi) | 1021 movups %xmm3,16(%edi) |
996 movups %xmm4,32(%edi) | 1022 movups %xmm4,32(%edi) |
997 jmp .L038ctr32_ret | 1023 jmp .L040ctr32_ret |
998 .align 16 | 1024 .align 16 |
999 .L042ctr32_four: | 1025 .L044ctr32_four: |
1000 call _aesni_encrypt4 | 1026 call _aesni_encrypt4 |
1001 movups (%esi),%xmm6 | 1027 movups (%esi),%xmm6 |
1002 movups 16(%esi),%xmm7 | 1028 movups 16(%esi),%xmm7 |
1003 movups 32(%esi),%xmm1 | 1029 movups 32(%esi),%xmm1 |
1004 xorps %xmm6,%xmm2 | 1030 xorps %xmm6,%xmm2 |
1005 movups 48(%esi),%xmm0 | 1031 movups 48(%esi),%xmm0 |
1006 xorps %xmm7,%xmm3 | 1032 xorps %xmm7,%xmm3 |
1007 movups %xmm2,(%edi) | 1033 movups %xmm2,(%edi) |
1008 xorps %xmm1,%xmm4 | 1034 xorps %xmm1,%xmm4 |
1009 movups %xmm3,16(%edi) | 1035 movups %xmm3,16(%edi) |
1010 xorps %xmm0,%xmm5 | 1036 xorps %xmm0,%xmm5 |
1011 movups %xmm4,32(%edi) | 1037 movups %xmm4,32(%edi) |
1012 movups %xmm5,48(%edi) | 1038 movups %xmm5,48(%edi) |
1013 .L038ctr32_ret: | 1039 .L040ctr32_ret: |
| 1040 pxor %xmm0,%xmm0 |
| 1041 pxor %xmm1,%xmm1 |
| 1042 pxor %xmm2,%xmm2 |
| 1043 pxor %xmm3,%xmm3 |
| 1044 pxor %xmm4,%xmm4 |
| 1045 movdqa %xmm0,32(%esp) |
| 1046 pxor %xmm5,%xmm5 |
| 1047 movdqa %xmm0,48(%esp) |
| 1048 pxor %xmm6,%xmm6 |
| 1049 movdqa %xmm0,64(%esp) |
| 1050 pxor %xmm7,%xmm7 |
1014 movl 80(%esp),%esp | 1051 movl 80(%esp),%esp |
1015 popl %edi | 1052 popl %edi |
1016 popl %esi | 1053 popl %esi |
1017 popl %ebx | 1054 popl %ebx |
1018 popl %ebp | 1055 popl %ebp |
1019 ret | 1056 ret |
1020 .size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin | 1057 .size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin |
1021 .globl aesni_xts_encrypt | 1058 .globl aesni_xts_encrypt |
1022 .hidden aesni_xts_encrypt | 1059 .hidden aesni_xts_encrypt |
1023 .type aesni_xts_encrypt,@function | 1060 .type aesni_xts_encrypt,@function |
1024 .align 16 | 1061 .align 16 |
1025 aesni_xts_encrypt: | 1062 aesni_xts_encrypt: |
1026 .L_aesni_xts_encrypt_begin: | 1063 .L_aesni_xts_encrypt_begin: |
1027 pushl %ebp | 1064 pushl %ebp |
1028 pushl %ebx | 1065 pushl %ebx |
1029 pushl %esi | 1066 pushl %esi |
1030 pushl %edi | 1067 pushl %edi |
1031 movl 36(%esp),%edx | 1068 movl 36(%esp),%edx |
1032 movl 40(%esp),%esi | 1069 movl 40(%esp),%esi |
1033 movl 240(%edx),%ecx | 1070 movl 240(%edx),%ecx |
1034 movups (%esi),%xmm2 | 1071 movups (%esi),%xmm2 |
1035 movups (%edx),%xmm0 | 1072 movups (%edx),%xmm0 |
1036 movups 16(%edx),%xmm1 | 1073 movups 16(%edx),%xmm1 |
1037 leal 32(%edx),%edx | 1074 leal 32(%edx),%edx |
1038 xorps %xmm0,%xmm2 | 1075 xorps %xmm0,%xmm2 |
1039 .L044enc1_loop_8: | 1076 .L046enc1_loop_8: |
1040 .byte 102,15,56,220,209 | 1077 .byte 102,15,56,220,209 |
1041 decl %ecx | 1078 decl %ecx |
1042 movups (%edx),%xmm1 | 1079 movups (%edx),%xmm1 |
1043 leal 16(%edx),%edx | 1080 leal 16(%edx),%edx |
1044 jnz .L044enc1_loop_8 | 1081 jnz .L046enc1_loop_8 |
1045 .byte 102,15,56,221,209 | 1082 .byte 102,15,56,221,209 |
1046 movl 20(%esp),%esi | 1083 movl 20(%esp),%esi |
1047 movl 24(%esp),%edi | 1084 movl 24(%esp),%edi |
1048 movl 28(%esp),%eax | 1085 movl 28(%esp),%eax |
1049 movl 32(%esp),%edx | 1086 movl 32(%esp),%edx |
1050 movl %esp,%ebp | 1087 movl %esp,%ebp |
1051 subl $120,%esp | 1088 subl $120,%esp |
1052 movl 240(%edx),%ecx | 1089 movl 240(%edx),%ecx |
1053 andl $-16,%esp | 1090 andl $-16,%esp |
1054 movl $135,96(%esp) | 1091 movl $135,96(%esp) |
1055 movl $0,100(%esp) | 1092 movl $0,100(%esp) |
1056 movl $1,104(%esp) | 1093 movl $1,104(%esp) |
1057 movl $0,108(%esp) | 1094 movl $0,108(%esp) |
1058 movl %eax,112(%esp) | 1095 movl %eax,112(%esp) |
1059 movl %ebp,116(%esp) | 1096 movl %ebp,116(%esp) |
1060 movdqa %xmm2,%xmm1 | 1097 movdqa %xmm2,%xmm1 |
1061 pxor %xmm0,%xmm0 | 1098 pxor %xmm0,%xmm0 |
1062 movdqa 96(%esp),%xmm3 | 1099 movdqa 96(%esp),%xmm3 |
1063 pcmpgtd %xmm1,%xmm0 | 1100 pcmpgtd %xmm1,%xmm0 |
1064 andl $-16,%eax | 1101 andl $-16,%eax |
1065 movl %edx,%ebp | 1102 movl %edx,%ebp |
1066 movl %ecx,%ebx | 1103 movl %ecx,%ebx |
1067 subl $96,%eax | 1104 subl $96,%eax |
1068 jc .L045xts_enc_short | 1105 jc .L047xts_enc_short |
1069 shll $4,%ecx | 1106 shll $4,%ecx |
1070 movl $16,%ebx | 1107 movl $16,%ebx |
1071 subl %ecx,%ebx | 1108 subl %ecx,%ebx |
1072 leal 32(%edx,%ecx,1),%edx | 1109 leal 32(%edx,%ecx,1),%edx |
1073 jmp .L046xts_enc_loop6 | 1110 jmp .L048xts_enc_loop6 |
1074 .align 16 | 1111 .align 16 |
1075 .L046xts_enc_loop6: | 1112 .L048xts_enc_loop6: |
1076 pshufd $19,%xmm0,%xmm2 | 1113 pshufd $19,%xmm0,%xmm2 |
1077 pxor %xmm0,%xmm0 | 1114 pxor %xmm0,%xmm0 |
1078 movdqa %xmm1,(%esp) | 1115 movdqa %xmm1,(%esp) |
1079 paddq %xmm1,%xmm1 | 1116 paddq %xmm1,%xmm1 |
1080 pand %xmm3,%xmm2 | 1117 pand %xmm3,%xmm2 |
1081 pcmpgtd %xmm1,%xmm0 | 1118 pcmpgtd %xmm1,%xmm0 |
1082 pxor %xmm2,%xmm1 | 1119 pxor %xmm2,%xmm1 |
1083 pshufd $19,%xmm0,%xmm2 | 1120 pshufd $19,%xmm0,%xmm2 |
1084 pxor %xmm0,%xmm0 | 1121 pxor %xmm0,%xmm0 |
1085 movdqa %xmm1,16(%esp) | 1122 movdqa %xmm1,16(%esp) |
(...skipping 68 matching lines...)
1154 pshufd $19,%xmm0,%xmm2 | 1191 pshufd $19,%xmm0,%xmm2 |
1155 movups %xmm7,80(%edi) | 1192 movups %xmm7,80(%edi) |
1156 leal 96(%edi),%edi | 1193 leal 96(%edi),%edi |
1157 movdqa 96(%esp),%xmm3 | 1194 movdqa 96(%esp),%xmm3 |
1158 pxor %xmm0,%xmm0 | 1195 pxor %xmm0,%xmm0 |
1159 paddq %xmm1,%xmm1 | 1196 paddq %xmm1,%xmm1 |
1160 pand %xmm3,%xmm2 | 1197 pand %xmm3,%xmm2 |
1161 pcmpgtd %xmm1,%xmm0 | 1198 pcmpgtd %xmm1,%xmm0 |
1162 pxor %xmm2,%xmm1 | 1199 pxor %xmm2,%xmm1 |
1163 subl $96,%eax | 1200 subl $96,%eax |
1164 jnc .L046xts_enc_loop6 | 1201 jnc .L048xts_enc_loop6 |
1165 movl 240(%ebp),%ecx | 1202 movl 240(%ebp),%ecx |
1166 movl %ebp,%edx | 1203 movl %ebp,%edx |
1167 movl %ecx,%ebx | 1204 movl %ecx,%ebx |
1168 .L045xts_enc_short: | 1205 .L047xts_enc_short: |
1169 addl $96,%eax | 1206 addl $96,%eax |
1170 jz .L047xts_enc_done6x | 1207 jz .L049xts_enc_done6x |
1171 movdqa %xmm1,%xmm5 | 1208 movdqa %xmm1,%xmm5 |
1172 cmpl $32,%eax | 1209 cmpl $32,%eax |
1173 jb .L048xts_enc_one | 1210 jb .L050xts_enc_one |
1174 pshufd $19,%xmm0,%xmm2 | 1211 pshufd $19,%xmm0,%xmm2 |
1175 pxor %xmm0,%xmm0 | 1212 pxor %xmm0,%xmm0 |
1176 paddq %xmm1,%xmm1 | 1213 paddq %xmm1,%xmm1 |
1177 pand %xmm3,%xmm2 | 1214 pand %xmm3,%xmm2 |
1178 pcmpgtd %xmm1,%xmm0 | 1215 pcmpgtd %xmm1,%xmm0 |
1179 pxor %xmm2,%xmm1 | 1216 pxor %xmm2,%xmm1 |
1180 je .L049xts_enc_two | 1217 je .L051xts_enc_two |
1181 pshufd $19,%xmm0,%xmm2 | 1218 pshufd $19,%xmm0,%xmm2 |
1182 pxor %xmm0,%xmm0 | 1219 pxor %xmm0,%xmm0 |
1183 movdqa %xmm1,%xmm6 | 1220 movdqa %xmm1,%xmm6 |
1184 paddq %xmm1,%xmm1 | 1221 paddq %xmm1,%xmm1 |
1185 pand %xmm3,%xmm2 | 1222 pand %xmm3,%xmm2 |
1186 pcmpgtd %xmm1,%xmm0 | 1223 pcmpgtd %xmm1,%xmm0 |
1187 pxor %xmm2,%xmm1 | 1224 pxor %xmm2,%xmm1 |
1188 cmpl $64,%eax | 1225 cmpl $64,%eax |
1189 jb .L050xts_enc_three | 1226 jb .L052xts_enc_three |
1190 pshufd $19,%xmm0,%xmm2 | 1227 pshufd $19,%xmm0,%xmm2 |
1191 pxor %xmm0,%xmm0 | 1228 pxor %xmm0,%xmm0 |
1192 movdqa %xmm1,%xmm7 | 1229 movdqa %xmm1,%xmm7 |
1193 paddq %xmm1,%xmm1 | 1230 paddq %xmm1,%xmm1 |
1194 pand %xmm3,%xmm2 | 1231 pand %xmm3,%xmm2 |
1195 pcmpgtd %xmm1,%xmm0 | 1232 pcmpgtd %xmm1,%xmm0 |
1196 pxor %xmm2,%xmm1 | 1233 pxor %xmm2,%xmm1 |
1197 movdqa %xmm5,(%esp) | 1234 movdqa %xmm5,(%esp) |
1198 movdqa %xmm6,16(%esp) | 1235 movdqa %xmm6,16(%esp) |
1199 je .L051xts_enc_four | 1236 je .L053xts_enc_four |
1200 movdqa %xmm7,32(%esp) | 1237 movdqa %xmm7,32(%esp) |
1201 pshufd $19,%xmm0,%xmm7 | 1238 pshufd $19,%xmm0,%xmm7 |
1202 movdqa %xmm1,48(%esp) | 1239 movdqa %xmm1,48(%esp) |
1203 paddq %xmm1,%xmm1 | 1240 paddq %xmm1,%xmm1 |
1204 pand %xmm3,%xmm7 | 1241 pand %xmm3,%xmm7 |
1205 pxor %xmm1,%xmm7 | 1242 pxor %xmm1,%xmm7 |
1206 movdqu (%esi),%xmm2 | 1243 movdqu (%esi),%xmm2 |
1207 movdqu 16(%esi),%xmm3 | 1244 movdqu 16(%esi),%xmm3 |
1208 movdqu 32(%esi),%xmm4 | 1245 movdqu 32(%esi),%xmm4 |
1209 pxor (%esp),%xmm2 | 1246 pxor (%esp),%xmm2 |
(...skipping 11 matching lines...)
1221 xorps 16(%esp),%xmm3 | 1258 xorps 16(%esp),%xmm3 |
1222 xorps 32(%esp),%xmm4 | 1259 xorps 32(%esp),%xmm4 |
1223 movups %xmm2,(%edi) | 1260 movups %xmm2,(%edi) |
1224 xorps 48(%esp),%xmm5 | 1261 xorps 48(%esp),%xmm5 |
1225 movups %xmm3,16(%edi) | 1262 movups %xmm3,16(%edi) |
1226 xorps %xmm1,%xmm6 | 1263 xorps %xmm1,%xmm6 |
1227 movups %xmm4,32(%edi) | 1264 movups %xmm4,32(%edi) |
1228 movups %xmm5,48(%edi) | 1265 movups %xmm5,48(%edi) |
1229 movups %xmm6,64(%edi) | 1266 movups %xmm6,64(%edi) |
1230 leal 80(%edi),%edi | 1267 leal 80(%edi),%edi |
1231 jmp .L052xts_enc_done | 1268 jmp .L054xts_enc_done |
1232 .align 16 | 1269 .align 16 |
1233 .L048xts_enc_one: | 1270 .L050xts_enc_one: |
1234 movups (%esi),%xmm2 | 1271 movups (%esi),%xmm2 |
1235 leal 16(%esi),%esi | 1272 leal 16(%esi),%esi |
1236 xorps %xmm5,%xmm2 | 1273 xorps %xmm5,%xmm2 |
1237 movups (%edx),%xmm0 | 1274 movups (%edx),%xmm0 |
1238 movups 16(%edx),%xmm1 | 1275 movups 16(%edx),%xmm1 |
1239 leal 32(%edx),%edx | 1276 leal 32(%edx),%edx |
1240 xorps %xmm0,%xmm2 | 1277 xorps %xmm0,%xmm2 |
1241 .L053enc1_loop_9: | 1278 .L055enc1_loop_9: |
1242 .byte 102,15,56,220,209 | 1279 .byte 102,15,56,220,209 |
1243 decl %ecx | 1280 decl %ecx |
1244 movups (%edx),%xmm1 | 1281 movups (%edx),%xmm1 |
1245 leal 16(%edx),%edx | 1282 leal 16(%edx),%edx |
1246 jnz .L053enc1_loop_9 | 1283 jnz .L055enc1_loop_9 |
1247 .byte 102,15,56,221,209 | 1284 .byte 102,15,56,221,209 |
1248 xorps %xmm5,%xmm2 | 1285 xorps %xmm5,%xmm2 |
1249 movups %xmm2,(%edi) | 1286 movups %xmm2,(%edi) |
1250 leal 16(%edi),%edi | 1287 leal 16(%edi),%edi |
1251 movdqa %xmm5,%xmm1 | 1288 movdqa %xmm5,%xmm1 |
1252 jmp .L052xts_enc_done | 1289 jmp .L054xts_enc_done |
1253 .align 16 | 1290 .align 16 |
1254 .L049xts_enc_two: | 1291 .L051xts_enc_two: |
1255 movaps %xmm1,%xmm6 | 1292 movaps %xmm1,%xmm6 |
1256 movups (%esi),%xmm2 | 1293 movups (%esi),%xmm2 |
1257 movups 16(%esi),%xmm3 | 1294 movups 16(%esi),%xmm3 |
1258 leal 32(%esi),%esi | 1295 leal 32(%esi),%esi |
1259 xorps %xmm5,%xmm2 | 1296 xorps %xmm5,%xmm2 |
1260 xorps %xmm6,%xmm3 | 1297 xorps %xmm6,%xmm3 |
1261 call _aesni_encrypt2 | 1298 call _aesni_encrypt2 |
1262 xorps %xmm5,%xmm2 | 1299 xorps %xmm5,%xmm2 |
1263 xorps %xmm6,%xmm3 | 1300 xorps %xmm6,%xmm3 |
1264 movups %xmm2,(%edi) | 1301 movups %xmm2,(%edi) |
1265 movups %xmm3,16(%edi) | 1302 movups %xmm3,16(%edi) |
1266 leal 32(%edi),%edi | 1303 leal 32(%edi),%edi |
1267 movdqa %xmm6,%xmm1 | 1304 movdqa %xmm6,%xmm1 |
1268 jmp .L052xts_enc_done | 1305 jmp .L054xts_enc_done |
1269 .align 16 | 1306 .align 16 |
1270 .L050xts_enc_three: | 1307 .L052xts_enc_three: |
1271 movaps %xmm1,%xmm7 | 1308 movaps %xmm1,%xmm7 |
1272 movups (%esi),%xmm2 | 1309 movups (%esi),%xmm2 |
1273 movups 16(%esi),%xmm3 | 1310 movups 16(%esi),%xmm3 |
1274 movups 32(%esi),%xmm4 | 1311 movups 32(%esi),%xmm4 |
1275 leal 48(%esi),%esi | 1312 leal 48(%esi),%esi |
1276 xorps %xmm5,%xmm2 | 1313 xorps %xmm5,%xmm2 |
1277 xorps %xmm6,%xmm3 | 1314 xorps %xmm6,%xmm3 |
1278 xorps %xmm7,%xmm4 | 1315 xorps %xmm7,%xmm4 |
1279 call _aesni_encrypt3 | 1316 call _aesni_encrypt3 |
1280 xorps %xmm5,%xmm2 | 1317 xorps %xmm5,%xmm2 |
1281 xorps %xmm6,%xmm3 | 1318 xorps %xmm6,%xmm3 |
1282 xorps %xmm7,%xmm4 | 1319 xorps %xmm7,%xmm4 |
1283 movups %xmm2,(%edi) | 1320 movups %xmm2,(%edi) |
1284 movups %xmm3,16(%edi) | 1321 movups %xmm3,16(%edi) |
1285 movups %xmm4,32(%edi) | 1322 movups %xmm4,32(%edi) |
1286 leal 48(%edi),%edi | 1323 leal 48(%edi),%edi |
1287 movdqa %xmm7,%xmm1 | 1324 movdqa %xmm7,%xmm1 |
1288 jmp .L052xts_enc_done | 1325 jmp .L054xts_enc_done |
1289 .align 16 | 1326 .align 16 |
1290 .L051xts_enc_four: | 1327 .L053xts_enc_four: |
1291 movaps %xmm1,%xmm6 | 1328 movaps %xmm1,%xmm6 |
1292 movups (%esi),%xmm2 | 1329 movups (%esi),%xmm2 |
1293 movups 16(%esi),%xmm3 | 1330 movups 16(%esi),%xmm3 |
1294 movups 32(%esi),%xmm4 | 1331 movups 32(%esi),%xmm4 |
1295 xorps (%esp),%xmm2 | 1332 xorps (%esp),%xmm2 |
1296 movups 48(%esi),%xmm5 | 1333 movups 48(%esi),%xmm5 |
1297 leal 64(%esi),%esi | 1334 leal 64(%esi),%esi |
1298 xorps 16(%esp),%xmm3 | 1335 xorps 16(%esp),%xmm3 |
1299 xorps %xmm7,%xmm4 | 1336 xorps %xmm7,%xmm4 |
1300 xorps %xmm6,%xmm5 | 1337 xorps %xmm6,%xmm5 |
1301 call _aesni_encrypt4 | 1338 call _aesni_encrypt4 |
1302 xorps (%esp),%xmm2 | 1339 xorps (%esp),%xmm2 |
1303 xorps 16(%esp),%xmm3 | 1340 xorps 16(%esp),%xmm3 |
1304 xorps %xmm7,%xmm4 | 1341 xorps %xmm7,%xmm4 |
1305 movups %xmm2,(%edi) | 1342 movups %xmm2,(%edi) |
1306 xorps %xmm6,%xmm5 | 1343 xorps %xmm6,%xmm5 |
1307 movups %xmm3,16(%edi) | 1344 movups %xmm3,16(%edi) |
1308 movups %xmm4,32(%edi) | 1345 movups %xmm4,32(%edi) |
1309 movups %xmm5,48(%edi) | 1346 movups %xmm5,48(%edi) |
1310 leal 64(%edi),%edi | 1347 leal 64(%edi),%edi |
1311 movdqa %xmm6,%xmm1 | 1348 movdqa %xmm6,%xmm1 |
1312 jmp .L052xts_enc_done | 1349 jmp .L054xts_enc_done |
1313 .align 16 | 1350 .align 16 |
1314 .L047xts_enc_done6x: | 1351 .L049xts_enc_done6x: |
1315 movl 112(%esp),%eax | 1352 movl 112(%esp),%eax |
1316 andl $15,%eax | 1353 andl $15,%eax |
1317 jz .L054xts_enc_ret | 1354 jz .L056xts_enc_ret |
1318 movdqa %xmm1,%xmm5 | 1355 movdqa %xmm1,%xmm5 |
1319 movl %eax,112(%esp) | 1356 movl %eax,112(%esp) |
1320 jmp .L055xts_enc_steal | 1357 jmp .L057xts_enc_steal |
1321 .align 16 | 1358 .align 16 |
1322 .L052xts_enc_done: | 1359 .L054xts_enc_done: |
1323 movl 112(%esp),%eax | 1360 movl 112(%esp),%eax |
1324 pxor %xmm0,%xmm0 | 1361 pxor %xmm0,%xmm0 |
1325 andl $15,%eax | 1362 andl $15,%eax |
1326 jz .L054xts_enc_ret | 1363 jz .L056xts_enc_ret |
1327 pcmpgtd %xmm1,%xmm0 | 1364 pcmpgtd %xmm1,%xmm0 |
1328 movl %eax,112(%esp) | 1365 movl %eax,112(%esp) |
1329 pshufd $19,%xmm0,%xmm5 | 1366 pshufd $19,%xmm0,%xmm5 |
1330 paddq %xmm1,%xmm1 | 1367 paddq %xmm1,%xmm1 |
1331 pand 96(%esp),%xmm5 | 1368 pand 96(%esp),%xmm5 |
1332 pxor %xmm1,%xmm5 | 1369 pxor %xmm1,%xmm5 |
1333 .L055xts_enc_steal: | 1370 .L057xts_enc_steal: |
1334 movzbl (%esi),%ecx | 1371 movzbl (%esi),%ecx |
1335 movzbl -16(%edi),%edx | 1372 movzbl -16(%edi),%edx |
1336 leal 1(%esi),%esi | 1373 leal 1(%esi),%esi |
1337 movb %cl,-16(%edi) | 1374 movb %cl,-16(%edi) |
1338 movb %dl,(%edi) | 1375 movb %dl,(%edi) |
1339 leal 1(%edi),%edi | 1376 leal 1(%edi),%edi |
1340 subl $1,%eax | 1377 subl $1,%eax |
1341 jnz .L055xts_enc_steal | 1378 jnz .L057xts_enc_steal |
1342 subl 112(%esp),%edi | 1379 subl 112(%esp),%edi |
1343 movl %ebp,%edx | 1380 movl %ebp,%edx |
1344 movl %ebx,%ecx | 1381 movl %ebx,%ecx |
1345 movups -16(%edi),%xmm2 | 1382 movups -16(%edi),%xmm2 |
1346 xorps %xmm5,%xmm2 | 1383 xorps %xmm5,%xmm2 |
1347 movups (%edx),%xmm0 | 1384 movups (%edx),%xmm0 |
1348 movups 16(%edx),%xmm1 | 1385 movups 16(%edx),%xmm1 |
1349 leal 32(%edx),%edx | 1386 leal 32(%edx),%edx |
1350 xorps %xmm0,%xmm2 | 1387 xorps %xmm0,%xmm2 |
1351 .L056enc1_loop_10: | 1388 .L058enc1_loop_10: |
1352 .byte 102,15,56,220,209 | 1389 .byte 102,15,56,220,209 |
1353 decl %ecx | 1390 decl %ecx |
1354 movups (%edx),%xmm1 | 1391 movups (%edx),%xmm1 |
1355 leal 16(%edx),%edx | 1392 leal 16(%edx),%edx |
1356 jnz .L056enc1_loop_10 | 1393 jnz .L058enc1_loop_10 |
1357 .byte 102,15,56,221,209 | 1394 .byte 102,15,56,221,209 |
1358 xorps %xmm5,%xmm2 | 1395 xorps %xmm5,%xmm2 |
1359 movups %xmm2,-16(%edi) | 1396 movups %xmm2,-16(%edi) |
1360 .L054xts_enc_ret: | 1397 .L056xts_enc_ret: |
| 1398 pxor %xmm0,%xmm0 |
| 1399 pxor %xmm1,%xmm1 |
| 1400 pxor %xmm2,%xmm2 |
| 1401 movdqa %xmm0,(%esp) |
| 1402 pxor %xmm3,%xmm3 |
| 1403 movdqa %xmm0,16(%esp) |
| 1404 pxor %xmm4,%xmm4 |
| 1405 movdqa %xmm0,32(%esp) |
| 1406 pxor %xmm5,%xmm5 |
| 1407 movdqa %xmm0,48(%esp) |
| 1408 pxor %xmm6,%xmm6 |
| 1409 movdqa %xmm0,64(%esp) |
| 1410 pxor %xmm7,%xmm7 |
| 1411 movdqa %xmm0,80(%esp) |
1361 movl 116(%esp),%esp | 1412 movl 116(%esp),%esp |
1362 popl %edi | 1413 popl %edi |
1363 popl %esi | 1414 popl %esi |
1364 popl %ebx | 1415 popl %ebx |
1365 popl %ebp | 1416 popl %ebp |
1366 ret | 1417 ret |
1367 .size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin | 1418 .size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin |
1368 .globl aesni_xts_decrypt | 1419 .globl aesni_xts_decrypt |
1369 .hidden aesni_xts_decrypt | 1420 .hidden aesni_xts_decrypt |
1370 .type aesni_xts_decrypt,@function | 1421 .type aesni_xts_decrypt,@function |
1371 .align 16 | 1422 .align 16 |
1372 aesni_xts_decrypt: | 1423 aesni_xts_decrypt: |
1373 .L_aesni_xts_decrypt_begin: | 1424 .L_aesni_xts_decrypt_begin: |
1374 pushl %ebp | 1425 pushl %ebp |
1375 pushl %ebx | 1426 pushl %ebx |
1376 pushl %esi | 1427 pushl %esi |
1377 pushl %edi | 1428 pushl %edi |
1378 movl 36(%esp),%edx | 1429 movl 36(%esp),%edx |
1379 movl 40(%esp),%esi | 1430 movl 40(%esp),%esi |
1380 movl 240(%edx),%ecx | 1431 movl 240(%edx),%ecx |
1381 movups (%esi),%xmm2 | 1432 movups (%esi),%xmm2 |
1382 movups (%edx),%xmm0 | 1433 movups (%edx),%xmm0 |
1383 movups 16(%edx),%xmm1 | 1434 movups 16(%edx),%xmm1 |
1384 leal 32(%edx),%edx | 1435 leal 32(%edx),%edx |
1385 xorps %xmm0,%xmm2 | 1436 xorps %xmm0,%xmm2 |
1386 .L057enc1_loop_11: | 1437 .L059enc1_loop_11: |
1387 .byte 102,15,56,220,209 | 1438 .byte 102,15,56,220,209 |
1388 decl %ecx | 1439 decl %ecx |
1389 movups (%edx),%xmm1 | 1440 movups (%edx),%xmm1 |
1390 leal 16(%edx),%edx | 1441 leal 16(%edx),%edx |
1391 jnz .L057enc1_loop_11 | 1442 jnz .L059enc1_loop_11 |
1392 .byte 102,15,56,221,209 | 1443 .byte 102,15,56,221,209 |
1393 movl 20(%esp),%esi | 1444 movl 20(%esp),%esi |
1394 movl 24(%esp),%edi | 1445 movl 24(%esp),%edi |
1395 movl 28(%esp),%eax | 1446 movl 28(%esp),%eax |
1396 movl 32(%esp),%edx | 1447 movl 32(%esp),%edx |
1397 movl %esp,%ebp | 1448 movl %esp,%ebp |
1398 subl $120,%esp | 1449 subl $120,%esp |
1399 andl $-16,%esp | 1450 andl $-16,%esp |
1400 xorl %ebx,%ebx | 1451 xorl %ebx,%ebx |
1401 testl $15,%eax | 1452 testl $15,%eax |
1402 setnz %bl | 1453 setnz %bl |
1403 shll $4,%ebx | 1454 shll $4,%ebx |
1404 subl %ebx,%eax | 1455 subl %ebx,%eax |
1405 movl $135,96(%esp) | 1456 movl $135,96(%esp) |
1406 movl $0,100(%esp) | 1457 movl $0,100(%esp) |
1407 movl $1,104(%esp) | 1458 movl $1,104(%esp) |
1408 movl $0,108(%esp) | 1459 movl $0,108(%esp) |
1409 movl %eax,112(%esp) | 1460 movl %eax,112(%esp) |
1410 movl %ebp,116(%esp) | 1461 movl %ebp,116(%esp) |
1411 movl 240(%edx),%ecx | 1462 movl 240(%edx),%ecx |
1412 movl %edx,%ebp | 1463 movl %edx,%ebp |
1413 movl %ecx,%ebx | 1464 movl %ecx,%ebx |
1414 movdqa %xmm2,%xmm1 | 1465 movdqa %xmm2,%xmm1 |
1415 pxor %xmm0,%xmm0 | 1466 pxor %xmm0,%xmm0 |
1416 movdqa 96(%esp),%xmm3 | 1467 movdqa 96(%esp),%xmm3 |
1417 pcmpgtd %xmm1,%xmm0 | 1468 pcmpgtd %xmm1,%xmm0 |
1418 andl $-16,%eax | 1469 andl $-16,%eax |
1419 subl $96,%eax | 1470 subl $96,%eax |
1420 jc .L058xts_dec_short | 1471 jc .L060xts_dec_short |
1421 shll $4,%ecx | 1472 shll $4,%ecx |
1422 movl $16,%ebx | 1473 movl $16,%ebx |
1423 subl %ecx,%ebx | 1474 subl %ecx,%ebx |
1424 leal 32(%edx,%ecx,1),%edx | 1475 leal 32(%edx,%ecx,1),%edx |
1425 jmp .L059xts_dec_loop6 | 1476 jmp .L061xts_dec_loop6 |
1426 .align 16 | 1477 .align 16 |
1427 .L059xts_dec_loop6: | 1478 .L061xts_dec_loop6: |
1428 pshufd $19,%xmm0,%xmm2 | 1479 pshufd $19,%xmm0,%xmm2 |
1429 pxor %xmm0,%xmm0 | 1480 pxor %xmm0,%xmm0 |
1430 movdqa %xmm1,(%esp) | 1481 movdqa %xmm1,(%esp) |
1431 paddq %xmm1,%xmm1 | 1482 paddq %xmm1,%xmm1 |
1432 pand %xmm3,%xmm2 | 1483 pand %xmm3,%xmm2 |
1433 pcmpgtd %xmm1,%xmm0 | 1484 pcmpgtd %xmm1,%xmm0 |
1434 pxor %xmm2,%xmm1 | 1485 pxor %xmm2,%xmm1 |
1435 pshufd $19,%xmm0,%xmm2 | 1486 pshufd $19,%xmm0,%xmm2 |
1436 pxor %xmm0,%xmm0 | 1487 pxor %xmm0,%xmm0 |
1437 movdqa %xmm1,16(%esp) | 1488 movdqa %xmm1,16(%esp) |
(...skipping 68 matching lines...)
1506 pshufd $19,%xmm0,%xmm2 | 1557 pshufd $19,%xmm0,%xmm2 |
1507 movups %xmm7,80(%edi) | 1558 movups %xmm7,80(%edi) |
1508 leal 96(%edi),%edi | 1559 leal 96(%edi),%edi |
1509 movdqa 96(%esp),%xmm3 | 1560 movdqa 96(%esp),%xmm3 |
1510 pxor %xmm0,%xmm0 | 1561 pxor %xmm0,%xmm0 |
1511 paddq %xmm1,%xmm1 | 1562 paddq %xmm1,%xmm1 |
1512 pand %xmm3,%xmm2 | 1563 pand %xmm3,%xmm2 |
1513 pcmpgtd %xmm1,%xmm0 | 1564 pcmpgtd %xmm1,%xmm0 |
1514 pxor %xmm2,%xmm1 | 1565 pxor %xmm2,%xmm1 |
1515 subl $96,%eax | 1566 subl $96,%eax |
1516 jnc .L059xts_dec_loop6 | 1567 jnc .L061xts_dec_loop6 |
1517 movl 240(%ebp),%ecx | 1568 movl 240(%ebp),%ecx |
1518 movl %ebp,%edx | 1569 movl %ebp,%edx |
1519 movl %ecx,%ebx | 1570 movl %ecx,%ebx |
1520 .L058xts_dec_short: | 1571 .L060xts_dec_short: |
1521 addl $96,%eax | 1572 addl $96,%eax |
1522 » jz» .L060xts_dec_done6x | 1573 » jz» .L062xts_dec_done6x |
1523 movdqa %xmm1,%xmm5 | 1574 movdqa %xmm1,%xmm5 |
1524 cmpl $32,%eax | 1575 cmpl $32,%eax |
1525 » jb» .L061xts_dec_one | 1576 » jb» .L063xts_dec_one |
1526 pshufd $19,%xmm0,%xmm2 | 1577 pshufd $19,%xmm0,%xmm2 |
1527 pxor %xmm0,%xmm0 | 1578 pxor %xmm0,%xmm0 |
1528 paddq %xmm1,%xmm1 | 1579 paddq %xmm1,%xmm1 |
1529 pand %xmm3,%xmm2 | 1580 pand %xmm3,%xmm2 |
1530 pcmpgtd %xmm1,%xmm0 | 1581 pcmpgtd %xmm1,%xmm0 |
1531 pxor %xmm2,%xmm1 | 1582 pxor %xmm2,%xmm1 |
1532 » je» .L062xts_dec_two | 1583 » je» .L064xts_dec_two |
1533 pshufd $19,%xmm0,%xmm2 | 1584 pshufd $19,%xmm0,%xmm2 |
1534 pxor %xmm0,%xmm0 | 1585 pxor %xmm0,%xmm0 |
1535 movdqa %xmm1,%xmm6 | 1586 movdqa %xmm1,%xmm6 |
1536 paddq %xmm1,%xmm1 | 1587 paddq %xmm1,%xmm1 |
1537 pand %xmm3,%xmm2 | 1588 pand %xmm3,%xmm2 |
1538 pcmpgtd %xmm1,%xmm0 | 1589 pcmpgtd %xmm1,%xmm0 |
1539 pxor %xmm2,%xmm1 | 1590 pxor %xmm2,%xmm1 |
1540 cmpl $64,%eax | 1591 cmpl $64,%eax |
1541 » jb» .L063xts_dec_three | 1592 » jb» .L065xts_dec_three |
1542 pshufd $19,%xmm0,%xmm2 | 1593 pshufd $19,%xmm0,%xmm2 |
1543 pxor %xmm0,%xmm0 | 1594 pxor %xmm0,%xmm0 |
1544 movdqa %xmm1,%xmm7 | 1595 movdqa %xmm1,%xmm7 |
1545 paddq %xmm1,%xmm1 | 1596 paddq %xmm1,%xmm1 |
1546 pand %xmm3,%xmm2 | 1597 pand %xmm3,%xmm2 |
1547 pcmpgtd %xmm1,%xmm0 | 1598 pcmpgtd %xmm1,%xmm0 |
1548 pxor %xmm2,%xmm1 | 1599 pxor %xmm2,%xmm1 |
1549 movdqa %xmm5,(%esp) | 1600 movdqa %xmm5,(%esp) |
1550 movdqa %xmm6,16(%esp) | 1601 movdqa %xmm6,16(%esp) |
1551 » je» .L064xts_dec_four | 1602 » je» .L066xts_dec_four |
1552 movdqa %xmm7,32(%esp) | 1603 movdqa %xmm7,32(%esp) |
1553 pshufd $19,%xmm0,%xmm7 | 1604 pshufd $19,%xmm0,%xmm7 |
1554 movdqa %xmm1,48(%esp) | 1605 movdqa %xmm1,48(%esp) |
1555 paddq %xmm1,%xmm1 | 1606 paddq %xmm1,%xmm1 |
1556 pand %xmm3,%xmm7 | 1607 pand %xmm3,%xmm7 |
1557 pxor %xmm1,%xmm7 | 1608 pxor %xmm1,%xmm7 |
1558 movdqu (%esi),%xmm2 | 1609 movdqu (%esi),%xmm2 |
1559 movdqu 16(%esi),%xmm3 | 1610 movdqu 16(%esi),%xmm3 |
1560 movdqu 32(%esi),%xmm4 | 1611 movdqu 32(%esi),%xmm4 |
1561 pxor (%esp),%xmm2 | 1612 pxor (%esp),%xmm2 |
(...skipping 11 matching lines...)
1573 xorps 16(%esp),%xmm3 | 1624 xorps 16(%esp),%xmm3 |
1574 xorps 32(%esp),%xmm4 | 1625 xorps 32(%esp),%xmm4 |
1575 movups %xmm2,(%edi) | 1626 movups %xmm2,(%edi) |
1576 xorps 48(%esp),%xmm5 | 1627 xorps 48(%esp),%xmm5 |
1577 movups %xmm3,16(%edi) | 1628 movups %xmm3,16(%edi) |
1578 xorps %xmm1,%xmm6 | 1629 xorps %xmm1,%xmm6 |
1579 movups %xmm4,32(%edi) | 1630 movups %xmm4,32(%edi) |
1580 movups %xmm5,48(%edi) | 1631 movups %xmm5,48(%edi) |
1581 movups %xmm6,64(%edi) | 1632 movups %xmm6,64(%edi) |
1582 leal 80(%edi),%edi | 1633 leal 80(%edi),%edi |
1583 » jmp» .L065xts_dec_done | 1634 » jmp» .L067xts_dec_done |
1584 .align 16 | 1635 .align 16 |
1585 .L061xts_dec_one: | 1636 .L063xts_dec_one: |
1586 movups (%esi),%xmm2 | 1637 movups (%esi),%xmm2 |
1587 leal 16(%esi),%esi | 1638 leal 16(%esi),%esi |
1588 xorps %xmm5,%xmm2 | 1639 xorps %xmm5,%xmm2 |
1589 movups (%edx),%xmm0 | 1640 movups (%edx),%xmm0 |
1590 movups 16(%edx),%xmm1 | 1641 movups 16(%edx),%xmm1 |
1591 leal 32(%edx),%edx | 1642 leal 32(%edx),%edx |
1592 xorps %xmm0,%xmm2 | 1643 xorps %xmm0,%xmm2 |
1593 .L066dec1_loop_12: | 1644 .L068dec1_loop_12: |
1594 .byte 102,15,56,222,209 | 1645 .byte 102,15,56,222,209 |
1595 decl %ecx | 1646 decl %ecx |
1596 movups (%edx),%xmm1 | 1647 movups (%edx),%xmm1 |
1597 leal 16(%edx),%edx | 1648 leal 16(%edx),%edx |
1598 » jnz» .L066dec1_loop_12 | 1649 » jnz» .L068dec1_loop_12 |
1599 .byte 102,15,56,223,209 | 1650 .byte 102,15,56,223,209 |
1600 xorps %xmm5,%xmm2 | 1651 xorps %xmm5,%xmm2 |
1601 movups %xmm2,(%edi) | 1652 movups %xmm2,(%edi) |
1602 leal 16(%edi),%edi | 1653 leal 16(%edi),%edi |
1603 movdqa %xmm5,%xmm1 | 1654 movdqa %xmm5,%xmm1 |
1604 » jmp» .L065xts_dec_done | 1655 » jmp» .L067xts_dec_done |
1605 .align 16 | 1656 .align 16 |
1606 .L062xts_dec_two: | 1657 .L064xts_dec_two: |
1607 movaps %xmm1,%xmm6 | 1658 movaps %xmm1,%xmm6 |
1608 movups (%esi),%xmm2 | 1659 movups (%esi),%xmm2 |
1609 movups 16(%esi),%xmm3 | 1660 movups 16(%esi),%xmm3 |
1610 leal 32(%esi),%esi | 1661 leal 32(%esi),%esi |
1611 xorps %xmm5,%xmm2 | 1662 xorps %xmm5,%xmm2 |
1612 xorps %xmm6,%xmm3 | 1663 xorps %xmm6,%xmm3 |
1613 call _aesni_decrypt2 | 1664 call _aesni_decrypt2 |
1614 xorps %xmm5,%xmm2 | 1665 xorps %xmm5,%xmm2 |
1615 xorps %xmm6,%xmm3 | 1666 xorps %xmm6,%xmm3 |
1616 movups %xmm2,(%edi) | 1667 movups %xmm2,(%edi) |
1617 movups %xmm3,16(%edi) | 1668 movups %xmm3,16(%edi) |
1618 leal 32(%edi),%edi | 1669 leal 32(%edi),%edi |
1619 movdqa %xmm6,%xmm1 | 1670 movdqa %xmm6,%xmm1 |
1620 » jmp» .L065xts_dec_done | 1671 » jmp» .L067xts_dec_done |
1621 .align 16 | 1672 .align 16 |
1622 .L063xts_dec_three: | 1673 .L065xts_dec_three: |
1623 movaps %xmm1,%xmm7 | 1674 movaps %xmm1,%xmm7 |
1624 movups (%esi),%xmm2 | 1675 movups (%esi),%xmm2 |
1625 movups 16(%esi),%xmm3 | 1676 movups 16(%esi),%xmm3 |
1626 movups 32(%esi),%xmm4 | 1677 movups 32(%esi),%xmm4 |
1627 leal 48(%esi),%esi | 1678 leal 48(%esi),%esi |
1628 xorps %xmm5,%xmm2 | 1679 xorps %xmm5,%xmm2 |
1629 xorps %xmm6,%xmm3 | 1680 xorps %xmm6,%xmm3 |
1630 xorps %xmm7,%xmm4 | 1681 xorps %xmm7,%xmm4 |
1631 call _aesni_decrypt3 | 1682 call _aesni_decrypt3 |
1632 xorps %xmm5,%xmm2 | 1683 xorps %xmm5,%xmm2 |
1633 xorps %xmm6,%xmm3 | 1684 xorps %xmm6,%xmm3 |
1634 xorps %xmm7,%xmm4 | 1685 xorps %xmm7,%xmm4 |
1635 movups %xmm2,(%edi) | 1686 movups %xmm2,(%edi) |
1636 movups %xmm3,16(%edi) | 1687 movups %xmm3,16(%edi) |
1637 movups %xmm4,32(%edi) | 1688 movups %xmm4,32(%edi) |
1638 leal 48(%edi),%edi | 1689 leal 48(%edi),%edi |
1639 movdqa %xmm7,%xmm1 | 1690 movdqa %xmm7,%xmm1 |
1640 » jmp» .L065xts_dec_done | 1691 » jmp» .L067xts_dec_done |
1641 .align 16 | 1692 .align 16 |
1642 .L064xts_dec_four: | 1693 .L066xts_dec_four: |
1643 movaps %xmm1,%xmm6 | 1694 movaps %xmm1,%xmm6 |
1644 movups (%esi),%xmm2 | 1695 movups (%esi),%xmm2 |
1645 movups 16(%esi),%xmm3 | 1696 movups 16(%esi),%xmm3 |
1646 movups 32(%esi),%xmm4 | 1697 movups 32(%esi),%xmm4 |
1647 xorps (%esp),%xmm2 | 1698 xorps (%esp),%xmm2 |
1648 movups 48(%esi),%xmm5 | 1699 movups 48(%esi),%xmm5 |
1649 leal 64(%esi),%esi | 1700 leal 64(%esi),%esi |
1650 xorps 16(%esp),%xmm3 | 1701 xorps 16(%esp),%xmm3 |
1651 xorps %xmm7,%xmm4 | 1702 xorps %xmm7,%xmm4 |
1652 xorps %xmm6,%xmm5 | 1703 xorps %xmm6,%xmm5 |
1653 call _aesni_decrypt4 | 1704 call _aesni_decrypt4 |
1654 xorps (%esp),%xmm2 | 1705 xorps (%esp),%xmm2 |
1655 xorps 16(%esp),%xmm3 | 1706 xorps 16(%esp),%xmm3 |
1656 xorps %xmm7,%xmm4 | 1707 xorps %xmm7,%xmm4 |
1657 movups %xmm2,(%edi) | 1708 movups %xmm2,(%edi) |
1658 xorps %xmm6,%xmm5 | 1709 xorps %xmm6,%xmm5 |
1659 movups %xmm3,16(%edi) | 1710 movups %xmm3,16(%edi) |
1660 movups %xmm4,32(%edi) | 1711 movups %xmm4,32(%edi) |
1661 movups %xmm5,48(%edi) | 1712 movups %xmm5,48(%edi) |
1662 leal 64(%edi),%edi | 1713 leal 64(%edi),%edi |
1663 movdqa %xmm6,%xmm1 | 1714 movdqa %xmm6,%xmm1 |
1664 » jmp» .L065xts_dec_done | 1715 » jmp» .L067xts_dec_done |
1665 .align 16 | 1716 .align 16 |
1666 .L060xts_dec_done6x: | 1717 .L062xts_dec_done6x: |
1667 movl 112(%esp),%eax | 1718 movl 112(%esp),%eax |
1668 andl $15,%eax | 1719 andl $15,%eax |
1669 » jz» .L067xts_dec_ret | 1720 » jz» .L069xts_dec_ret |
1670 movl %eax,112(%esp) | 1721 movl %eax,112(%esp) |
1671 » jmp» .L068xts_dec_only_one_more | 1722 » jmp» .L070xts_dec_only_one_more |
1672 .align 16 | 1723 .align 16 |
1673 .L065xts_dec_done: | 1724 .L067xts_dec_done: |
1674 movl 112(%esp),%eax | 1725 movl 112(%esp),%eax |
1675 pxor %xmm0,%xmm0 | 1726 pxor %xmm0,%xmm0 |
1676 andl $15,%eax | 1727 andl $15,%eax |
1677 » jz» .L067xts_dec_ret | 1728 » jz» .L069xts_dec_ret |
1678 pcmpgtd %xmm1,%xmm0 | 1729 pcmpgtd %xmm1,%xmm0 |
1679 movl %eax,112(%esp) | 1730 movl %eax,112(%esp) |
1680 pshufd $19,%xmm0,%xmm2 | 1731 pshufd $19,%xmm0,%xmm2 |
1681 pxor %xmm0,%xmm0 | 1732 pxor %xmm0,%xmm0 |
1682 movdqa 96(%esp),%xmm3 | 1733 movdqa 96(%esp),%xmm3 |
1683 paddq %xmm1,%xmm1 | 1734 paddq %xmm1,%xmm1 |
1684 pand %xmm3,%xmm2 | 1735 pand %xmm3,%xmm2 |
1685 pcmpgtd %xmm1,%xmm0 | 1736 pcmpgtd %xmm1,%xmm0 |
1686 pxor %xmm2,%xmm1 | 1737 pxor %xmm2,%xmm1 |
1687 .L068xts_dec_only_one_more: | 1738 .L070xts_dec_only_one_more: |
1688 pshufd $19,%xmm0,%xmm5 | 1739 pshufd $19,%xmm0,%xmm5 |
1689 movdqa %xmm1,%xmm6 | 1740 movdqa %xmm1,%xmm6 |
1690 paddq %xmm1,%xmm1 | 1741 paddq %xmm1,%xmm1 |
1691 pand %xmm3,%xmm5 | 1742 pand %xmm3,%xmm5 |
1692 pxor %xmm1,%xmm5 | 1743 pxor %xmm1,%xmm5 |
1693 movl %ebp,%edx | 1744 movl %ebp,%edx |
1694 movl %ebx,%ecx | 1745 movl %ebx,%ecx |
1695 movups (%esi),%xmm2 | 1746 movups (%esi),%xmm2 |
1696 xorps %xmm5,%xmm2 | 1747 xorps %xmm5,%xmm2 |
1697 movups (%edx),%xmm0 | 1748 movups (%edx),%xmm0 |
1698 movups 16(%edx),%xmm1 | 1749 movups 16(%edx),%xmm1 |
1699 leal 32(%edx),%edx | 1750 leal 32(%edx),%edx |
1700 xorps %xmm0,%xmm2 | 1751 xorps %xmm0,%xmm2 |
1701 .L069dec1_loop_13: | 1752 .L071dec1_loop_13: |
1702 .byte 102,15,56,222,209 | 1753 .byte 102,15,56,222,209 |
1703 decl %ecx | 1754 decl %ecx |
1704 movups (%edx),%xmm1 | 1755 movups (%edx),%xmm1 |
1705 leal 16(%edx),%edx | 1756 leal 16(%edx),%edx |
1706 » jnz» .L069dec1_loop_13 | 1757 » jnz» .L071dec1_loop_13 |
1707 .byte 102,15,56,223,209 | 1758 .byte 102,15,56,223,209 |
1708 xorps %xmm5,%xmm2 | 1759 xorps %xmm5,%xmm2 |
1709 movups %xmm2,(%edi) | 1760 movups %xmm2,(%edi) |
1710 .L070xts_dec_steal: | 1761 .L072xts_dec_steal: |
1711 movzbl 16(%esi),%ecx | 1762 movzbl 16(%esi),%ecx |
1712 movzbl (%edi),%edx | 1763 movzbl (%edi),%edx |
1713 leal 1(%esi),%esi | 1764 leal 1(%esi),%esi |
1714 movb %cl,(%edi) | 1765 movb %cl,(%edi) |
1715 movb %dl,16(%edi) | 1766 movb %dl,16(%edi) |
1716 leal 1(%edi),%edi | 1767 leal 1(%edi),%edi |
1717 subl $1,%eax | 1768 subl $1,%eax |
1718 » jnz» .L070xts_dec_steal | 1769 » jnz» .L072xts_dec_steal |
1719 subl 112(%esp),%edi | 1770 subl 112(%esp),%edi |
1720 movl %ebp,%edx | 1771 movl %ebp,%edx |
1721 movl %ebx,%ecx | 1772 movl %ebx,%ecx |
1722 movups (%edi),%xmm2 | 1773 movups (%edi),%xmm2 |
1723 xorps %xmm6,%xmm2 | 1774 xorps %xmm6,%xmm2 |
1724 movups (%edx),%xmm0 | 1775 movups (%edx),%xmm0 |
1725 movups 16(%edx),%xmm1 | 1776 movups 16(%edx),%xmm1 |
1726 leal 32(%edx),%edx | 1777 leal 32(%edx),%edx |
1727 xorps %xmm0,%xmm2 | 1778 xorps %xmm0,%xmm2 |
1728 .L071dec1_loop_14: | 1779 .L073dec1_loop_14: |
1729 .byte 102,15,56,222,209 | 1780 .byte 102,15,56,222,209 |
1730 decl %ecx | 1781 decl %ecx |
1731 movups (%edx),%xmm1 | 1782 movups (%edx),%xmm1 |
1732 leal 16(%edx),%edx | 1783 leal 16(%edx),%edx |
1733 » jnz» .L071dec1_loop_14 | 1784 » jnz» .L073dec1_loop_14 |
1734 .byte 102,15,56,223,209 | 1785 .byte 102,15,56,223,209 |
1735 xorps %xmm6,%xmm2 | 1786 xorps %xmm6,%xmm2 |
1736 movups %xmm2,(%edi) | 1787 movups %xmm2,(%edi) |
1737 .L067xts_dec_ret: | 1788 .L069xts_dec_ret: |
| 1789 » pxor» %xmm0,%xmm0 |
| 1790 » pxor» %xmm1,%xmm1 |
| 1791 » pxor» %xmm2,%xmm2 |
| 1792 » movdqa» %xmm0,(%esp) |
| 1793 » pxor» %xmm3,%xmm3 |
| 1794 » movdqa» %xmm0,16(%esp) |
| 1795 » pxor» %xmm4,%xmm4 |
| 1796 » movdqa» %xmm0,32(%esp) |
| 1797 » pxor» %xmm5,%xmm5 |
| 1798 » movdqa» %xmm0,48(%esp) |
| 1799 » pxor» %xmm6,%xmm6 |
| 1800 » movdqa» %xmm0,64(%esp) |
| 1801 » pxor» %xmm7,%xmm7 |
| 1802 » movdqa» %xmm0,80(%esp) |
1738 movl 116(%esp),%esp | 1803 movl 116(%esp),%esp |
1739 popl %edi | 1804 popl %edi |
1740 popl %esi | 1805 popl %esi |
1741 popl %ebx | 1806 popl %ebx |
1742 popl %ebp | 1807 popl %ebp |
1743 ret | 1808 ret |
1744 .size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin | 1809 .size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin |
1745 .globl aesni_cbc_encrypt | 1810 .globl aesni_cbc_encrypt |
1746 .hidden aesni_cbc_encrypt | 1811 .hidden aesni_cbc_encrypt |
1747 .type aesni_cbc_encrypt,@function | 1812 .type aesni_cbc_encrypt,@function |
1748 .align 16 | 1813 .align 16 |
1749 aesni_cbc_encrypt: | 1814 aesni_cbc_encrypt: |
1750 .L_aesni_cbc_encrypt_begin: | 1815 .L_aesni_cbc_encrypt_begin: |
1751 pushl %ebp | 1816 pushl %ebp |
1752 pushl %ebx | 1817 pushl %ebx |
1753 pushl %esi | 1818 pushl %esi |
1754 pushl %edi | 1819 pushl %edi |
1755 movl 20(%esp),%esi | 1820 movl 20(%esp),%esi |
1756 movl %esp,%ebx | 1821 movl %esp,%ebx |
1757 movl 24(%esp),%edi | 1822 movl 24(%esp),%edi |
1758 subl $24,%ebx | 1823 subl $24,%ebx |
1759 movl 28(%esp),%eax | 1824 movl 28(%esp),%eax |
1760 andl $-16,%ebx | 1825 andl $-16,%ebx |
1761 movl 32(%esp),%edx | 1826 movl 32(%esp),%edx |
1762 movl 36(%esp),%ebp | 1827 movl 36(%esp),%ebp |
1763 testl %eax,%eax | 1828 testl %eax,%eax |
1764 » jz» .L072cbc_abort | 1829 » jz» .L074cbc_abort |
1765 cmpl $0,40(%esp) | 1830 cmpl $0,40(%esp) |
1766 xchgl %esp,%ebx | 1831 xchgl %esp,%ebx |
1767 movups (%ebp),%xmm7 | 1832 movups (%ebp),%xmm7 |
1768 movl 240(%edx),%ecx | 1833 movl 240(%edx),%ecx |
1769 movl %edx,%ebp | 1834 movl %edx,%ebp |
1770 movl %ebx,16(%esp) | 1835 movl %ebx,16(%esp) |
1771 movl %ecx,%ebx | 1836 movl %ecx,%ebx |
1772 » je» .L073cbc_decrypt | 1837 » je» .L075cbc_decrypt |
1773 movaps %xmm7,%xmm2 | 1838 movaps %xmm7,%xmm2 |
1774 cmpl $16,%eax | 1839 cmpl $16,%eax |
1775 » jb» .L074cbc_enc_tail | 1840 » jb» .L076cbc_enc_tail |
1776 subl $16,%eax | 1841 subl $16,%eax |
1777 » jmp» .L075cbc_enc_loop | 1842 » jmp» .L077cbc_enc_loop |
1778 .align 16 | 1843 .align 16 |
1779 .L075cbc_enc_loop: | 1844 .L077cbc_enc_loop: |
1780 movups (%esi),%xmm7 | 1845 movups (%esi),%xmm7 |
1781 leal 16(%esi),%esi | 1846 leal 16(%esi),%esi |
1782 movups (%edx),%xmm0 | 1847 movups (%edx),%xmm0 |
1783 movups 16(%edx),%xmm1 | 1848 movups 16(%edx),%xmm1 |
1784 xorps %xmm0,%xmm7 | 1849 xorps %xmm0,%xmm7 |
1785 leal 32(%edx),%edx | 1850 leal 32(%edx),%edx |
1786 xorps %xmm7,%xmm2 | 1851 xorps %xmm7,%xmm2 |
1787 .L076enc1_loop_15: | 1852 .L078enc1_loop_15: |
1788 .byte 102,15,56,220,209 | 1853 .byte 102,15,56,220,209 |
1789 decl %ecx | 1854 decl %ecx |
1790 movups (%edx),%xmm1 | 1855 movups (%edx),%xmm1 |
1791 leal 16(%edx),%edx | 1856 leal 16(%edx),%edx |
1792 » jnz» .L076enc1_loop_15 | 1857 » jnz» .L078enc1_loop_15 |
1793 .byte 102,15,56,221,209 | 1858 .byte 102,15,56,221,209 |
1794 movl %ebx,%ecx | 1859 movl %ebx,%ecx |
1795 movl %ebp,%edx | 1860 movl %ebp,%edx |
1796 movups %xmm2,(%edi) | 1861 movups %xmm2,(%edi) |
1797 leal 16(%edi),%edi | 1862 leal 16(%edi),%edi |
1798 subl $16,%eax | 1863 subl $16,%eax |
1799 » jnc» .L075cbc_enc_loop | 1864 » jnc» .L077cbc_enc_loop |
1800 addl $16,%eax | 1865 addl $16,%eax |
1801 » jnz» .L074cbc_enc_tail | 1866 » jnz» .L076cbc_enc_tail |
1802 movaps %xmm2,%xmm7 | 1867 movaps %xmm2,%xmm7 |
1803 » jmp» .L077cbc_ret | 1868 » pxor» %xmm2,%xmm2 |
1804 .L074cbc_enc_tail: | 1869 » jmp» .L079cbc_ret |
| 1870 .L076cbc_enc_tail: |
1805 movl %eax,%ecx | 1871 movl %eax,%ecx |
1806 .long 2767451785 | 1872 .long 2767451785 |
1807 movl $16,%ecx | 1873 movl $16,%ecx |
1808 subl %eax,%ecx | 1874 subl %eax,%ecx |
1809 xorl %eax,%eax | 1875 xorl %eax,%eax |
1810 .long 2868115081 | 1876 .long 2868115081 |
1811 leal -16(%edi),%edi | 1877 leal -16(%edi),%edi |
1812 movl %ebx,%ecx | 1878 movl %ebx,%ecx |
1813 movl %edi,%esi | 1879 movl %edi,%esi |
1814 movl %ebp,%edx | 1880 movl %ebp,%edx |
1815 » jmp» .L075cbc_enc_loop | 1881 » jmp» .L077cbc_enc_loop |
1816 .align 16 | 1882 .align 16 |
1817 .L073cbc_decrypt: | 1883 .L075cbc_decrypt: |
1818 cmpl $80,%eax | 1884 cmpl $80,%eax |
1819 » jbe» .L078cbc_dec_tail | 1885 » jbe» .L080cbc_dec_tail |
1820 movaps %xmm7,(%esp) | 1886 movaps %xmm7,(%esp) |
1821 subl $80,%eax | 1887 subl $80,%eax |
1822 » jmp» .L079cbc_dec_loop6_enter | 1888 » jmp» .L081cbc_dec_loop6_enter |
1823 .align 16 | 1889 .align 16 |
1824 .L080cbc_dec_loop6: | 1890 .L082cbc_dec_loop6: |
1825 movaps %xmm0,(%esp) | 1891 movaps %xmm0,(%esp) |
1826 movups %xmm7,(%edi) | 1892 movups %xmm7,(%edi) |
1827 leal 16(%edi),%edi | 1893 leal 16(%edi),%edi |
1828 .L079cbc_dec_loop6_enter: | 1894 .L081cbc_dec_loop6_enter: |
1829 movdqu (%esi),%xmm2 | 1895 movdqu (%esi),%xmm2 |
1830 movdqu 16(%esi),%xmm3 | 1896 movdqu 16(%esi),%xmm3 |
1831 movdqu 32(%esi),%xmm4 | 1897 movdqu 32(%esi),%xmm4 |
1832 movdqu 48(%esi),%xmm5 | 1898 movdqu 48(%esi),%xmm5 |
1833 movdqu 64(%esi),%xmm6 | 1899 movdqu 64(%esi),%xmm6 |
1834 movdqu 80(%esi),%xmm7 | 1900 movdqu 80(%esi),%xmm7 |
1835 call _aesni_decrypt6 | 1901 call _aesni_decrypt6 |
1836 movups (%esi),%xmm1 | 1902 movups (%esi),%xmm1 |
1837 movups 16(%esi),%xmm0 | 1903 movups 16(%esi),%xmm0 |
1838 xorps (%esp),%xmm2 | 1904 xorps (%esp),%xmm2 |
1839 xorps %xmm1,%xmm3 | 1905 xorps %xmm1,%xmm3 |
1840 movups 32(%esi),%xmm1 | 1906 movups 32(%esi),%xmm1 |
1841 xorps %xmm0,%xmm4 | 1907 xorps %xmm0,%xmm4 |
1842 movups 48(%esi),%xmm0 | 1908 movups 48(%esi),%xmm0 |
1843 xorps %xmm1,%xmm5 | 1909 xorps %xmm1,%xmm5 |
1844 movups 64(%esi),%xmm1 | 1910 movups 64(%esi),%xmm1 |
1845 xorps %xmm0,%xmm6 | 1911 xorps %xmm0,%xmm6 |
1846 movups 80(%esi),%xmm0 | 1912 movups 80(%esi),%xmm0 |
1847 xorps %xmm1,%xmm7 | 1913 xorps %xmm1,%xmm7 |
1848 movups %xmm2,(%edi) | 1914 movups %xmm2,(%edi) |
1849 movups %xmm3,16(%edi) | 1915 movups %xmm3,16(%edi) |
1850 leal 96(%esi),%esi | 1916 leal 96(%esi),%esi |
1851 movups %xmm4,32(%edi) | 1917 movups %xmm4,32(%edi) |
1852 movl %ebx,%ecx | 1918 movl %ebx,%ecx |
1853 movups %xmm5,48(%edi) | 1919 movups %xmm5,48(%edi) |
1854 movl %ebp,%edx | 1920 movl %ebp,%edx |
1855 movups %xmm6,64(%edi) | 1921 movups %xmm6,64(%edi) |
1856 leal 80(%edi),%edi | 1922 leal 80(%edi),%edi |
1857 subl $96,%eax | 1923 subl $96,%eax |
1858 » ja» .L080cbc_dec_loop6 | 1924 » ja» .L082cbc_dec_loop6 |
1859 movaps %xmm7,%xmm2 | 1925 movaps %xmm7,%xmm2 |
1860 movaps %xmm0,%xmm7 | 1926 movaps %xmm0,%xmm7 |
1861 addl $80,%eax | 1927 addl $80,%eax |
1862 » jle» .L081cbc_dec_tail_collected | 1928 » jle» .L083cbc_dec_clear_tail_collected |
1863 movups %xmm2,(%edi) | 1929 movups %xmm2,(%edi) |
1864 leal 16(%edi),%edi | 1930 leal 16(%edi),%edi |
1865 .L078cbc_dec_tail: | 1931 .L080cbc_dec_tail: |
1866 movups (%esi),%xmm2 | 1932 movups (%esi),%xmm2 |
1867 movaps %xmm2,%xmm6 | 1933 movaps %xmm2,%xmm6 |
1868 cmpl $16,%eax | 1934 cmpl $16,%eax |
1869 » jbe» .L082cbc_dec_one | 1935 » jbe» .L084cbc_dec_one |
1870 movups 16(%esi),%xmm3 | 1936 movups 16(%esi),%xmm3 |
1871 movaps %xmm3,%xmm5 | 1937 movaps %xmm3,%xmm5 |
1872 cmpl $32,%eax | 1938 cmpl $32,%eax |
1873 » jbe» .L083cbc_dec_two | 1939 » jbe» .L085cbc_dec_two |
1874 movups 32(%esi),%xmm4 | 1940 movups 32(%esi),%xmm4 |
1875 cmpl $48,%eax | 1941 cmpl $48,%eax |
1876 » jbe» .L084cbc_dec_three | 1942 » jbe» .L086cbc_dec_three |
1877 movups 48(%esi),%xmm5 | 1943 movups 48(%esi),%xmm5 |
1878 cmpl $64,%eax | 1944 cmpl $64,%eax |
1879 » jbe» .L085cbc_dec_four | 1945 » jbe» .L087cbc_dec_four |
1880 movups 64(%esi),%xmm6 | 1946 movups 64(%esi),%xmm6 |
1881 movaps %xmm7,(%esp) | 1947 movaps %xmm7,(%esp) |
1882 movups (%esi),%xmm2 | 1948 movups (%esi),%xmm2 |
1883 xorps %xmm7,%xmm7 | 1949 xorps %xmm7,%xmm7 |
1884 call _aesni_decrypt6 | 1950 call _aesni_decrypt6 |
1885 movups (%esi),%xmm1 | 1951 movups (%esi),%xmm1 |
1886 movups 16(%esi),%xmm0 | 1952 movups 16(%esi),%xmm0 |
1887 xorps (%esp),%xmm2 | 1953 xorps (%esp),%xmm2 |
1888 xorps %xmm1,%xmm3 | 1954 xorps %xmm1,%xmm3 |
1889 movups 32(%esi),%xmm1 | 1955 movups 32(%esi),%xmm1 |
1890 xorps %xmm0,%xmm4 | 1956 xorps %xmm0,%xmm4 |
1891 movups 48(%esi),%xmm0 | 1957 movups 48(%esi),%xmm0 |
1892 xorps %xmm1,%xmm5 | 1958 xorps %xmm1,%xmm5 |
1893 movups 64(%esi),%xmm7 | 1959 movups 64(%esi),%xmm7 |
1894 xorps %xmm0,%xmm6 | 1960 xorps %xmm0,%xmm6 |
1895 movups %xmm2,(%edi) | 1961 movups %xmm2,(%edi) |
1896 movups %xmm3,16(%edi) | 1962 movups %xmm3,16(%edi) |
| 1963 pxor %xmm3,%xmm3 |
1897 movups %xmm4,32(%edi) | 1964 movups %xmm4,32(%edi) |
| 1965 pxor %xmm4,%xmm4 |
1898 movups %xmm5,48(%edi) | 1966 movups %xmm5,48(%edi) |
| 1967 pxor %xmm5,%xmm5 |
1899 leal 64(%edi),%edi | 1968 leal 64(%edi),%edi |
1900 movaps %xmm6,%xmm2 | 1969 movaps %xmm6,%xmm2 |
| 1970 pxor %xmm6,%xmm6 |
1901 subl $80,%eax | 1971 subl $80,%eax |
1902 » jmp» .L081cbc_dec_tail_collected | 1972 » jmp» .L088cbc_dec_tail_collected |
1903 .align 16 | 1973 .align 16 |
1904 .L082cbc_dec_one: | 1974 .L084cbc_dec_one: |
1905 movups (%edx),%xmm0 | 1975 movups (%edx),%xmm0 |
1906 movups 16(%edx),%xmm1 | 1976 movups 16(%edx),%xmm1 |
1907 leal 32(%edx),%edx | 1977 leal 32(%edx),%edx |
1908 xorps %xmm0,%xmm2 | 1978 xorps %xmm0,%xmm2 |
1909 .L086dec1_loop_16: | 1979 .L089dec1_loop_16: |
1910 .byte 102,15,56,222,209 | 1980 .byte 102,15,56,222,209 |
1911 decl %ecx | 1981 decl %ecx |
1912 movups (%edx),%xmm1 | 1982 movups (%edx),%xmm1 |
1913 leal 16(%edx),%edx | 1983 leal 16(%edx),%edx |
1914 » jnz» .L086dec1_loop_16 | 1984 » jnz» .L089dec1_loop_16 |
1915 .byte 102,15,56,223,209 | 1985 .byte 102,15,56,223,209 |
1916 xorps %xmm7,%xmm2 | 1986 xorps %xmm7,%xmm2 |
1917 movaps %xmm6,%xmm7 | 1987 movaps %xmm6,%xmm7 |
1918 subl $16,%eax | 1988 subl $16,%eax |
1919 » jmp» .L081cbc_dec_tail_collected | 1989 » jmp» .L088cbc_dec_tail_collected |
1920 .align 16 | 1990 .align 16 |
1921 .L083cbc_dec_two: | 1991 .L085cbc_dec_two: |
1922 call _aesni_decrypt2 | 1992 call _aesni_decrypt2 |
1923 xorps %xmm7,%xmm2 | 1993 xorps %xmm7,%xmm2 |
1924 xorps %xmm6,%xmm3 | 1994 xorps %xmm6,%xmm3 |
1925 movups %xmm2,(%edi) | 1995 movups %xmm2,(%edi) |
1926 movaps %xmm3,%xmm2 | 1996 movaps %xmm3,%xmm2 |
| 1997 pxor %xmm3,%xmm3 |
1927 leal 16(%edi),%edi | 1998 leal 16(%edi),%edi |
1928 movaps %xmm5,%xmm7 | 1999 movaps %xmm5,%xmm7 |
1929 subl $32,%eax | 2000 subl $32,%eax |
1930 » jmp» .L081cbc_dec_tail_collected | 2001 » jmp» .L088cbc_dec_tail_collected |
1931 .align 16 | 2002 .align 16 |
1932 .L084cbc_dec_three: | 2003 .L086cbc_dec_three: |
1933 call _aesni_decrypt3 | 2004 call _aesni_decrypt3 |
1934 xorps %xmm7,%xmm2 | 2005 xorps %xmm7,%xmm2 |
1935 xorps %xmm6,%xmm3 | 2006 xorps %xmm6,%xmm3 |
1936 xorps %xmm5,%xmm4 | 2007 xorps %xmm5,%xmm4 |
1937 movups %xmm2,(%edi) | 2008 movups %xmm2,(%edi) |
1938 movaps %xmm4,%xmm2 | 2009 movaps %xmm4,%xmm2 |
| 2010 pxor %xmm4,%xmm4 |
1939 movups %xmm3,16(%edi) | 2011 movups %xmm3,16(%edi) |
| 2012 pxor %xmm3,%xmm3 |
1940 leal 32(%edi),%edi | 2013 leal 32(%edi),%edi |
1941 movups 32(%esi),%xmm7 | 2014 movups 32(%esi),%xmm7 |
1942 subl $48,%eax | 2015 subl $48,%eax |
1943 » jmp» .L081cbc_dec_tail_collected | 2016 » jmp» .L088cbc_dec_tail_collected |
1944 .align 16 | 2017 .align 16 |
1945 .L085cbc_dec_four: | 2018 .L087cbc_dec_four: |
1946 call _aesni_decrypt4 | 2019 call _aesni_decrypt4 |
1947 movups 16(%esi),%xmm1 | 2020 movups 16(%esi),%xmm1 |
1948 movups 32(%esi),%xmm0 | 2021 movups 32(%esi),%xmm0 |
1949 xorps %xmm7,%xmm2 | 2022 xorps %xmm7,%xmm2 |
1950 movups 48(%esi),%xmm7 | 2023 movups 48(%esi),%xmm7 |
1951 xorps %xmm6,%xmm3 | 2024 xorps %xmm6,%xmm3 |
1952 movups %xmm2,(%edi) | 2025 movups %xmm2,(%edi) |
1953 xorps %xmm1,%xmm4 | 2026 xorps %xmm1,%xmm4 |
1954 movups %xmm3,16(%edi) | 2027 movups %xmm3,16(%edi) |
| 2028 pxor %xmm3,%xmm3 |
1955 xorps %xmm0,%xmm5 | 2029 xorps %xmm0,%xmm5 |
1956 movups %xmm4,32(%edi) | 2030 movups %xmm4,32(%edi) |
| 2031 pxor %xmm4,%xmm4 |
1957 leal 48(%edi),%edi | 2032 leal 48(%edi),%edi |
1958 movaps %xmm5,%xmm2 | 2033 movaps %xmm5,%xmm2 |
| 2034 pxor %xmm5,%xmm5 |
1959 subl $64,%eax | 2035 subl $64,%eax |
1960 .L081cbc_dec_tail_collected: | 2036 » jmp» .L088cbc_dec_tail_collected |
| 2037 .align» 16 |
| 2038 .L083cbc_dec_clear_tail_collected: |
| 2039 » pxor» %xmm3,%xmm3 |
| 2040 » pxor» %xmm4,%xmm4 |
| 2041 » pxor» %xmm5,%xmm5 |
| 2042 » pxor» %xmm6,%xmm6 |
| 2043 .L088cbc_dec_tail_collected: |
1961 andl $15,%eax | 2044 andl $15,%eax |
1962 » jnz» .L087cbc_dec_tail_partial | 2045 » jnz» .L090cbc_dec_tail_partial |
1963 movups %xmm2,(%edi) | 2046 movups %xmm2,(%edi) |
1964 » jmp» .L077cbc_ret | 2047 » pxor» %xmm0,%xmm0 |
| 2048 » jmp» .L079cbc_ret |
1965 .align 16 | 2049 .align 16 |
1966 .L087cbc_dec_tail_partial: | 2050 .L090cbc_dec_tail_partial: |
1967 movaps %xmm2,(%esp) | 2051 movaps %xmm2,(%esp) |
| 2052 pxor %xmm0,%xmm0 |
1968 movl $16,%ecx | 2053 movl $16,%ecx |
1969 movl %esp,%esi | 2054 movl %esp,%esi |
1970 subl %eax,%ecx | 2055 subl %eax,%ecx |
1971 .long 2767451785 | 2056 .long 2767451785 |
1972 .L077cbc_ret: | 2057 » movdqa» %xmm2,(%esp) |
| 2058 .L079cbc_ret: |
1973 movl 16(%esp),%esp | 2059 movl 16(%esp),%esp |
1974 movl 36(%esp),%ebp | 2060 movl 36(%esp),%ebp |
| 2061 pxor %xmm2,%xmm2 |
| 2062 pxor %xmm1,%xmm1 |
1975 movups %xmm7,(%ebp) | 2063 movups %xmm7,(%ebp) |
1976 .L072cbc_abort: | 2064 » pxor» %xmm7,%xmm7 |
| 2065 .L074cbc_abort: |
1977 popl %edi | 2066 popl %edi |
1978 popl %esi | 2067 popl %esi |
1979 popl %ebx | 2068 popl %ebx |
1980 popl %ebp | 2069 popl %ebp |
1981 ret | 2070 ret |
1982 .size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin | 2071 .size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin |
1983 .hidden _aesni_set_encrypt_key | 2072 .hidden _aesni_set_encrypt_key |
1984 .type _aesni_set_encrypt_key,@function | 2073 .type _aesni_set_encrypt_key,@function |
1985 .align 16 | 2074 .align 16 |
1986 _aesni_set_encrypt_key: | 2075 _aesni_set_encrypt_key: |
| 2076 pushl %ebp |
| 2077 pushl %ebx |
1987 testl %eax,%eax | 2078 testl %eax,%eax |
1988 » jz» .L088bad_pointer | 2079 » jz» .L091bad_pointer |
1989 testl %edx,%edx | 2080 testl %edx,%edx |
1990 » jz» .L088bad_pointer | 2081 » jz» .L091bad_pointer |
| 2082 » call» .L092pic |
| 2083 .L092pic: |
| 2084 » popl» %ebx |
| 2085 » leal» .Lkey_const-.L092pic(%ebx),%ebx |
| 2086 » leal» OPENSSL_ia32cap_P-.Lkey_const(%ebx),%ebp |
1991 movups (%eax),%xmm0 | 2087 movups (%eax),%xmm0 |
1992 xorps %xmm4,%xmm4 | 2088 xorps %xmm4,%xmm4 |
| 2089 movl 4(%ebp),%ebp |
1993 leal 16(%edx),%edx | 2090 leal 16(%edx),%edx |
| 2091 andl $268437504,%ebp |
1994 cmpl $256,%ecx | 2092 cmpl $256,%ecx |
1995 » je» .L08914rounds | 2093 » je» .L09314rounds |
1996 cmpl $192,%ecx | 2094 cmpl $192,%ecx |
1997 » je» .L09012rounds | 2095 » je» .L09412rounds |
1998 cmpl $128,%ecx | 2096 cmpl $128,%ecx |
1999 » jne» .L091bad_keybits | 2097 » jne» .L095bad_keybits |
2000 .align 16 | 2098 .align 16 |
2001 .L09210rounds: | 2099 .L09610rounds: |
| 2100 » cmpl» $268435456,%ebp |
| 2101 » je» .L09710rounds_alt |
2002 movl $9,%ecx | 2102 movl $9,%ecx |
2003 movups %xmm0,-16(%edx) | 2103 movups %xmm0,-16(%edx) |
2004 .byte 102,15,58,223,200,1 | 2104 .byte 102,15,58,223,200,1 |
2005 » call» .L093key_128_cold | 2105 » call» .L098key_128_cold |
2006 .byte 102,15,58,223,200,2 | 2106 .byte 102,15,58,223,200,2 |
2007 » call» .L094key_128 | 2107 » call» .L099key_128 |
2008 .byte 102,15,58,223,200,4 | 2108 .byte 102,15,58,223,200,4 |
2009 » call» .L094key_128 | 2109 » call» .L099key_128 |
2010 .byte 102,15,58,223,200,8 | 2110 .byte 102,15,58,223,200,8 |
2011 » call» .L094key_128 | 2111 » call» .L099key_128 |
2012 .byte 102,15,58,223,200,16 | 2112 .byte 102,15,58,223,200,16 |
2013 » call» .L094key_128 | 2113 » call» .L099key_128 |
2014 .byte 102,15,58,223,200,32 | 2114 .byte 102,15,58,223,200,32 |
2015 » call» .L094key_128 | 2115 » call» .L099key_128 |
2016 .byte 102,15,58,223,200,64 | 2116 .byte 102,15,58,223,200,64 |
2017 » call» .L094key_128 | 2117 » call» .L099key_128 |
2018 .byte 102,15,58,223,200,128 | 2118 .byte 102,15,58,223,200,128 |
2019 » call» .L094key_128 | 2119 » call» .L099key_128 |
2020 .byte 102,15,58,223,200,27 | 2120 .byte 102,15,58,223,200,27 |
2021 » call» .L094key_128 | 2121 » call» .L099key_128 |
2022 .byte 102,15,58,223,200,54 | 2122 .byte 102,15,58,223,200,54 |
2023 » call» .L094key_128 | 2123 » call» .L099key_128 |
2024 movups %xmm0,(%edx) | 2124 movups %xmm0,(%edx) |
2025 movl %ecx,80(%edx) | 2125 movl %ecx,80(%edx) |
2026 » xorl» %eax,%eax | 2126 » jmp» .L100good_key |
2027 » ret | |
2028 .align 16 | 2127 .align 16 |
2029 .L094key_128: | 2128 .L099key_128: |
2030 movups %xmm0,(%edx) | 2129 movups %xmm0,(%edx) |
2031 leal 16(%edx),%edx | 2130 leal 16(%edx),%edx |
2032 .L093key_128_cold: | 2131 .L098key_128_cold: |
2033 shufps $16,%xmm0,%xmm4 | 2132 shufps $16,%xmm0,%xmm4 |
2034 xorps %xmm4,%xmm0 | 2133 xorps %xmm4,%xmm0 |
2035 shufps $140,%xmm0,%xmm4 | 2134 shufps $140,%xmm0,%xmm4 |
2036 xorps %xmm4,%xmm0 | 2135 xorps %xmm4,%xmm0 |
2037 shufps $255,%xmm1,%xmm1 | 2136 shufps $255,%xmm1,%xmm1 |
2038 xorps %xmm1,%xmm0 | 2137 xorps %xmm1,%xmm0 |
2039 ret | 2138 ret |
2040 .align 16 | 2139 .align 16 |
2041 .L09012rounds: | 2140 .L09710rounds_alt: |
| 2141 » movdqa» (%ebx),%xmm5 |
| 2142 » movl» $8,%ecx |
| 2143 » movdqa» 32(%ebx),%xmm4 |
| 2144 » movdqa» %xmm0,%xmm2 |
| 2145 » movdqu» %xmm0,-16(%edx) |
| 2146 .L101loop_key128: |
| 2147 .byte» 102,15,56,0,197 |
| 2148 .byte» 102,15,56,221,196 |
| 2149 » pslld» $1,%xmm4 |
| 2150 » leal» 16(%edx),%edx |
| 2151 » movdqa» %xmm2,%xmm3 |
| 2152 » pslldq» $4,%xmm2 |
| 2153 » pxor» %xmm2,%xmm3 |
| 2154 » pslldq» $4,%xmm2 |
| 2155 » pxor» %xmm2,%xmm3 |
| 2156 » pslldq» $4,%xmm2 |
| 2157 » pxor» %xmm3,%xmm2 |
| 2158 » pxor» %xmm2,%xmm0 |
| 2159 » movdqu» %xmm0,-16(%edx) |
| 2160 » movdqa» %xmm0,%xmm2 |
| 2161 » decl» %ecx |
| 2162 » jnz» .L101loop_key128 |
| 2163 » movdqa» 48(%ebx),%xmm4 |
| 2164 .byte» 102,15,56,0,197 |
| 2165 .byte» 102,15,56,221,196 |
| 2166 » pslld» $1,%xmm4 |
| 2167 » movdqa» %xmm2,%xmm3 |
| 2168 » pslldq» $4,%xmm2 |
| 2169 » pxor» %xmm2,%xmm3 |
| 2170 » pslldq» $4,%xmm2 |
| 2171 » pxor» %xmm2,%xmm3 |
| 2172 » pslldq» $4,%xmm2 |
| 2173 » pxor» %xmm3,%xmm2 |
| 2174 » pxor» %xmm2,%xmm0 |
| 2175 » movdqu» %xmm0,(%edx) |
| 2176 » movdqa» %xmm0,%xmm2 |
| 2177 .byte» 102,15,56,0,197 |
| 2178 .byte» 102,15,56,221,196 |
| 2179 » movdqa» %xmm2,%xmm3 |
| 2180 » pslldq» $4,%xmm2 |
| 2181 » pxor» %xmm2,%xmm3 |
| 2182 » pslldq» $4,%xmm2 |
| 2183 » pxor» %xmm2,%xmm3 |
| 2184 » pslldq» $4,%xmm2 |
| 2185 » pxor» %xmm3,%xmm2 |
| 2186 » pxor» %xmm2,%xmm0 |
| 2187 » movdqu» %xmm0,16(%edx) |
| 2188 » movl» $9,%ecx |
| 2189 » movl» %ecx,96(%edx) |
| 2190 » jmp» .L100good_key |
| 2191 .align» 16 |
| 2192 .L09412rounds: |
2042 movq 16(%eax),%xmm2 | 2193 movq 16(%eax),%xmm2 |
| 2194 cmpl $268435456,%ebp |
| 2195 je .L10212rounds_alt |
2043 movl $11,%ecx | 2196 movl $11,%ecx |
2044 movups %xmm0,-16(%edx) | 2197 movups %xmm0,-16(%edx) |
2045 .byte 102,15,58,223,202,1 | 2198 .byte 102,15,58,223,202,1 |
2046 » call» .L095key_192a_cold | 2199 » call» .L103key_192a_cold |
2047 .byte 102,15,58,223,202,2 | 2200 .byte 102,15,58,223,202,2 |
2048 » call» .L096key_192b | 2201 » call» .L104key_192b |
2049 .byte 102,15,58,223,202,4 | 2202 .byte 102,15,58,223,202,4 |
2050 » call» .L097key_192a | 2203 » call» .L105key_192a |
2051 .byte 102,15,58,223,202,8 | 2204 .byte 102,15,58,223,202,8 |
2052 » call» .L096key_192b | 2205 » call» .L104key_192b |
2053 .byte 102,15,58,223,202,16 | 2206 .byte 102,15,58,223,202,16 |
2054 » call» .L097key_192a | 2207 » call» .L105key_192a |
2055 .byte 102,15,58,223,202,32 | 2208 .byte 102,15,58,223,202,32 |
2056 » call» .L096key_192b | 2209 » call» .L104key_192b |
2057 .byte 102,15,58,223,202,64 | 2210 .byte 102,15,58,223,202,64 |
2058 » call» .L097key_192a | 2211 » call» .L105key_192a |
2059 .byte 102,15,58,223,202,128 | 2212 .byte 102,15,58,223,202,128 |
2060 » call» .L096key_192b | 2213 » call» .L104key_192b |
2061 movups %xmm0,(%edx) | 2214 movups %xmm0,(%edx) |
2062 movl %ecx,48(%edx) | 2215 movl %ecx,48(%edx) |
2063 » xorl» %eax,%eax | 2216 » jmp» .L100good_key |
2064 » ret | |
2065 .align 16 | 2217 .align 16 |
2066 .L097key_192a: | 2218 .L105key_192a: |
2067 movups %xmm0,(%edx) | 2219 movups %xmm0,(%edx) |
2068 leal 16(%edx),%edx | 2220 leal 16(%edx),%edx |
2069 .align 16 | 2221 .align 16 |
2070 .L095key_192a_cold: | 2222 .L103key_192a_cold: |
2071 movaps %xmm2,%xmm5 | 2223 movaps %xmm2,%xmm5 |
2072 .L098key_192b_warm: | 2224 .L106key_192b_warm: |
2073 shufps $16,%xmm0,%xmm4 | 2225 shufps $16,%xmm0,%xmm4 |
2074 movdqa %xmm2,%xmm3 | 2226 movdqa %xmm2,%xmm3 |
2075 xorps %xmm4,%xmm0 | 2227 xorps %xmm4,%xmm0 |
2076 shufps $140,%xmm0,%xmm4 | 2228 shufps $140,%xmm0,%xmm4 |
2077 pslldq $4,%xmm3 | 2229 pslldq $4,%xmm3 |
2078 xorps %xmm4,%xmm0 | 2230 xorps %xmm4,%xmm0 |
2079 pshufd $85,%xmm1,%xmm1 | 2231 pshufd $85,%xmm1,%xmm1 |
2080 pxor %xmm3,%xmm2 | 2232 pxor %xmm3,%xmm2 |
2081 pxor %xmm1,%xmm0 | 2233 pxor %xmm1,%xmm0 |
2082 pshufd $255,%xmm0,%xmm3 | 2234 pshufd $255,%xmm0,%xmm3 |
2083 pxor %xmm3,%xmm2 | 2235 pxor %xmm3,%xmm2 |
2084 ret | 2236 ret |
2085 .align 16 | 2237 .align 16 |
2086 .L096key_192b: | 2238 .L104key_192b: |
2087 movaps %xmm0,%xmm3 | 2239 movaps %xmm0,%xmm3 |
2088 shufps $68,%xmm0,%xmm5 | 2240 shufps $68,%xmm0,%xmm5 |
2089 movups %xmm5,(%edx) | 2241 movups %xmm5,(%edx) |
2090 shufps $78,%xmm2,%xmm3 | 2242 shufps $78,%xmm2,%xmm3 |
2091 movups %xmm3,16(%edx) | 2243 movups %xmm3,16(%edx) |
2092 leal 32(%edx),%edx | 2244 leal 32(%edx),%edx |
2093 » jmp» .L098key_192b_warm | 2245 » jmp» .L106key_192b_warm |
2094 .align 16 | 2246 .align 16 |
2095 .L08914rounds: | 2247 .L10212rounds_alt: |
| 2248 » movdqa» 16(%ebx),%xmm5 |
| 2249 » movdqa» 32(%ebx),%xmm4 |
| 2250 » movl» $8,%ecx |
| 2251 » movdqu» %xmm0,-16(%edx) |
| 2252 .L107loop_key192: |
| 2253 » movq» %xmm2,(%edx) |
| 2254 » movdqa» %xmm2,%xmm1 |
| 2255 .byte» 102,15,56,0,213 |
| 2256 .byte» 102,15,56,221,212 |
| 2257 » pslld» $1,%xmm4 |
| 2258 » leal» 24(%edx),%edx |
| 2259 » movdqa» %xmm0,%xmm3 |
| 2260 » pslldq» $4,%xmm0 |
| 2261 » pxor» %xmm0,%xmm3 |
| 2262 » pslldq» $4,%xmm0 |
| 2263 » pxor» %xmm0,%xmm3 |
| 2264 » pslldq» $4,%xmm0 |
| 2265 » pxor» %xmm3,%xmm0 |
| 2266 » pshufd» $255,%xmm0,%xmm3 |
| 2267 » pxor» %xmm1,%xmm3 |
| 2268 » pslldq» $4,%xmm1 |
| 2269 » pxor» %xmm1,%xmm3 |
| 2270 » pxor» %xmm2,%xmm0 |
| 2271 » pxor» %xmm3,%xmm2 |
| 2272 » movdqu» %xmm0,-16(%edx) |
| 2273 » decl» %ecx |
| 2274 » jnz» .L107loop_key192 |
| 2275 » movl» $11,%ecx |
| 2276 » movl» %ecx,32(%edx) |
| 2277 » jmp» .L100good_key |
| 2278 .align» 16 |
| 2279 .L09314rounds: |
2096 movups 16(%eax),%xmm2 | 2280 movups 16(%eax),%xmm2 |
| 2281 leal 16(%edx),%edx |
| 2282 cmpl $268435456,%ebp |
| 2283 je .L10814rounds_alt |
2097 movl $13,%ecx | 2284 movl $13,%ecx |
2098 leal 16(%edx),%edx | |
2099 movups %xmm0,-32(%edx) | 2285 movups %xmm0,-32(%edx) |
2100 movups %xmm2,-16(%edx) | 2286 movups %xmm2,-16(%edx) |
2101 .byte 102,15,58,223,202,1 | 2287 .byte 102,15,58,223,202,1 |
2102 » call» .L099key_256a_cold | 2288 » call» .L109key_256a_cold |
2103 .byte 102,15,58,223,200,1 | 2289 .byte 102,15,58,223,200,1 |
2104 » call» .L100key_256b | 2290 » call» .L110key_256b |
2105 .byte 102,15,58,223,202,2 | 2291 .byte 102,15,58,223,202,2 |
2106 » call» .L101key_256a | 2292 » call» .L111key_256a |
2107 .byte 102,15,58,223,200,2 | 2293 .byte 102,15,58,223,200,2 |
2108 » call» .L100key_256b | 2294 » call» .L110key_256b |
2109 .byte 102,15,58,223,202,4 | 2295 .byte 102,15,58,223,202,4 |
2110 » call» .L101key_256a | 2296 » call» .L111key_256a |
2111 .byte 102,15,58,223,200,4 | 2297 .byte 102,15,58,223,200,4 |
2112 » call» .L100key_256b | 2298 » call» .L110key_256b |
2113 .byte 102,15,58,223,202,8 | 2299 .byte 102,15,58,223,202,8 |
2114 » call» .L101key_256a | 2300 » call» .L111key_256a |
2115 .byte 102,15,58,223,200,8 | 2301 .byte 102,15,58,223,200,8 |
2116 » call» .L100key_256b | 2302 » call» .L110key_256b |
2117 .byte 102,15,58,223,202,16 | 2303 .byte 102,15,58,223,202,16 |
2118 » call» .L101key_256a | 2304 » call» .L111key_256a |
2119 .byte 102,15,58,223,200,16 | 2305 .byte 102,15,58,223,200,16 |
2120 » call» .L100key_256b | 2306 » call» .L110key_256b |
2121 .byte 102,15,58,223,202,32 | 2307 .byte 102,15,58,223,202,32 |
2122 » call» .L101key_256a | 2308 » call» .L111key_256a |
2123 .byte 102,15,58,223,200,32 | 2309 .byte 102,15,58,223,200,32 |
2124 » call» .L100key_256b | 2310 » call» .L110key_256b |
2125 .byte 102,15,58,223,202,64 | 2311 .byte 102,15,58,223,202,64 |
2126 » call» .L101key_256a | 2312 » call» .L111key_256a |
2127 movups %xmm0,(%edx) | 2313 movups %xmm0,(%edx) |
2128 movl %ecx,16(%edx) | 2314 movl %ecx,16(%edx) |
2129 xorl %eax,%eax | 2315 xorl %eax,%eax |
2130 » ret | 2316 » jmp» .L100good_key |
2131 .align 16 | 2317 .align 16 |
2132 .L101key_256a: | 2318 .L111key_256a: |
2133 movups %xmm2,(%edx) | 2319 movups %xmm2,(%edx) |
2134 leal 16(%edx),%edx | 2320 leal 16(%edx),%edx |
2135 .L099key_256a_cold: | 2321 .L109key_256a_cold: |
2136 shufps $16,%xmm0,%xmm4 | 2322 shufps $16,%xmm0,%xmm4 |
2137 xorps %xmm4,%xmm0 | 2323 xorps %xmm4,%xmm0 |
2138 shufps $140,%xmm0,%xmm4 | 2324 shufps $140,%xmm0,%xmm4 |
2139 xorps %xmm4,%xmm0 | 2325 xorps %xmm4,%xmm0 |
2140 shufps $255,%xmm1,%xmm1 | 2326 shufps $255,%xmm1,%xmm1 |
2141 xorps %xmm1,%xmm0 | 2327 xorps %xmm1,%xmm0 |
2142 ret | 2328 ret |
2143 .align 16 | 2329 .align 16 |
2144 .L100key_256b: | 2330 .L110key_256b: |
2145 movups %xmm0,(%edx) | 2331 movups %xmm0,(%edx) |
2146 leal 16(%edx),%edx | 2332 leal 16(%edx),%edx |
2147 shufps $16,%xmm2,%xmm4 | 2333 shufps $16,%xmm2,%xmm4 |
2148 xorps %xmm4,%xmm2 | 2334 xorps %xmm4,%xmm2 |
2149 shufps $140,%xmm2,%xmm4 | 2335 shufps $140,%xmm2,%xmm4 |
2150 xorps %xmm4,%xmm2 | 2336 xorps %xmm4,%xmm2 |
2151 shufps $170,%xmm1,%xmm1 | 2337 shufps $170,%xmm1,%xmm1 |
2152 xorps %xmm1,%xmm2 | 2338 xorps %xmm1,%xmm2 |
2153 ret | 2339 ret |
2154 .align» 4 | 2340 .align» 16 |
2155 .L088bad_pointer: | 2341 .L10814rounds_alt: |
2156 » movl» $-1,%eax | 2342 » movdqa» (%ebx),%xmm5 |
| 2343 » movdqa» 32(%ebx),%xmm4 |
| 2344 » movl» $7,%ecx |
| 2345 » movdqu» %xmm0,-32(%edx) |
| 2346 » movdqa» %xmm2,%xmm1 |
| 2347 » movdqu» %xmm2,-16(%edx) |
| 2348 .L112loop_key256: |
| 2349 .byte» 102,15,56,0,213 |
| 2350 .byte» 102,15,56,221,212 |
| 2351 » movdqa» %xmm0,%xmm3 |
| 2352 » pslldq» $4,%xmm0 |
| 2353 » pxor» %xmm0,%xmm3 |
| 2354 » pslldq» $4,%xmm0 |
| 2355 » pxor» %xmm0,%xmm3 |
| 2356 » pslldq» $4,%xmm0 |
| 2357 » pxor» %xmm3,%xmm0 |
| 2358 » pslld» $1,%xmm4 |
| 2359 » pxor» %xmm2,%xmm0 |
| 2360 » movdqu» %xmm0,(%edx) |
| 2361 » decl» %ecx |
| 2362 » jz» .L113done_key256 |
| 2363 » pshufd» $255,%xmm0,%xmm2 |
| 2364 » pxor» %xmm3,%xmm3 |
| 2365 .byte» 102,15,56,221,211 |
| 2366 » movdqa» %xmm1,%xmm3 |
| 2367 » pslldq» $4,%xmm1 |
| 2368 » pxor» %xmm1,%xmm3 |
| 2369 » pslldq» $4,%xmm1 |
| 2370 » pxor» %xmm1,%xmm3 |
| 2371 » pslldq» $4,%xmm1 |
| 2372 » pxor» %xmm3,%xmm1 |
| 2373 » pxor» %xmm1,%xmm2 |
| 2374 » movdqu» %xmm2,16(%edx) |
| 2375 » leal» 32(%edx),%edx |
| 2376 » movdqa» %xmm2,%xmm1 |
| 2377 » jmp» .L112loop_key256 |
| 2378 .L113done_key256: |
| 2379 » movl» $13,%ecx |
| 2380 » movl» %ecx,16(%edx) |
| 2381 .L100good_key: |
| 2382 » pxor» %xmm0,%xmm0 |
| 2383 » pxor» %xmm1,%xmm1 |
| 2384 » pxor» %xmm2,%xmm2 |
| 2385 » pxor» %xmm3,%xmm3 |
| 2386 » pxor» %xmm4,%xmm4 |
| 2387 » pxor» %xmm5,%xmm5 |
| 2388 » xorl» %eax,%eax |
| 2389 » popl» %ebx |
| 2390 » popl» %ebp |
2157 ret | 2391 ret |
2158 .align 4 | 2392 .align 4 |
2159 .L091bad_keybits: | 2393 .L091bad_pointer: |
| 2394 » movl» $-1,%eax |
| 2395 » popl» %ebx |
| 2396 » popl» %ebp |
| 2397 » ret |
| 2398 .align» 4 |
| 2399 .L095bad_keybits: |
| 2400 » pxor» %xmm0,%xmm0 |
2160 movl $-2,%eax | 2401 movl $-2,%eax |
| 2402 popl %ebx |
| 2403 popl %ebp |
2161 ret | 2404 ret |
2162 .size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key | 2405 .size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key |
2163 .globl aesni_set_encrypt_key | 2406 .globl aesni_set_encrypt_key |
2164 .hidden aesni_set_encrypt_key | 2407 .hidden aesni_set_encrypt_key |
2165 .type aesni_set_encrypt_key,@function | 2408 .type aesni_set_encrypt_key,@function |
2166 .align 16 | 2409 .align 16 |
2167 aesni_set_encrypt_key: | 2410 aesni_set_encrypt_key: |
2168 .L_aesni_set_encrypt_key_begin: | 2411 .L_aesni_set_encrypt_key_begin: |
2169 movl 4(%esp),%eax | 2412 movl 4(%esp),%eax |
2170 movl 8(%esp),%ecx | 2413 movl 8(%esp),%ecx |
2171 movl 12(%esp),%edx | 2414 movl 12(%esp),%edx |
2172 call _aesni_set_encrypt_key | 2415 call _aesni_set_encrypt_key |
2173 ret | 2416 ret |
2174 .size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin | 2417 .size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin |
2175 .globl aesni_set_decrypt_key | 2418 .globl aesni_set_decrypt_key |
2176 .hidden aesni_set_decrypt_key | 2419 .hidden aesni_set_decrypt_key |
2177 .type aesni_set_decrypt_key,@function | 2420 .type aesni_set_decrypt_key,@function |
2178 .align 16 | 2421 .align 16 |
2179 aesni_set_decrypt_key: | 2422 aesni_set_decrypt_key: |
2180 .L_aesni_set_decrypt_key_begin: | 2423 .L_aesni_set_decrypt_key_begin: |
2181 movl 4(%esp),%eax | 2424 movl 4(%esp),%eax |
2182 movl 8(%esp),%ecx | 2425 movl 8(%esp),%ecx |
2183 movl 12(%esp),%edx | 2426 movl 12(%esp),%edx |
2184 call _aesni_set_encrypt_key | 2427 call _aesni_set_encrypt_key |
2185 movl 12(%esp),%edx | 2428 movl 12(%esp),%edx |
2186 shll $4,%ecx | 2429 shll $4,%ecx |
2187 testl %eax,%eax | 2430 testl %eax,%eax |
2188 » jnz» .L102dec_key_ret | 2431 » jnz» .L114dec_key_ret |
2189 leal 16(%edx,%ecx,1),%eax | 2432 leal 16(%edx,%ecx,1),%eax |
2190 movups (%edx),%xmm0 | 2433 movups (%edx),%xmm0 |
2191 movups (%eax),%xmm1 | 2434 movups (%eax),%xmm1 |
2192 movups %xmm0,(%eax) | 2435 movups %xmm0,(%eax) |
2193 movups %xmm1,(%edx) | 2436 movups %xmm1,(%edx) |
2194 leal 16(%edx),%edx | 2437 leal 16(%edx),%edx |
2195 leal -16(%eax),%eax | 2438 leal -16(%eax),%eax |
2196 .L103dec_key_inverse: | 2439 .L115dec_key_inverse: |
2197 movups (%edx),%xmm0 | 2440 movups (%edx),%xmm0 |
2198 movups (%eax),%xmm1 | 2441 movups (%eax),%xmm1 |
2199 .byte 102,15,56,219,192 | 2442 .byte 102,15,56,219,192 |
2200 .byte 102,15,56,219,201 | 2443 .byte 102,15,56,219,201 |
2201 leal 16(%edx),%edx | 2444 leal 16(%edx),%edx |
2202 leal -16(%eax),%eax | 2445 leal -16(%eax),%eax |
2203 movups %xmm0,16(%eax) | 2446 movups %xmm0,16(%eax) |
2204 movups %xmm1,-16(%edx) | 2447 movups %xmm1,-16(%edx) |
2205 cmpl %edx,%eax | 2448 cmpl %edx,%eax |
2206 » ja» .L103dec_key_inverse | 2449 » ja» .L115dec_key_inverse |
2207 movups (%edx),%xmm0 | 2450 movups (%edx),%xmm0 |
2208 .byte 102,15,56,219,192 | 2451 .byte 102,15,56,219,192 |
2209 movups %xmm0,(%edx) | 2452 movups %xmm0,(%edx) |
| 2453 pxor %xmm0,%xmm0 |
| 2454 pxor %xmm1,%xmm1 |
2210 xorl %eax,%eax | 2455 xorl %eax,%eax |
2211 .L102dec_key_ret: | 2456 .L114dec_key_ret: |
2212 ret | 2457 ret |
2213 .size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin | 2458 .size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin |
| 2459 .align 64 |
| 2460 .Lkey_const: |
| 2461 .long 202313229,202313229,202313229,202313229 |
| 2462 .long 67569157,67569157,67569157,67569157 |
| 2463 .long 1,1,1,1 |
| 2464 .long 27,27,27,27 |
2214 .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 | 2465 .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 |
2215 .byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 | 2466 .byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 |
2216 .byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 | 2467 .byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 |
2217 .byte 115,108,46,111,114,103,62,0 | 2468 .byte 115,108,46,111,114,103,62,0 |
2218 #endif | 2469 #endif |