OLD | NEW |
1 #if defined(__i386__) | 1 #if defined(__i386__) |
2 .file "src/crypto/aes/asm/aesni-x86.S" | 2 .file "src/crypto/aes/asm/aesni-x86.S" |
3 .text | 3 .text |
4 .globl _aesni_encrypt | 4 .globl _aesni_encrypt |
5 .private_extern _aesni_encrypt | 5 .private_extern _aesni_encrypt |
6 .align 4 | 6 .align 4 |
7 _aesni_encrypt: | 7 _aesni_encrypt: |
8 L_aesni_encrypt_begin: | 8 L_aesni_encrypt_begin: |
9 movl 4(%esp),%eax | 9 movl 4(%esp),%eax |
10 movl 12(%esp),%edx | 10 movl 12(%esp),%edx |
11 movups (%eax),%xmm2 | 11 movups (%eax),%xmm2 |
12 movl 240(%edx),%ecx | 12 movl 240(%edx),%ecx |
13 movl 8(%esp),%eax | 13 movl 8(%esp),%eax |
14 movups (%edx),%xmm0 | 14 movups (%edx),%xmm0 |
15 movups 16(%edx),%xmm1 | 15 movups 16(%edx),%xmm1 |
16 leal 32(%edx),%edx | 16 leal 32(%edx),%edx |
17 xorps %xmm0,%xmm2 | 17 xorps %xmm0,%xmm2 |
18 L000enc1_loop_1: | 18 L000enc1_loop_1: |
19 .byte 102,15,56,220,209 | 19 .byte 102,15,56,220,209 |
20 decl %ecx | 20 decl %ecx |
21 movups (%edx),%xmm1 | 21 movups (%edx),%xmm1 |
22 leal 16(%edx),%edx | 22 leal 16(%edx),%edx |
23 jnz L000enc1_loop_1 | 23 jnz L000enc1_loop_1 |
24 .byte 102,15,56,221,209 | 24 .byte 102,15,56,221,209 |
| 25 pxor %xmm0,%xmm0 |
| 26 pxor %xmm1,%xmm1 |
25 movups %xmm2,(%eax) | 27 movups %xmm2,(%eax) |
| 28 pxor %xmm2,%xmm2 |
26 ret | 29 ret |
27 .globl _aesni_decrypt | 30 .globl _aesni_decrypt |
28 .private_extern _aesni_decrypt | 31 .private_extern _aesni_decrypt |
29 .align 4 | 32 .align 4 |
30 _aesni_decrypt: | 33 _aesni_decrypt: |
31 L_aesni_decrypt_begin: | 34 L_aesni_decrypt_begin: |
32 movl 4(%esp),%eax | 35 movl 4(%esp),%eax |
33 movl 12(%esp),%edx | 36 movl 12(%esp),%edx |
34 movups (%eax),%xmm2 | 37 movups (%eax),%xmm2 |
35 movl 240(%edx),%ecx | 38 movl 240(%edx),%ecx |
36 movl 8(%esp),%eax | 39 movl 8(%esp),%eax |
37 movups (%edx),%xmm0 | 40 movups (%edx),%xmm0 |
38 movups 16(%edx),%xmm1 | 41 movups 16(%edx),%xmm1 |
39 leal 32(%edx),%edx | 42 leal 32(%edx),%edx |
40 xorps %xmm0,%xmm2 | 43 xorps %xmm0,%xmm2 |
41 L001dec1_loop_2: | 44 L001dec1_loop_2: |
42 .byte 102,15,56,222,209 | 45 .byte 102,15,56,222,209 |
43 decl %ecx | 46 decl %ecx |
44 movups (%edx),%xmm1 | 47 movups (%edx),%xmm1 |
45 leal 16(%edx),%edx | 48 leal 16(%edx),%edx |
46 jnz L001dec1_loop_2 | 49 jnz L001dec1_loop_2 |
47 .byte 102,15,56,223,209 | 50 .byte 102,15,56,223,209 |
| 51 pxor %xmm0,%xmm0 |
| 52 pxor %xmm1,%xmm1 |
48 movups %xmm2,(%eax) | 53 movups %xmm2,(%eax) |
| 54 pxor %xmm2,%xmm2 |
49 ret | 55 ret |
50 .private_extern __aesni_encrypt2 | 56 .private_extern __aesni_encrypt2 |
51 .align 4 | 57 .align 4 |
52 __aesni_encrypt2: | 58 __aesni_encrypt2: |
53 movups (%edx),%xmm0 | 59 movups (%edx),%xmm0 |
54 shll $4,%ecx | 60 shll $4,%ecx |
55 movups 16(%edx),%xmm1 | 61 movups 16(%edx),%xmm1 |
56 xorps %xmm0,%xmm2 | 62 xorps %xmm0,%xmm2 |
57 pxor %xmm0,%xmm3 | 63 pxor %xmm0,%xmm3 |
58 movups 32(%edx),%xmm0 | 64 movups 32(%edx),%xmm0 |
(...skipping 186 matching lines...) |
245 pxor %xmm0,%xmm3 | 251 pxor %xmm0,%xmm3 |
246 pxor %xmm0,%xmm4 | 252 pxor %xmm0,%xmm4 |
247 .byte 102,15,56,220,209 | 253 .byte 102,15,56,220,209 |
248 pxor %xmm0,%xmm5 | 254 pxor %xmm0,%xmm5 |
249 pxor %xmm0,%xmm6 | 255 pxor %xmm0,%xmm6 |
250 .byte 102,15,56,220,217 | 256 .byte 102,15,56,220,217 |
251 leal 32(%edx,%ecx,1),%edx | 257 leal 32(%edx,%ecx,1),%edx |
252 negl %ecx | 258 negl %ecx |
253 .byte 102,15,56,220,225 | 259 .byte 102,15,56,220,225 |
254 pxor %xmm0,%xmm7 | 260 pxor %xmm0,%xmm7 |
| 261 movups (%edx,%ecx,1),%xmm0 |
255 addl $16,%ecx | 262 addl $16,%ecx |
| 263 jmp L008_aesni_encrypt6_inner |
| 264 .align 4,0x90 |
| 265 L009enc6_loop: |
| 266 .byte 102,15,56,220,209 |
| 267 .byte 102,15,56,220,217 |
| 268 .byte 102,15,56,220,225 |
| 269 L008_aesni_encrypt6_inner: |
256 .byte 102,15,56,220,233 | 270 .byte 102,15,56,220,233 |
257 .byte 102,15,56,220,241 | 271 .byte 102,15,56,220,241 |
258 .byte 102,15,56,220,249 | 272 .byte 102,15,56,220,249 |
259 movups -16(%edx,%ecx,1),%xmm0 | |
260 jmp L_aesni_encrypt6_enter | |
261 .align 4,0x90 | |
262 L008enc6_loop: | |
263 .byte 102,15,56,220,209 | |
264 .byte 102,15,56,220,217 | |
265 .byte 102,15,56,220,225 | |
266 .byte 102,15,56,220,233 | |
267 .byte 102,15,56,220,241 | |
268 .byte 102,15,56,220,249 | |
269 L_aesni_encrypt6_enter: | 273 L_aesni_encrypt6_enter: |
270 movups (%edx,%ecx,1),%xmm1 | 274 movups (%edx,%ecx,1),%xmm1 |
271 addl $32,%ecx | 275 addl $32,%ecx |
272 .byte 102,15,56,220,208 | 276 .byte 102,15,56,220,208 |
273 .byte 102,15,56,220,216 | 277 .byte 102,15,56,220,216 |
274 .byte 102,15,56,220,224 | 278 .byte 102,15,56,220,224 |
275 .byte 102,15,56,220,232 | 279 .byte 102,15,56,220,232 |
276 .byte 102,15,56,220,240 | 280 .byte 102,15,56,220,240 |
277 .byte 102,15,56,220,248 | 281 .byte 102,15,56,220,248 |
278 movups -16(%edx,%ecx,1),%xmm0 | 282 movups -16(%edx,%ecx,1),%xmm0 |
279 jnz L008enc6_loop | 283 jnz L009enc6_loop |
280 .byte 102,15,56,220,209 | 284 .byte 102,15,56,220,209 |
281 .byte 102,15,56,220,217 | 285 .byte 102,15,56,220,217 |
282 .byte 102,15,56,220,225 | 286 .byte 102,15,56,220,225 |
283 .byte 102,15,56,220,233 | 287 .byte 102,15,56,220,233 |
284 .byte 102,15,56,220,241 | 288 .byte 102,15,56,220,241 |
285 .byte 102,15,56,220,249 | 289 .byte 102,15,56,220,249 |
286 .byte 102,15,56,221,208 | 290 .byte 102,15,56,221,208 |
287 .byte 102,15,56,221,216 | 291 .byte 102,15,56,221,216 |
288 .byte 102,15,56,221,224 | 292 .byte 102,15,56,221,224 |
289 .byte 102,15,56,221,232 | 293 .byte 102,15,56,221,232 |
(...skipping 10 matching lines...) |
300 pxor %xmm0,%xmm3 | 304 pxor %xmm0,%xmm3 |
301 pxor %xmm0,%xmm4 | 305 pxor %xmm0,%xmm4 |
302 .byte 102,15,56,222,209 | 306 .byte 102,15,56,222,209 |
303 pxor %xmm0,%xmm5 | 307 pxor %xmm0,%xmm5 |
304 pxor %xmm0,%xmm6 | 308 pxor %xmm0,%xmm6 |
305 .byte 102,15,56,222,217 | 309 .byte 102,15,56,222,217 |
306 leal 32(%edx,%ecx,1),%edx | 310 leal 32(%edx,%ecx,1),%edx |
307 negl %ecx | 311 negl %ecx |
308 .byte 102,15,56,222,225 | 312 .byte 102,15,56,222,225 |
309 pxor %xmm0,%xmm7 | 313 pxor %xmm0,%xmm7 |
| 314 movups (%edx,%ecx,1),%xmm0 |
310 addl $16,%ecx | 315 addl $16,%ecx |
| 316 jmp L010_aesni_decrypt6_inner |
| 317 .align 4,0x90 |
| 318 L011dec6_loop: |
| 319 .byte 102,15,56,222,209 |
| 320 .byte 102,15,56,222,217 |
| 321 .byte 102,15,56,222,225 |
| 322 L010_aesni_decrypt6_inner: |
311 .byte 102,15,56,222,233 | 323 .byte 102,15,56,222,233 |
312 .byte 102,15,56,222,241 | 324 .byte 102,15,56,222,241 |
313 .byte 102,15,56,222,249 | 325 .byte 102,15,56,222,249 |
314 movups -16(%edx,%ecx,1),%xmm0 | |
315 jmp L_aesni_decrypt6_enter | |
316 .align 4,0x90 | |
317 L009dec6_loop: | |
318 .byte 102,15,56,222,209 | |
319 .byte 102,15,56,222,217 | |
320 .byte 102,15,56,222,225 | |
321 .byte 102,15,56,222,233 | |
322 .byte 102,15,56,222,241 | |
323 .byte 102,15,56,222,249 | |
324 L_aesni_decrypt6_enter: | 326 L_aesni_decrypt6_enter: |
325 movups (%edx,%ecx,1),%xmm1 | 327 movups (%edx,%ecx,1),%xmm1 |
326 addl $32,%ecx | 328 addl $32,%ecx |
327 .byte 102,15,56,222,208 | 329 .byte 102,15,56,222,208 |
328 .byte 102,15,56,222,216 | 330 .byte 102,15,56,222,216 |
329 .byte 102,15,56,222,224 | 331 .byte 102,15,56,222,224 |
330 .byte 102,15,56,222,232 | 332 .byte 102,15,56,222,232 |
331 .byte 102,15,56,222,240 | 333 .byte 102,15,56,222,240 |
332 .byte 102,15,56,222,248 | 334 .byte 102,15,56,222,248 |
333 movups -16(%edx,%ecx,1),%xmm0 | 335 movups -16(%edx,%ecx,1),%xmm0 |
334 jnz L009dec6_loop | 336 jnz L011dec6_loop |
335 .byte 102,15,56,222,209 | 337 .byte 102,15,56,222,209 |
336 .byte 102,15,56,222,217 | 338 .byte 102,15,56,222,217 |
337 .byte 102,15,56,222,225 | 339 .byte 102,15,56,222,225 |
338 .byte 102,15,56,222,233 | 340 .byte 102,15,56,222,233 |
339 .byte 102,15,56,222,241 | 341 .byte 102,15,56,222,241 |
340 .byte 102,15,56,222,249 | 342 .byte 102,15,56,222,249 |
341 .byte 102,15,56,223,208 | 343 .byte 102,15,56,223,208 |
342 .byte 102,15,56,223,216 | 344 .byte 102,15,56,223,216 |
343 .byte 102,15,56,223,224 | 345 .byte 102,15,56,223,224 |
344 .byte 102,15,56,223,232 | 346 .byte 102,15,56,223,232 |
345 .byte 102,15,56,223,240 | 347 .byte 102,15,56,223,240 |
346 .byte 102,15,56,223,248 | 348 .byte 102,15,56,223,248 |
347 ret | 349 ret |
348 .globl _aesni_ecb_encrypt | 350 .globl _aesni_ecb_encrypt |
349 .private_extern _aesni_ecb_encrypt | 351 .private_extern _aesni_ecb_encrypt |
350 .align 4 | 352 .align 4 |
351 _aesni_ecb_encrypt: | 353 _aesni_ecb_encrypt: |
352 L_aesni_ecb_encrypt_begin: | 354 L_aesni_ecb_encrypt_begin: |
353 pushl %ebp | 355 pushl %ebp |
354 pushl %ebx | 356 pushl %ebx |
355 pushl %esi | 357 pushl %esi |
356 pushl %edi | 358 pushl %edi |
357 movl 20(%esp),%esi | 359 movl 20(%esp),%esi |
358 movl 24(%esp),%edi | 360 movl 24(%esp),%edi |
359 movl 28(%esp),%eax | 361 movl 28(%esp),%eax |
360 movl 32(%esp),%edx | 362 movl 32(%esp),%edx |
361 movl 36(%esp),%ebx | 363 movl 36(%esp),%ebx |
362 andl $-16,%eax | 364 andl $-16,%eax |
363 jz L010ecb_ret | 365 jz L012ecb_ret |
364 movl 240(%edx),%ecx | 366 movl 240(%edx),%ecx |
365 testl %ebx,%ebx | 367 testl %ebx,%ebx |
366 jz L011ecb_decrypt | 368 jz L013ecb_decrypt |
367 movl %edx,%ebp | 369 movl %edx,%ebp |
368 movl %ecx,%ebx | 370 movl %ecx,%ebx |
369 cmpl $96,%eax | 371 cmpl $96,%eax |
370 jb L012ecb_enc_tail | 372 jb L014ecb_enc_tail |
371 movdqu (%esi),%xmm2 | 373 movdqu (%esi),%xmm2 |
372 movdqu 16(%esi),%xmm3 | 374 movdqu 16(%esi),%xmm3 |
373 movdqu 32(%esi),%xmm4 | 375 movdqu 32(%esi),%xmm4 |
374 movdqu 48(%esi),%xmm5 | 376 movdqu 48(%esi),%xmm5 |
375 movdqu 64(%esi),%xmm6 | 377 movdqu 64(%esi),%xmm6 |
376 movdqu 80(%esi),%xmm7 | 378 movdqu 80(%esi),%xmm7 |
377 leal 96(%esi),%esi | 379 leal 96(%esi),%esi |
378 subl $96,%eax | 380 subl $96,%eax |
379 jmp L013ecb_enc_loop6_enter | 381 jmp L015ecb_enc_loop6_enter |
380 .align 4,0x90 | 382 .align 4,0x90 |
381 L014ecb_enc_loop6: | 383 L016ecb_enc_loop6: |
382 movups %xmm2,(%edi) | 384 movups %xmm2,(%edi) |
383 movdqu (%esi),%xmm2 | 385 movdqu (%esi),%xmm2 |
384 movups %xmm3,16(%edi) | 386 movups %xmm3,16(%edi) |
385 movdqu 16(%esi),%xmm3 | 387 movdqu 16(%esi),%xmm3 |
386 movups %xmm4,32(%edi) | 388 movups %xmm4,32(%edi) |
387 movdqu 32(%esi),%xmm4 | 389 movdqu 32(%esi),%xmm4 |
388 movups %xmm5,48(%edi) | 390 movups %xmm5,48(%edi) |
389 movdqu 48(%esi),%xmm5 | 391 movdqu 48(%esi),%xmm5 |
390 movups %xmm6,64(%edi) | 392 movups %xmm6,64(%edi) |
391 movdqu 64(%esi),%xmm6 | 393 movdqu 64(%esi),%xmm6 |
392 movups %xmm7,80(%edi) | 394 movups %xmm7,80(%edi) |
393 leal 96(%edi),%edi | 395 leal 96(%edi),%edi |
394 movdqu 80(%esi),%xmm7 | 396 movdqu 80(%esi),%xmm7 |
395 leal 96(%esi),%esi | 397 leal 96(%esi),%esi |
396 L013ecb_enc_loop6_enter: | 398 L015ecb_enc_loop6_enter: |
397 call __aesni_encrypt6 | 399 call __aesni_encrypt6 |
398 movl %ebp,%edx | 400 movl %ebp,%edx |
399 movl %ebx,%ecx | 401 movl %ebx,%ecx |
400 subl $96,%eax | 402 subl $96,%eax |
401 jnc L014ecb_enc_loop6 | 403 jnc L016ecb_enc_loop6 |
402 movups %xmm2,(%edi) | 404 movups %xmm2,(%edi) |
403 movups %xmm3,16(%edi) | 405 movups %xmm3,16(%edi) |
404 movups %xmm4,32(%edi) | 406 movups %xmm4,32(%edi) |
405 movups %xmm5,48(%edi) | 407 movups %xmm5,48(%edi) |
406 movups %xmm6,64(%edi) | 408 movups %xmm6,64(%edi) |
407 movups %xmm7,80(%edi) | 409 movups %xmm7,80(%edi) |
408 leal 96(%edi),%edi | 410 leal 96(%edi),%edi |
409 addl $96,%eax | 411 addl $96,%eax |
410 jz L010ecb_ret | 412 jz L012ecb_ret |
411 L012ecb_enc_tail: | 413 L014ecb_enc_tail: |
412 movups (%esi),%xmm2 | 414 movups (%esi),%xmm2 |
413 cmpl $32,%eax | 415 cmpl $32,%eax |
414 jb L015ecb_enc_one | 416 jb L017ecb_enc_one |
415 movups 16(%esi),%xmm3 | 417 movups 16(%esi),%xmm3 |
416 je L016ecb_enc_two | 418 je L018ecb_enc_two |
417 movups 32(%esi),%xmm4 | 419 movups 32(%esi),%xmm4 |
418 cmpl $64,%eax | 420 cmpl $64,%eax |
419 jb L017ecb_enc_three | 421 jb L019ecb_enc_three |
420 movups 48(%esi),%xmm5 | 422 movups 48(%esi),%xmm5 |
421 je L018ecb_enc_four | 423 je L020ecb_enc_four |
422 movups 64(%esi),%xmm6 | 424 movups 64(%esi),%xmm6 |
423 xorps %xmm7,%xmm7 | 425 xorps %xmm7,%xmm7 |
424 call __aesni_encrypt6 | 426 call __aesni_encrypt6 |
425 movups %xmm2,(%edi) | 427 movups %xmm2,(%edi) |
426 movups %xmm3,16(%edi) | 428 movups %xmm3,16(%edi) |
427 movups %xmm4,32(%edi) | 429 movups %xmm4,32(%edi) |
428 movups %xmm5,48(%edi) | 430 movups %xmm5,48(%edi) |
429 movups %xmm6,64(%edi) | 431 movups %xmm6,64(%edi) |
430 jmp L010ecb_ret | 432 jmp L012ecb_ret |
431 .align 4,0x90 | 433 .align 4,0x90 |
432 L015ecb_enc_one: | 434 L017ecb_enc_one: |
433 movups (%edx),%xmm0 | 435 movups (%edx),%xmm0 |
434 movups 16(%edx),%xmm1 | 436 movups 16(%edx),%xmm1 |
435 leal 32(%edx),%edx | 437 leal 32(%edx),%edx |
436 xorps %xmm0,%xmm2 | 438 xorps %xmm0,%xmm2 |
437 L019enc1_loop_3: | 439 L021enc1_loop_3: |
438 .byte 102,15,56,220,209 | 440 .byte 102,15,56,220,209 |
439 decl %ecx | 441 decl %ecx |
440 movups (%edx),%xmm1 | 442 movups (%edx),%xmm1 |
441 leal 16(%edx),%edx | 443 leal 16(%edx),%edx |
442 jnz L019enc1_loop_3 | 444 jnz L021enc1_loop_3 |
443 .byte 102,15,56,221,209 | 445 .byte 102,15,56,221,209 |
444 movups %xmm2,(%edi) | 446 movups %xmm2,(%edi) |
445 jmp L010ecb_ret | 447 jmp L012ecb_ret |
446 .align 4,0x90 | 448 .align 4,0x90 |
447 L016ecb_enc_two: | 449 L018ecb_enc_two: |
448 call __aesni_encrypt2 | 450 call __aesni_encrypt2 |
449 movups %xmm2,(%edi) | 451 movups %xmm2,(%edi) |
450 movups %xmm3,16(%edi) | 452 movups %xmm3,16(%edi) |
451 jmp L010ecb_ret | 453 jmp L012ecb_ret |
452 .align 4,0x90 | 454 .align 4,0x90 |
453 L017ecb_enc_three: | 455 L019ecb_enc_three: |
454 call __aesni_encrypt3 | 456 call __aesni_encrypt3 |
455 movups %xmm2,(%edi) | 457 movups %xmm2,(%edi) |
456 movups %xmm3,16(%edi) | 458 movups %xmm3,16(%edi) |
457 movups %xmm4,32(%edi) | 459 movups %xmm4,32(%edi) |
458 jmp L010ecb_ret | 460 jmp L012ecb_ret |
459 .align 4,0x90 | 461 .align 4,0x90 |
460 L018ecb_enc_four: | 462 L020ecb_enc_four: |
461 call __aesni_encrypt4 | 463 call __aesni_encrypt4 |
462 movups %xmm2,(%edi) | 464 movups %xmm2,(%edi) |
463 movups %xmm3,16(%edi) | 465 movups %xmm3,16(%edi) |
464 movups %xmm4,32(%edi) | 466 movups %xmm4,32(%edi) |
465 movups %xmm5,48(%edi) | 467 movups %xmm5,48(%edi) |
466 jmp L010ecb_ret | 468 jmp L012ecb_ret |
467 .align 4,0x90 | 469 .align 4,0x90 |
468 L011ecb_decrypt: | 470 L013ecb_decrypt: |
469 movl %edx,%ebp | 471 movl %edx,%ebp |
470 movl %ecx,%ebx | 472 movl %ecx,%ebx |
471 cmpl $96,%eax | 473 cmpl $96,%eax |
472 jb L020ecb_dec_tail | 474 jb L022ecb_dec_tail |
473 movdqu (%esi),%xmm2 | 475 movdqu (%esi),%xmm2 |
474 movdqu 16(%esi),%xmm3 | 476 movdqu 16(%esi),%xmm3 |
475 movdqu 32(%esi),%xmm4 | 477 movdqu 32(%esi),%xmm4 |
476 movdqu 48(%esi),%xmm5 | 478 movdqu 48(%esi),%xmm5 |
477 movdqu 64(%esi),%xmm6 | 479 movdqu 64(%esi),%xmm6 |
478 movdqu 80(%esi),%xmm7 | 480 movdqu 80(%esi),%xmm7 |
479 leal 96(%esi),%esi | 481 leal 96(%esi),%esi |
480 subl $96,%eax | 482 subl $96,%eax |
481 jmp L021ecb_dec_loop6_enter | 483 jmp L023ecb_dec_loop6_enter |
482 .align 4,0x90 | 484 .align 4,0x90 |
483 L022ecb_dec_loop6: | 485 L024ecb_dec_loop6: |
484 movups %xmm2,(%edi) | 486 movups %xmm2,(%edi) |
485 movdqu (%esi),%xmm2 | 487 movdqu (%esi),%xmm2 |
486 movups %xmm3,16(%edi) | 488 movups %xmm3,16(%edi) |
487 movdqu 16(%esi),%xmm3 | 489 movdqu 16(%esi),%xmm3 |
488 movups %xmm4,32(%edi) | 490 movups %xmm4,32(%edi) |
489 movdqu 32(%esi),%xmm4 | 491 movdqu 32(%esi),%xmm4 |
490 movups %xmm5,48(%edi) | 492 movups %xmm5,48(%edi) |
491 movdqu 48(%esi),%xmm5 | 493 movdqu 48(%esi),%xmm5 |
492 movups %xmm6,64(%edi) | 494 movups %xmm6,64(%edi) |
493 movdqu 64(%esi),%xmm6 | 495 movdqu 64(%esi),%xmm6 |
494 movups %xmm7,80(%edi) | 496 movups %xmm7,80(%edi) |
495 leal 96(%edi),%edi | 497 leal 96(%edi),%edi |
496 movdqu 80(%esi),%xmm7 | 498 movdqu 80(%esi),%xmm7 |
497 leal 96(%esi),%esi | 499 leal 96(%esi),%esi |
498 L021ecb_dec_loop6_enter: | 500 L023ecb_dec_loop6_enter: |
499 call __aesni_decrypt6 | 501 call __aesni_decrypt6 |
500 movl %ebp,%edx | 502 movl %ebp,%edx |
501 movl %ebx,%ecx | 503 movl %ebx,%ecx |
502 subl $96,%eax | 504 subl $96,%eax |
503 jnc L022ecb_dec_loop6 | 505 jnc L024ecb_dec_loop6 |
504 movups %xmm2,(%edi) | 506 movups %xmm2,(%edi) |
505 movups %xmm3,16(%edi) | 507 movups %xmm3,16(%edi) |
506 movups %xmm4,32(%edi) | 508 movups %xmm4,32(%edi) |
507 movups %xmm5,48(%edi) | 509 movups %xmm5,48(%edi) |
508 movups %xmm6,64(%edi) | 510 movups %xmm6,64(%edi) |
509 movups %xmm7,80(%edi) | 511 movups %xmm7,80(%edi) |
510 leal 96(%edi),%edi | 512 leal 96(%edi),%edi |
511 addl $96,%eax | 513 addl $96,%eax |
512 jz L010ecb_ret | 514 jz L012ecb_ret |
513 L020ecb_dec_tail: | 515 L022ecb_dec_tail: |
514 movups (%esi),%xmm2 | 516 movups (%esi),%xmm2 |
515 cmpl $32,%eax | 517 cmpl $32,%eax |
516 jb L023ecb_dec_one | 518 jb L025ecb_dec_one |
517 movups 16(%esi),%xmm3 | 519 movups 16(%esi),%xmm3 |
518 je L024ecb_dec_two | 520 je L026ecb_dec_two |
519 movups 32(%esi),%xmm4 | 521 movups 32(%esi),%xmm4 |
520 cmpl $64,%eax | 522 cmpl $64,%eax |
521 jb L025ecb_dec_three | 523 jb L027ecb_dec_three |
522 movups 48(%esi),%xmm5 | 524 movups 48(%esi),%xmm5 |
523 je L026ecb_dec_four | 525 je L028ecb_dec_four |
524 movups 64(%esi),%xmm6 | 526 movups 64(%esi),%xmm6 |
525 xorps %xmm7,%xmm7 | 527 xorps %xmm7,%xmm7 |
526 call __aesni_decrypt6 | 528 call __aesni_decrypt6 |
527 movups %xmm2,(%edi) | 529 movups %xmm2,(%edi) |
528 movups %xmm3,16(%edi) | 530 movups %xmm3,16(%edi) |
529 movups %xmm4,32(%edi) | 531 movups %xmm4,32(%edi) |
530 movups %xmm5,48(%edi) | 532 movups %xmm5,48(%edi) |
531 movups %xmm6,64(%edi) | 533 movups %xmm6,64(%edi) |
532 jmp L010ecb_ret | 534 jmp L012ecb_ret |
533 .align 4,0x90 | 535 .align 4,0x90 |
534 L023ecb_dec_one: | 536 L025ecb_dec_one: |
535 movups (%edx),%xmm0 | 537 movups (%edx),%xmm0 |
536 movups 16(%edx),%xmm1 | 538 movups 16(%edx),%xmm1 |
537 leal 32(%edx),%edx | 539 leal 32(%edx),%edx |
538 xorps %xmm0,%xmm2 | 540 xorps %xmm0,%xmm2 |
539 L027dec1_loop_4: | 541 L029dec1_loop_4: |
540 .byte 102,15,56,222,209 | 542 .byte 102,15,56,222,209 |
541 decl %ecx | 543 decl %ecx |
542 movups (%edx),%xmm1 | 544 movups (%edx),%xmm1 |
543 leal 16(%edx),%edx | 545 leal 16(%edx),%edx |
544 jnz L027dec1_loop_4 | 546 jnz L029dec1_loop_4 |
545 .byte 102,15,56,223,209 | 547 .byte 102,15,56,223,209 |
546 movups %xmm2,(%edi) | 548 movups %xmm2,(%edi) |
547 jmp L010ecb_ret | 549 jmp L012ecb_ret |
548 .align 4,0x90 | 550 .align 4,0x90 |
549 L024ecb_dec_two: | 551 L026ecb_dec_two: |
550 call __aesni_decrypt2 | 552 call __aesni_decrypt2 |
551 movups %xmm2,(%edi) | 553 movups %xmm2,(%edi) |
552 movups %xmm3,16(%edi) | 554 movups %xmm3,16(%edi) |
553 jmp L010ecb_ret | 555 jmp L012ecb_ret |
554 .align 4,0x90 | 556 .align 4,0x90 |
555 L025ecb_dec_three: | 557 L027ecb_dec_three: |
556 call __aesni_decrypt3 | 558 call __aesni_decrypt3 |
557 movups %xmm2,(%edi) | 559 movups %xmm2,(%edi) |
558 movups %xmm3,16(%edi) | 560 movups %xmm3,16(%edi) |
559 movups %xmm4,32(%edi) | 561 movups %xmm4,32(%edi) |
560 jmp L010ecb_ret | 562 jmp L012ecb_ret |
561 .align 4,0x90 | 563 .align 4,0x90 |
562 L026ecb_dec_four: | 564 L028ecb_dec_four: |
563 call __aesni_decrypt4 | 565 call __aesni_decrypt4 |
564 movups %xmm2,(%edi) | 566 movups %xmm2,(%edi) |
565 movups %xmm3,16(%edi) | 567 movups %xmm3,16(%edi) |
566 movups %xmm4,32(%edi) | 568 movups %xmm4,32(%edi) |
567 movups %xmm5,48(%edi) | 569 movups %xmm5,48(%edi) |
568 L010ecb_ret: | 570 L012ecb_ret: |
| 571 pxor %xmm0,%xmm0 |
| 572 pxor %xmm1,%xmm1 |
| 573 pxor %xmm2,%xmm2 |
| 574 pxor %xmm3,%xmm3 |
| 575 pxor %xmm4,%xmm4 |
| 576 pxor %xmm5,%xmm5 |
| 577 pxor %xmm6,%xmm6 |
| 578 pxor %xmm7,%xmm7 |
569 popl %edi | 579 popl %edi |
570 popl %esi | 580 popl %esi |
571 popl %ebx | 581 popl %ebx |
572 popl %ebp | 582 popl %ebp |
573 ret | 583 ret |
574 .globl _aesni_ccm64_encrypt_blocks | 584 .globl _aesni_ccm64_encrypt_blocks |
575 .private_extern _aesni_ccm64_encrypt_blocks | 585 .private_extern _aesni_ccm64_encrypt_blocks |
576 .align 4 | 586 .align 4 |
577 _aesni_ccm64_encrypt_blocks: | 587 _aesni_ccm64_encrypt_blocks: |
578 L_aesni_ccm64_encrypt_blocks_begin: | 588 L_aesni_ccm64_encrypt_blocks_begin: |
(...skipping 25 matching lines...) |
604 movl %ebp,24(%esp) | 614 movl %ebp,24(%esp) |
605 movl %ebp,28(%esp) | 615 movl %ebp,28(%esp) |
606 shll $4,%ecx | 616 shll $4,%ecx |
607 movl $16,%ebx | 617 movl $16,%ebx |
608 leal (%edx),%ebp | 618 leal (%edx),%ebp |
609 movdqa (%esp),%xmm5 | 619 movdqa (%esp),%xmm5 |
610 movdqa %xmm7,%xmm2 | 620 movdqa %xmm7,%xmm2 |
611 leal 32(%edx,%ecx,1),%edx | 621 leal 32(%edx,%ecx,1),%edx |
612 subl %ecx,%ebx | 622 subl %ecx,%ebx |
613 .byte 102,15,56,0,253 | 623 .byte 102,15,56,0,253 |
614 L028ccm64_enc_outer: | 624 L030ccm64_enc_outer: |
615 movups (%ebp),%xmm0 | 625 movups (%ebp),%xmm0 |
616 movl %ebx,%ecx | 626 movl %ebx,%ecx |
617 movups (%esi),%xmm6 | 627 movups (%esi),%xmm6 |
618 xorps %xmm0,%xmm2 | 628 xorps %xmm0,%xmm2 |
619 movups 16(%ebp),%xmm1 | 629 movups 16(%ebp),%xmm1 |
620 xorps %xmm6,%xmm0 | 630 xorps %xmm6,%xmm0 |
621 xorps %xmm0,%xmm3 | 631 xorps %xmm0,%xmm3 |
622 movups 32(%ebp),%xmm0 | 632 movups 32(%ebp),%xmm0 |
623 L029ccm64_enc2_loop: | 633 L031ccm64_enc2_loop: |
624 .byte 102,15,56,220,209 | 634 .byte 102,15,56,220,209 |
625 .byte 102,15,56,220,217 | 635 .byte 102,15,56,220,217 |
626 movups (%edx,%ecx,1),%xmm1 | 636 movups (%edx,%ecx,1),%xmm1 |
627 addl $32,%ecx | 637 addl $32,%ecx |
628 .byte 102,15,56,220,208 | 638 .byte 102,15,56,220,208 |
629 .byte 102,15,56,220,216 | 639 .byte 102,15,56,220,216 |
630 movups -16(%edx,%ecx,1),%xmm0 | 640 movups -16(%edx,%ecx,1),%xmm0 |
631 jnz L029ccm64_enc2_loop | 641 jnz L031ccm64_enc2_loop |
632 .byte 102,15,56,220,209 | 642 .byte 102,15,56,220,209 |
633 .byte 102,15,56,220,217 | 643 .byte 102,15,56,220,217 |
634 paddq 16(%esp),%xmm7 | 644 paddq 16(%esp),%xmm7 |
635 decl %eax | 645 decl %eax |
636 .byte 102,15,56,221,208 | 646 .byte 102,15,56,221,208 |
637 .byte 102,15,56,221,216 | 647 .byte 102,15,56,221,216 |
638 leal 16(%esi),%esi | 648 leal 16(%esi),%esi |
639 xorps %xmm2,%xmm6 | 649 xorps %xmm2,%xmm6 |
640 movdqa %xmm7,%xmm2 | 650 movdqa %xmm7,%xmm2 |
641 movups %xmm6,(%edi) | 651 movups %xmm6,(%edi) |
642 .byte 102,15,56,0,213 | 652 .byte 102,15,56,0,213 |
643 leal 16(%edi),%edi | 653 leal 16(%edi),%edi |
644 jnz L028ccm64_enc_outer | 654 jnz L030ccm64_enc_outer |
645 movl 48(%esp),%esp | 655 movl 48(%esp),%esp |
646 movl 40(%esp),%edi | 656 movl 40(%esp),%edi |
647 movups %xmm3,(%edi) | 657 movups %xmm3,(%edi) |
| 658 pxor %xmm0,%xmm0 |
| 659 pxor %xmm1,%xmm1 |
| 660 pxor %xmm2,%xmm2 |
| 661 pxor %xmm3,%xmm3 |
| 662 pxor %xmm4,%xmm4 |
| 663 pxor %xmm5,%xmm5 |
| 664 pxor %xmm6,%xmm6 |
| 665 pxor %xmm7,%xmm7 |
648 popl %edi | 666 popl %edi |
649 popl %esi | 667 popl %esi |
650 popl %ebx | 668 popl %ebx |
651 popl %ebp | 669 popl %ebp |
652 ret | 670 ret |
653 .globl _aesni_ccm64_decrypt_blocks | 671 .globl _aesni_ccm64_decrypt_blocks |
654 .private_extern _aesni_ccm64_decrypt_blocks | 672 .private_extern _aesni_ccm64_decrypt_blocks |
655 .align 4 | 673 .align 4 |
656 _aesni_ccm64_decrypt_blocks: | 674 _aesni_ccm64_decrypt_blocks: |
657 L_aesni_ccm64_decrypt_blocks_begin: | 675 L_aesni_ccm64_decrypt_blocks_begin: |
(...skipping 26 matching lines...) |
684 movl %ebp,28(%esp) | 702 movl %ebp,28(%esp) |
685 movdqa (%esp),%xmm5 | 703 movdqa (%esp),%xmm5 |
686 movdqa %xmm7,%xmm2 | 704 movdqa %xmm7,%xmm2 |
687 movl %edx,%ebp | 705 movl %edx,%ebp |
688 movl %ecx,%ebx | 706 movl %ecx,%ebx |
689 .byte 102,15,56,0,253 | 707 .byte 102,15,56,0,253 |
690 movups (%edx),%xmm0 | 708 movups (%edx),%xmm0 |
691 movups 16(%edx),%xmm1 | 709 movups 16(%edx),%xmm1 |
692 leal 32(%edx),%edx | 710 leal 32(%edx),%edx |
693 xorps %xmm0,%xmm2 | 711 xorps %xmm0,%xmm2 |
694 L030enc1_loop_5: | 712 L032enc1_loop_5: |
695 .byte 102,15,56,220,209 | 713 .byte 102,15,56,220,209 |
696 decl %ecx | 714 decl %ecx |
697 movups (%edx),%xmm1 | 715 movups (%edx),%xmm1 |
698 leal 16(%edx),%edx | 716 leal 16(%edx),%edx |
699 jnz L030enc1_loop_5 | 717 jnz L032enc1_loop_5 |
700 .byte 102,15,56,221,209 | 718 .byte 102,15,56,221,209 |
701 shll $4,%ebx | 719 shll $4,%ebx |
702 movl $16,%ecx | 720 movl $16,%ecx |
703 movups (%esi),%xmm6 | 721 movups (%esi),%xmm6 |
704 paddq 16(%esp),%xmm7 | 722 paddq 16(%esp),%xmm7 |
705 leal 16(%esi),%esi | 723 leal 16(%esi),%esi |
706 subl %ebx,%ecx | 724 subl %ebx,%ecx |
707 leal 32(%ebp,%ebx,1),%edx | 725 leal 32(%ebp,%ebx,1),%edx |
708 movl %ecx,%ebx | 726 movl %ecx,%ebx |
709 jmp L031ccm64_dec_outer | 727 jmp L033ccm64_dec_outer |
710 .align 4,0x90 | 728 .align 4,0x90 |
711 L031ccm64_dec_outer: | 729 L033ccm64_dec_outer: |
712 xorps %xmm2,%xmm6 | 730 xorps %xmm2,%xmm6 |
713 movdqa %xmm7,%xmm2 | 731 movdqa %xmm7,%xmm2 |
714 movups %xmm6,(%edi) | 732 movups %xmm6,(%edi) |
715 leal 16(%edi),%edi | 733 leal 16(%edi),%edi |
716 .byte 102,15,56,0,213 | 734 .byte 102,15,56,0,213 |
717 subl $1,%eax | 735 subl $1,%eax |
718 jz L032ccm64_dec_break | 736 jz L034ccm64_dec_break |
719 movups (%ebp),%xmm0 | 737 movups (%ebp),%xmm0 |
720 movl %ebx,%ecx | 738 movl %ebx,%ecx |
721 movups 16(%ebp),%xmm1 | 739 movups 16(%ebp),%xmm1 |
722 xorps %xmm0,%xmm6 | 740 xorps %xmm0,%xmm6 |
723 xorps %xmm0,%xmm2 | 741 xorps %xmm0,%xmm2 |
724 xorps %xmm6,%xmm3 | 742 xorps %xmm6,%xmm3 |
725 movups 32(%ebp),%xmm0 | 743 movups 32(%ebp),%xmm0 |
726 L033ccm64_dec2_loop: | 744 L035ccm64_dec2_loop: |
727 .byte 102,15,56,220,209 | 745 .byte 102,15,56,220,209 |
728 .byte 102,15,56,220,217 | 746 .byte 102,15,56,220,217 |
729 movups (%edx,%ecx,1),%xmm1 | 747 movups (%edx,%ecx,1),%xmm1 |
730 addl $32,%ecx | 748 addl $32,%ecx |
731 .byte 102,15,56,220,208 | 749 .byte 102,15,56,220,208 |
732 .byte 102,15,56,220,216 | 750 .byte 102,15,56,220,216 |
733 movups -16(%edx,%ecx,1),%xmm0 | 751 movups -16(%edx,%ecx,1),%xmm0 |
734 jnz L033ccm64_dec2_loop | 752 jnz L035ccm64_dec2_loop |
735 movups (%esi),%xmm6 | 753 movups (%esi),%xmm6 |
736 paddq 16(%esp),%xmm7 | 754 paddq 16(%esp),%xmm7 |
737 .byte 102,15,56,220,209 | 755 .byte 102,15,56,220,209 |
738 .byte 102,15,56,220,217 | 756 .byte 102,15,56,220,217 |
739 .byte 102,15,56,221,208 | 757 .byte 102,15,56,221,208 |
740 .byte 102,15,56,221,216 | 758 .byte 102,15,56,221,216 |
741 leal 16(%esi),%esi | 759 leal 16(%esi),%esi |
742 jmp L031ccm64_dec_outer | 760 jmp L033ccm64_dec_outer |
743 .align 4,0x90 | 761 .align 4,0x90 |
744 L032ccm64_dec_break: | 762 L034ccm64_dec_break: |
745 movl 240(%ebp),%ecx | 763 movl 240(%ebp),%ecx |
746 movl %ebp,%edx | 764 movl %ebp,%edx |
747 movups (%edx),%xmm0 | 765 movups (%edx),%xmm0 |
748 movups 16(%edx),%xmm1 | 766 movups 16(%edx),%xmm1 |
749 xorps %xmm0,%xmm6 | 767 xorps %xmm0,%xmm6 |
750 leal 32(%edx),%edx | 768 leal 32(%edx),%edx |
751 xorps %xmm6,%xmm3 | 769 xorps %xmm6,%xmm3 |
752 L034enc1_loop_6: | 770 L036enc1_loop_6: |
753 .byte 102,15,56,220,217 | 771 .byte 102,15,56,220,217 |
754 decl %ecx | 772 decl %ecx |
755 movups (%edx),%xmm1 | 773 movups (%edx),%xmm1 |
756 leal 16(%edx),%edx | 774 leal 16(%edx),%edx |
757 jnz L034enc1_loop_6 | 775 jnz L036enc1_loop_6 |
758 .byte 102,15,56,221,217 | 776 .byte 102,15,56,221,217 |
759 movl 48(%esp),%esp | 777 movl 48(%esp),%esp |
760 movl 40(%esp),%edi | 778 movl 40(%esp),%edi |
761 movups %xmm3,(%edi) | 779 movups %xmm3,(%edi) |
| 780 pxor %xmm0,%xmm0 |
| 781 pxor %xmm1,%xmm1 |
| 782 pxor %xmm2,%xmm2 |
| 783 pxor %xmm3,%xmm3 |
| 784 pxor %xmm4,%xmm4 |
| 785 pxor %xmm5,%xmm5 |
| 786 pxor %xmm6,%xmm6 |
| 787 pxor %xmm7,%xmm7 |
762 popl %edi | 788 popl %edi |
763 popl %esi | 789 popl %esi |
764 popl %ebx | 790 popl %ebx |
765 popl %ebp | 791 popl %ebp |
766 ret | 792 ret |
767 .globl _aesni_ctr32_encrypt_blocks | 793 .globl _aesni_ctr32_encrypt_blocks |
768 .private_extern _aesni_ctr32_encrypt_blocks | 794 .private_extern _aesni_ctr32_encrypt_blocks |
769 .align 4 | 795 .align 4 |
770 _aesni_ctr32_encrypt_blocks: | 796 _aesni_ctr32_encrypt_blocks: |
771 L_aesni_ctr32_encrypt_blocks_begin: | 797 L_aesni_ctr32_encrypt_blocks_begin: |
772 pushl %ebp | 798 pushl %ebp |
773 pushl %ebx | 799 pushl %ebx |
774 pushl %esi | 800 pushl %esi |
775 pushl %edi | 801 pushl %edi |
776 movl 20(%esp),%esi | 802 movl 20(%esp),%esi |
777 movl 24(%esp),%edi | 803 movl 24(%esp),%edi |
778 movl 28(%esp),%eax | 804 movl 28(%esp),%eax |
779 movl 32(%esp),%edx | 805 movl 32(%esp),%edx |
780 movl 36(%esp),%ebx | 806 movl 36(%esp),%ebx |
781 movl %esp,%ebp | 807 movl %esp,%ebp |
782 subl $88,%esp | 808 subl $88,%esp |
783 andl $-16,%esp | 809 andl $-16,%esp |
784 movl %ebp,80(%esp) | 810 movl %ebp,80(%esp) |
785 cmpl $1,%eax | 811 cmpl $1,%eax |
786 je L035ctr32_one_shortcut | 812 je L037ctr32_one_shortcut |
787 movdqu (%ebx),%xmm7 | 813 movdqu (%ebx),%xmm7 |
788 movl $202182159,(%esp) | 814 movl $202182159,(%esp) |
789 movl $134810123,4(%esp) | 815 movl $134810123,4(%esp) |
790 movl $67438087,8(%esp) | 816 movl $67438087,8(%esp) |
791 movl $66051,12(%esp) | 817 movl $66051,12(%esp) |
792 movl $6,%ecx | 818 movl $6,%ecx |
793 xorl %ebp,%ebp | 819 xorl %ebp,%ebp |
794 movl %ecx,16(%esp) | 820 movl %ecx,16(%esp) |
795 movl %ecx,20(%esp) | 821 movl %ecx,20(%esp) |
796 movl %ecx,24(%esp) | 822 movl %ecx,24(%esp) |
(...skipping 17 matching lines...) |
814 incl %ebp | 840 incl %ebp |
815 .byte 102,15,58,34,205,2 | 841 .byte 102,15,58,34,205,2 |
816 movdqa %xmm0,48(%esp) | 842 movdqa %xmm0,48(%esp) |
817 .byte 102,15,56,0,194 | 843 .byte 102,15,56,0,194 |
818 movdqu (%edx),%xmm6 | 844 movdqu (%edx),%xmm6 |
819 movdqa %xmm1,64(%esp) | 845 movdqa %xmm1,64(%esp) |
820 .byte 102,15,56,0,202 | 846 .byte 102,15,56,0,202 |
821 pshufd $192,%xmm0,%xmm2 | 847 pshufd $192,%xmm0,%xmm2 |
822 pshufd $128,%xmm0,%xmm3 | 848 pshufd $128,%xmm0,%xmm3 |
823 cmpl $6,%eax | 849 cmpl $6,%eax |
824 jb L036ctr32_tail | 850 jb L038ctr32_tail |
825 pxor %xmm6,%xmm7 | 851 pxor %xmm6,%xmm7 |
826 shll $4,%ecx | 852 shll $4,%ecx |
827 movl $16,%ebx | 853 movl $16,%ebx |
828 movdqa %xmm7,32(%esp) | 854 movdqa %xmm7,32(%esp) |
829 movl %edx,%ebp | 855 movl %edx,%ebp |
830 subl %ecx,%ebx | 856 subl %ecx,%ebx |
831 leal 32(%edx,%ecx,1),%edx | 857 leal 32(%edx,%ecx,1),%edx |
832 subl $6,%eax | 858 subl $6,%eax |
833 jmp L037ctr32_loop6 | 859 jmp L039ctr32_loop6 |
834 .align 4,0x90 | 860 .align 4,0x90 |
835 L037ctr32_loop6: | 861 L039ctr32_loop6: |
836 pshufd $64,%xmm0,%xmm4 | 862 pshufd $64,%xmm0,%xmm4 |
837 movdqa 32(%esp),%xmm0 | 863 movdqa 32(%esp),%xmm0 |
838 pshufd $192,%xmm1,%xmm5 | 864 pshufd $192,%xmm1,%xmm5 |
839 pxor %xmm0,%xmm2 | 865 pxor %xmm0,%xmm2 |
840 pshufd $128,%xmm1,%xmm6 | 866 pshufd $128,%xmm1,%xmm6 |
841 pxor %xmm0,%xmm3 | 867 pxor %xmm0,%xmm3 |
842 pshufd $64,%xmm1,%xmm7 | 868 pshufd $64,%xmm1,%xmm7 |
843 movups 16(%ebp),%xmm1 | 869 movups 16(%ebp),%xmm1 |
844 pxor %xmm0,%xmm4 | 870 pxor %xmm0,%xmm4 |
845 pxor %xmm0,%xmm5 | 871 pxor %xmm0,%xmm5 |
(...skipping 33 matching lines...) |
879 movups %xmm5,48(%edi) | 905 movups %xmm5,48(%edi) |
880 xorps %xmm3,%xmm7 | 906 xorps %xmm3,%xmm7 |
881 movdqa %xmm1,64(%esp) | 907 movdqa %xmm1,64(%esp) |
882 .byte 102,15,56,0,202 | 908 .byte 102,15,56,0,202 |
883 movups %xmm6,64(%edi) | 909 movups %xmm6,64(%edi) |
884 pshufd $192,%xmm0,%xmm2 | 910 pshufd $192,%xmm0,%xmm2 |
885 movups %xmm7,80(%edi) | 911 movups %xmm7,80(%edi) |
886 leal 96(%edi),%edi | 912 leal 96(%edi),%edi |
887 pshufd $128,%xmm0,%xmm3 | 913 pshufd $128,%xmm0,%xmm3 |
888 subl $6,%eax | 914 subl $6,%eax |
889 jnc L037ctr32_loop6 | 915 jnc L039ctr32_loop6 |
890 addl $6,%eax | 916 addl $6,%eax |
891 jz L038ctr32_ret | 917 jz L040ctr32_ret |
892 movdqu (%ebp),%xmm7 | 918 movdqu (%ebp),%xmm7 |
893 movl %ebp,%edx | 919 movl %ebp,%edx |
894 pxor 32(%esp),%xmm7 | 920 pxor 32(%esp),%xmm7 |
895 movl 240(%ebp),%ecx | 921 movl 240(%ebp),%ecx |
896 L036ctr32_tail: | 922 L038ctr32_tail: |
897 por %xmm7,%xmm2 | 923 por %xmm7,%xmm2 |
898 cmpl $2,%eax | 924 cmpl $2,%eax |
899 jb L039ctr32_one | 925 jb L041ctr32_one |
900 pshufd $64,%xmm0,%xmm4 | 926 pshufd $64,%xmm0,%xmm4 |
901 por %xmm7,%xmm3 | 927 por %xmm7,%xmm3 |
902 je L040ctr32_two | 928 je L042ctr32_two |
903 pshufd $192,%xmm1,%xmm5 | 929 pshufd $192,%xmm1,%xmm5 |
904 por %xmm7,%xmm4 | 930 por %xmm7,%xmm4 |
905 cmpl $4,%eax | 931 cmpl $4,%eax |
906 jb L041ctr32_three | 932 jb L043ctr32_three |
907 pshufd $128,%xmm1,%xmm6 | 933 pshufd $128,%xmm1,%xmm6 |
908 por %xmm7,%xmm5 | 934 por %xmm7,%xmm5 |
909 je L042ctr32_four | 935 je L044ctr32_four |
910 por %xmm7,%xmm6 | 936 por %xmm7,%xmm6 |
911 call __aesni_encrypt6 | 937 call __aesni_encrypt6 |
912 movups (%esi),%xmm1 | 938 movups (%esi),%xmm1 |
913 movups 16(%esi),%xmm0 | 939 movups 16(%esi),%xmm0 |
914 xorps %xmm1,%xmm2 | 940 xorps %xmm1,%xmm2 |
915 movups 32(%esi),%xmm1 | 941 movups 32(%esi),%xmm1 |
916 xorps %xmm0,%xmm3 | 942 xorps %xmm0,%xmm3 |
917 movups 48(%esi),%xmm0 | 943 movups 48(%esi),%xmm0 |
918 xorps %xmm1,%xmm4 | 944 xorps %xmm1,%xmm4 |
919 movups 64(%esi),%xmm1 | 945 movups 64(%esi),%xmm1 |
920 xorps %xmm0,%xmm5 | 946 xorps %xmm0,%xmm5 |
921 movups %xmm2,(%edi) | 947 movups %xmm2,(%edi) |
922 xorps %xmm1,%xmm6 | 948 xorps %xmm1,%xmm6 |
923 movups %xmm3,16(%edi) | 949 movups %xmm3,16(%edi) |
924 movups %xmm4,32(%edi) | 950 movups %xmm4,32(%edi) |
925 movups %xmm5,48(%edi) | 951 movups %xmm5,48(%edi) |
926 movups %xmm6,64(%edi) | 952 movups %xmm6,64(%edi) |
927 jmp L038ctr32_ret | 953 jmp L040ctr32_ret |
928 .align 4,0x90 | 954 .align 4,0x90 |
929 L035ctr32_one_shortcut: | 955 L037ctr32_one_shortcut: |
930 movups (%ebx),%xmm2 | 956 movups (%ebx),%xmm2 |
931 movl 240(%edx),%ecx | 957 movl 240(%edx),%ecx |
932 L039ctr32_one: | 958 L041ctr32_one: |
933 movups (%edx),%xmm0 | 959 movups (%edx),%xmm0 |
934 movups 16(%edx),%xmm1 | 960 movups 16(%edx),%xmm1 |
935 leal 32(%edx),%edx | 961 leal 32(%edx),%edx |
936 xorps %xmm0,%xmm2 | 962 xorps %xmm0,%xmm2 |
937 L043enc1_loop_7: | 963 L045enc1_loop_7: |
938 .byte 102,15,56,220,209 | 964 .byte 102,15,56,220,209 |
939 decl %ecx | 965 decl %ecx |
940 movups (%edx),%xmm1 | 966 movups (%edx),%xmm1 |
941 leal 16(%edx),%edx | 967 leal 16(%edx),%edx |
942 jnz L043enc1_loop_7 | 968 jnz L045enc1_loop_7 |
943 .byte 102,15,56,221,209 | 969 .byte 102,15,56,221,209 |
944 movups (%esi),%xmm6 | 970 movups (%esi),%xmm6 |
945 xorps %xmm2,%xmm6 | 971 xorps %xmm2,%xmm6 |
946 movups %xmm6,(%edi) | 972 movups %xmm6,(%edi) |
947 jmp L038ctr32_ret | 973 jmp L040ctr32_ret |
948 .align 4,0x90 | 974 .align 4,0x90 |
949 L040ctr32_two: | 975 L042ctr32_two: |
950 call __aesni_encrypt2 | 976 call __aesni_encrypt2 |
951 movups (%esi),%xmm5 | 977 movups (%esi),%xmm5 |
952 movups 16(%esi),%xmm6 | 978 movups 16(%esi),%xmm6 |
953 xorps %xmm5,%xmm2 | 979 xorps %xmm5,%xmm2 |
954 xorps %xmm6,%xmm3 | 980 xorps %xmm6,%xmm3 |
955 movups %xmm2,(%edi) | 981 movups %xmm2,(%edi) |
956 movups %xmm3,16(%edi) | 982 movups %xmm3,16(%edi) |
957 jmp L038ctr32_ret | 983 jmp L040ctr32_ret |
958 .align 4,0x90 | 984 .align 4,0x90 |
959 L041ctr32_three: | 985 L043ctr32_three: |
960 call __aesni_encrypt3 | 986 call __aesni_encrypt3 |
961 movups (%esi),%xmm5 | 987 movups (%esi),%xmm5 |
962 movups 16(%esi),%xmm6 | 988 movups 16(%esi),%xmm6 |
963 xorps %xmm5,%xmm2 | 989 xorps %xmm5,%xmm2 |
964 movups 32(%esi),%xmm7 | 990 movups 32(%esi),%xmm7 |
965 xorps %xmm6,%xmm3 | 991 xorps %xmm6,%xmm3 |
966 movups %xmm2,(%edi) | 992 movups %xmm2,(%edi) |
967 xorps %xmm7,%xmm4 | 993 xorps %xmm7,%xmm4 |
968 movups %xmm3,16(%edi) | 994 movups %xmm3,16(%edi) |
969 movups %xmm4,32(%edi) | 995 movups %xmm4,32(%edi) |
970 jmp L038ctr32_ret | 996 jmp L040ctr32_ret |
971 .align 4,0x90 | 997 .align 4,0x90 |
972 L042ctr32_four: | 998 L044ctr32_four: |
973 call __aesni_encrypt4 | 999 call __aesni_encrypt4 |
974 movups (%esi),%xmm6 | 1000 movups (%esi),%xmm6 |
975 movups 16(%esi),%xmm7 | 1001 movups 16(%esi),%xmm7 |
976 movups 32(%esi),%xmm1 | 1002 movups 32(%esi),%xmm1 |
977 xorps %xmm6,%xmm2 | 1003 xorps %xmm6,%xmm2 |
978 movups 48(%esi),%xmm0 | 1004 movups 48(%esi),%xmm0 |
979 xorps %xmm7,%xmm3 | 1005 xorps %xmm7,%xmm3 |
980 movups %xmm2,(%edi) | 1006 movups %xmm2,(%edi) |
981 xorps %xmm1,%xmm4 | 1007 xorps %xmm1,%xmm4 |
982 movups %xmm3,16(%edi) | 1008 movups %xmm3,16(%edi) |
983 xorps %xmm0,%xmm5 | 1009 xorps %xmm0,%xmm5 |
984 movups %xmm4,32(%edi) | 1010 movups %xmm4,32(%edi) |
985 movups %xmm5,48(%edi) | 1011 movups %xmm5,48(%edi) |
986 L038ctr32_ret: | 1012 L040ctr32_ret: |
| 1013 pxor %xmm0,%xmm0 |
| 1014 pxor %xmm1,%xmm1 |
| 1015 pxor %xmm2,%xmm2 |
| 1016 pxor %xmm3,%xmm3 |
| 1017 pxor %xmm4,%xmm4 |
| 1018 movdqa %xmm0,32(%esp) |
| 1019 pxor %xmm5,%xmm5 |
| 1020 movdqa %xmm0,48(%esp) |
| 1021 pxor %xmm6,%xmm6 |
| 1022 movdqa %xmm0,64(%esp) |
| 1023 pxor %xmm7,%xmm7 |
987 movl 80(%esp),%esp | 1024 movl 80(%esp),%esp |
988 popl %edi | 1025 popl %edi |
989 popl %esi | 1026 popl %esi |
990 popl %ebx | 1027 popl %ebx |
991 popl %ebp | 1028 popl %ebp |
992 ret | 1029 ret |
993 .globl _aesni_xts_encrypt | 1030 .globl _aesni_xts_encrypt |
994 .private_extern _aesni_xts_encrypt | 1031 .private_extern _aesni_xts_encrypt |
995 .align 4 | 1032 .align 4 |
996 _aesni_xts_encrypt: | 1033 _aesni_xts_encrypt: |
997 L_aesni_xts_encrypt_begin: | 1034 L_aesni_xts_encrypt_begin: |
998 pushl %ebp | 1035 pushl %ebp |
999 pushl %ebx | 1036 pushl %ebx |
1000 pushl %esi | 1037 pushl %esi |
1001 pushl %edi | 1038 pushl %edi |
1002 movl 36(%esp),%edx | 1039 movl 36(%esp),%edx |
1003 movl 40(%esp),%esi | 1040 movl 40(%esp),%esi |
1004 movl 240(%edx),%ecx | 1041 movl 240(%edx),%ecx |
1005 movups (%esi),%xmm2 | 1042 movups (%esi),%xmm2 |
1006 movups (%edx),%xmm0 | 1043 movups (%edx),%xmm0 |
1007 movups 16(%edx),%xmm1 | 1044 movups 16(%edx),%xmm1 |
1008 leal 32(%edx),%edx | 1045 leal 32(%edx),%edx |
1009 xorps %xmm0,%xmm2 | 1046 xorps %xmm0,%xmm2 |
1010 L044enc1_loop_8: | 1047 L046enc1_loop_8: |
1011 .byte 102,15,56,220,209 | 1048 .byte 102,15,56,220,209 |
1012 decl %ecx | 1049 decl %ecx |
1013 movups (%edx),%xmm1 | 1050 movups (%edx),%xmm1 |
1014 leal 16(%edx),%edx | 1051 leal 16(%edx),%edx |
1015 jnz L044enc1_loop_8 | 1052 jnz L046enc1_loop_8 |
1016 .byte 102,15,56,221,209 | 1053 .byte 102,15,56,221,209 |
1017 movl 20(%esp),%esi | 1054 movl 20(%esp),%esi |
1018 movl 24(%esp),%edi | 1055 movl 24(%esp),%edi |
1019 movl 28(%esp),%eax | 1056 movl 28(%esp),%eax |
1020 movl 32(%esp),%edx | 1057 movl 32(%esp),%edx |
1021 movl %esp,%ebp | 1058 movl %esp,%ebp |
1022 subl $120,%esp | 1059 subl $120,%esp |
1023 movl 240(%edx),%ecx | 1060 movl 240(%edx),%ecx |
1024 andl $-16,%esp | 1061 andl $-16,%esp |
1025 movl $135,96(%esp) | 1062 movl $135,96(%esp) |
1026 movl $0,100(%esp) | 1063 movl $0,100(%esp) |
1027 movl $1,104(%esp) | 1064 movl $1,104(%esp) |
1028 movl $0,108(%esp) | 1065 movl $0,108(%esp) |
1029 movl %eax,112(%esp) | 1066 movl %eax,112(%esp) |
1030 movl %ebp,116(%esp) | 1067 movl %ebp,116(%esp) |
1031 movdqa %xmm2,%xmm1 | 1068 movdqa %xmm2,%xmm1 |
1032 pxor %xmm0,%xmm0 | 1069 pxor %xmm0,%xmm0 |
1033 movdqa 96(%esp),%xmm3 | 1070 movdqa 96(%esp),%xmm3 |
1034 pcmpgtd %xmm1,%xmm0 | 1071 pcmpgtd %xmm1,%xmm0 |
1035 andl $-16,%eax | 1072 andl $-16,%eax |
1036 movl %edx,%ebp | 1073 movl %edx,%ebp |
1037 movl %ecx,%ebx | 1074 movl %ecx,%ebx |
1038 subl $96,%eax | 1075 subl $96,%eax |
1039 jc L045xts_enc_short | 1076 jc L047xts_enc_short |
1040 shll $4,%ecx | 1077 shll $4,%ecx |
1041 movl $16,%ebx | 1078 movl $16,%ebx |
1042 subl %ecx,%ebx | 1079 subl %ecx,%ebx |
1043 leal 32(%edx,%ecx,1),%edx | 1080 leal 32(%edx,%ecx,1),%edx |
1044 jmp L046xts_enc_loop6 | 1081 jmp L048xts_enc_loop6 |
1045 .align 4,0x90 | 1082 .align 4,0x90 |
1046 L046xts_enc_loop6: | 1083 L048xts_enc_loop6: |
1047 pshufd $19,%xmm0,%xmm2 | 1084 pshufd $19,%xmm0,%xmm2 |
1048 pxor %xmm0,%xmm0 | 1085 pxor %xmm0,%xmm0 |
1049 movdqa %xmm1,(%esp) | 1086 movdqa %xmm1,(%esp) |
1050 paddq %xmm1,%xmm1 | 1087 paddq %xmm1,%xmm1 |
1051 pand %xmm3,%xmm2 | 1088 pand %xmm3,%xmm2 |
1052 pcmpgtd %xmm1,%xmm0 | 1089 pcmpgtd %xmm1,%xmm0 |
1053 pxor %xmm2,%xmm1 | 1090 pxor %xmm2,%xmm1 |
1054 pshufd $19,%xmm0,%xmm2 | 1091 pshufd $19,%xmm0,%xmm2 |
1055 pxor %xmm0,%xmm0 | 1092 pxor %xmm0,%xmm0 |
1056 movdqa %xmm1,16(%esp) | 1093 movdqa %xmm1,16(%esp) |
(...skipping 68 matching lines...) |
1125 pshufd $19,%xmm0,%xmm2 | 1162 pshufd $19,%xmm0,%xmm2 |
1126 movups %xmm7,80(%edi) | 1163 movups %xmm7,80(%edi) |
1127 leal 96(%edi),%edi | 1164 leal 96(%edi),%edi |
1128 movdqa 96(%esp),%xmm3 | 1165 movdqa 96(%esp),%xmm3 |
1129 pxor %xmm0,%xmm0 | 1166 pxor %xmm0,%xmm0 |
1130 paddq %xmm1,%xmm1 | 1167 paddq %xmm1,%xmm1 |
1131 pand %xmm3,%xmm2 | 1168 pand %xmm3,%xmm2 |
1132 pcmpgtd %xmm1,%xmm0 | 1169 pcmpgtd %xmm1,%xmm0 |
1133 pxor %xmm2,%xmm1 | 1170 pxor %xmm2,%xmm1 |
1134 subl $96,%eax | 1171 subl $96,%eax |
1135 jnc L046xts_enc_loop6 | 1172 jnc L048xts_enc_loop6 |
1136 movl 240(%ebp),%ecx | 1173 movl 240(%ebp),%ecx |
1137 movl %ebp,%edx | 1174 movl %ebp,%edx |
1138 movl %ecx,%ebx | 1175 movl %ecx,%ebx |
1139 L045xts_enc_short: | 1176 L047xts_enc_short: |
1140 addl $96,%eax | 1177 addl $96,%eax |
1141 jz L047xts_enc_done6x | 1178 jz L049xts_enc_done6x |
1142 movdqa %xmm1,%xmm5 | 1179 movdqa %xmm1,%xmm5 |
1143 cmpl $32,%eax | 1180 cmpl $32,%eax |
1144 jb L048xts_enc_one | 1181 jb L050xts_enc_one |
1145 pshufd $19,%xmm0,%xmm2 | 1182 pshufd $19,%xmm0,%xmm2 |
1146 pxor %xmm0,%xmm0 | 1183 pxor %xmm0,%xmm0 |
1147 paddq %xmm1,%xmm1 | 1184 paddq %xmm1,%xmm1 |
1148 pand %xmm3,%xmm2 | 1185 pand %xmm3,%xmm2 |
1149 pcmpgtd %xmm1,%xmm0 | 1186 pcmpgtd %xmm1,%xmm0 |
1150 pxor %xmm2,%xmm1 | 1187 pxor %xmm2,%xmm1 |
1151 je L049xts_enc_two | 1188 je L051xts_enc_two |
1152 pshufd $19,%xmm0,%xmm2 | 1189 pshufd $19,%xmm0,%xmm2 |
1153 pxor %xmm0,%xmm0 | 1190 pxor %xmm0,%xmm0 |
1154 movdqa %xmm1,%xmm6 | 1191 movdqa %xmm1,%xmm6 |
1155 paddq %xmm1,%xmm1 | 1192 paddq %xmm1,%xmm1 |
1156 pand %xmm3,%xmm2 | 1193 pand %xmm3,%xmm2 |
1157 pcmpgtd %xmm1,%xmm0 | 1194 pcmpgtd %xmm1,%xmm0 |
1158 pxor %xmm2,%xmm1 | 1195 pxor %xmm2,%xmm1 |
1159 cmpl $64,%eax | 1196 cmpl $64,%eax |
1160 jb L050xts_enc_three | 1197 jb L052xts_enc_three |
1161 pshufd $19,%xmm0,%xmm2 | 1198 pshufd $19,%xmm0,%xmm2 |
1162 pxor %xmm0,%xmm0 | 1199 pxor %xmm0,%xmm0 |
1163 movdqa %xmm1,%xmm7 | 1200 movdqa %xmm1,%xmm7 |
1164 paddq %xmm1,%xmm1 | 1201 paddq %xmm1,%xmm1 |
1165 pand %xmm3,%xmm2 | 1202 pand %xmm3,%xmm2 |
1166 pcmpgtd %xmm1,%xmm0 | 1203 pcmpgtd %xmm1,%xmm0 |
1167 pxor %xmm2,%xmm1 | 1204 pxor %xmm2,%xmm1 |
1168 movdqa %xmm5,(%esp) | 1205 movdqa %xmm5,(%esp) |
1169 movdqa %xmm6,16(%esp) | 1206 movdqa %xmm6,16(%esp) |
1170 je L051xts_enc_four | 1207 je L053xts_enc_four |
1171 movdqa %xmm7,32(%esp) | 1208 movdqa %xmm7,32(%esp) |
1172 pshufd $19,%xmm0,%xmm7 | 1209 pshufd $19,%xmm0,%xmm7 |
1173 movdqa %xmm1,48(%esp) | 1210 movdqa %xmm1,48(%esp) |
1174 paddq %xmm1,%xmm1 | 1211 paddq %xmm1,%xmm1 |
1175 pand %xmm3,%xmm7 | 1212 pand %xmm3,%xmm7 |
1176 pxor %xmm1,%xmm7 | 1213 pxor %xmm1,%xmm7 |
1177 movdqu (%esi),%xmm2 | 1214 movdqu (%esi),%xmm2 |
1178 movdqu 16(%esi),%xmm3 | 1215 movdqu 16(%esi),%xmm3 |
1179 movdqu 32(%esi),%xmm4 | 1216 movdqu 32(%esi),%xmm4 |
1180 pxor (%esp),%xmm2 | 1217 pxor (%esp),%xmm2 |
(...skipping 11 matching lines...) |
1192 xorps 16(%esp),%xmm3 | 1229 xorps 16(%esp),%xmm3 |
1193 xorps 32(%esp),%xmm4 | 1230 xorps 32(%esp),%xmm4 |
1194 movups %xmm2,(%edi) | 1231 movups %xmm2,(%edi) |
1195 xorps 48(%esp),%xmm5 | 1232 xorps 48(%esp),%xmm5 |
1196 movups %xmm3,16(%edi) | 1233 movups %xmm3,16(%edi) |
1197 xorps %xmm1,%xmm6 | 1234 xorps %xmm1,%xmm6 |
1198 movups %xmm4,32(%edi) | 1235 movups %xmm4,32(%edi) |
1199 movups %xmm5,48(%edi) | 1236 movups %xmm5,48(%edi) |
1200 movups %xmm6,64(%edi) | 1237 movups %xmm6,64(%edi) |
1201 leal 80(%edi),%edi | 1238 leal 80(%edi),%edi |
1202 jmp L052xts_enc_done | 1239 jmp L054xts_enc_done |
1203 .align 4,0x90 | 1240 .align 4,0x90 |
1204 L048xts_enc_one: | 1241 L050xts_enc_one: |
1205 movups (%esi),%xmm2 | 1242 movups (%esi),%xmm2 |
1206 leal 16(%esi),%esi | 1243 leal 16(%esi),%esi |
1207 xorps %xmm5,%xmm2 | 1244 xorps %xmm5,%xmm2 |
1208 movups (%edx),%xmm0 | 1245 movups (%edx),%xmm0 |
1209 movups 16(%edx),%xmm1 | 1246 movups 16(%edx),%xmm1 |
1210 leal 32(%edx),%edx | 1247 leal 32(%edx),%edx |
1211 xorps %xmm0,%xmm2 | 1248 xorps %xmm0,%xmm2 |
1212 L053enc1_loop_9: | 1249 L055enc1_loop_9: |
1213 .byte 102,15,56,220,209 | 1250 .byte 102,15,56,220,209 |
1214 decl %ecx | 1251 decl %ecx |
1215 movups (%edx),%xmm1 | 1252 movups (%edx),%xmm1 |
1216 leal 16(%edx),%edx | 1253 leal 16(%edx),%edx |
1217 jnz L053enc1_loop_9 | 1254 jnz L055enc1_loop_9 |
1218 .byte 102,15,56,221,209 | 1255 .byte 102,15,56,221,209 |
1219 xorps %xmm5,%xmm2 | 1256 xorps %xmm5,%xmm2 |
1220 movups %xmm2,(%edi) | 1257 movups %xmm2,(%edi) |
1221 leal 16(%edi),%edi | 1258 leal 16(%edi),%edi |
1222 movdqa %xmm5,%xmm1 | 1259 movdqa %xmm5,%xmm1 |
1223 jmp L052xts_enc_done | 1260 jmp L054xts_enc_done |
1224 .align 4,0x90 | 1261 .align 4,0x90 |
1225 L049xts_enc_two: | 1262 L051xts_enc_two: |
1226 movaps %xmm1,%xmm6 | 1263 movaps %xmm1,%xmm6 |
1227 movups (%esi),%xmm2 | 1264 movups (%esi),%xmm2 |
1228 movups 16(%esi),%xmm3 | 1265 movups 16(%esi),%xmm3 |
1229 leal 32(%esi),%esi | 1266 leal 32(%esi),%esi |
1230 xorps %xmm5,%xmm2 | 1267 xorps %xmm5,%xmm2 |
1231 xorps %xmm6,%xmm3 | 1268 xorps %xmm6,%xmm3 |
1232 call __aesni_encrypt2 | 1269 call __aesni_encrypt2 |
1233 xorps %xmm5,%xmm2 | 1270 xorps %xmm5,%xmm2 |
1234 xorps %xmm6,%xmm3 | 1271 xorps %xmm6,%xmm3 |
1235 movups %xmm2,(%edi) | 1272 movups %xmm2,(%edi) |
1236 movups %xmm3,16(%edi) | 1273 movups %xmm3,16(%edi) |
1237 leal 32(%edi),%edi | 1274 leal 32(%edi),%edi |
1238 movdqa %xmm6,%xmm1 | 1275 movdqa %xmm6,%xmm1 |
1239 jmp L052xts_enc_done | 1276 jmp L054xts_enc_done |
1240 .align 4,0x90 | 1277 .align 4,0x90 |
1241 L050xts_enc_three: | 1278 L052xts_enc_three: |
1242 movaps %xmm1,%xmm7 | 1279 movaps %xmm1,%xmm7 |
1243 movups (%esi),%xmm2 | 1280 movups (%esi),%xmm2 |
1244 movups 16(%esi),%xmm3 | 1281 movups 16(%esi),%xmm3 |
1245 movups 32(%esi),%xmm4 | 1282 movups 32(%esi),%xmm4 |
1246 leal 48(%esi),%esi | 1283 leal 48(%esi),%esi |
1247 xorps %xmm5,%xmm2 | 1284 xorps %xmm5,%xmm2 |
1248 xorps %xmm6,%xmm3 | 1285 xorps %xmm6,%xmm3 |
1249 xorps %xmm7,%xmm4 | 1286 xorps %xmm7,%xmm4 |
1250 call __aesni_encrypt3 | 1287 call __aesni_encrypt3 |
1251 xorps %xmm5,%xmm2 | 1288 xorps %xmm5,%xmm2 |
1252 xorps %xmm6,%xmm3 | 1289 xorps %xmm6,%xmm3 |
1253 xorps %xmm7,%xmm4 | 1290 xorps %xmm7,%xmm4 |
1254 movups %xmm2,(%edi) | 1291 movups %xmm2,(%edi) |
1255 movups %xmm3,16(%edi) | 1292 movups %xmm3,16(%edi) |
1256 movups %xmm4,32(%edi) | 1293 movups %xmm4,32(%edi) |
1257 leal 48(%edi),%edi | 1294 leal 48(%edi),%edi |
1258 movdqa %xmm7,%xmm1 | 1295 movdqa %xmm7,%xmm1 |
1259 jmp L052xts_enc_done | 1296 jmp L054xts_enc_done |
1260 .align 4,0x90 | 1297 .align 4,0x90 |
1261 L051xts_enc_four: | 1298 L053xts_enc_four: |
1262 movaps %xmm1,%xmm6 | 1299 movaps %xmm1,%xmm6 |
1263 movups (%esi),%xmm2 | 1300 movups (%esi),%xmm2 |
1264 movups 16(%esi),%xmm3 | 1301 movups 16(%esi),%xmm3 |
1265 movups 32(%esi),%xmm4 | 1302 movups 32(%esi),%xmm4 |
1266 xorps (%esp),%xmm2 | 1303 xorps (%esp),%xmm2 |
1267 movups 48(%esi),%xmm5 | 1304 movups 48(%esi),%xmm5 |
1268 leal 64(%esi),%esi | 1305 leal 64(%esi),%esi |
1269 xorps 16(%esp),%xmm3 | 1306 xorps 16(%esp),%xmm3 |
1270 xorps %xmm7,%xmm4 | 1307 xorps %xmm7,%xmm4 |
1271 xorps %xmm6,%xmm5 | 1308 xorps %xmm6,%xmm5 |
1272 call __aesni_encrypt4 | 1309 call __aesni_encrypt4 |
1273 xorps (%esp),%xmm2 | 1310 xorps (%esp),%xmm2 |
1274 xorps 16(%esp),%xmm3 | 1311 xorps 16(%esp),%xmm3 |
1275 xorps %xmm7,%xmm4 | 1312 xorps %xmm7,%xmm4 |
1276 movups %xmm2,(%edi) | 1313 movups %xmm2,(%edi) |
1277 xorps %xmm6,%xmm5 | 1314 xorps %xmm6,%xmm5 |
1278 movups %xmm3,16(%edi) | 1315 movups %xmm3,16(%edi) |
1279 movups %xmm4,32(%edi) | 1316 movups %xmm4,32(%edi) |
1280 movups %xmm5,48(%edi) | 1317 movups %xmm5,48(%edi) |
1281 leal 64(%edi),%edi | 1318 leal 64(%edi),%edi |
1282 movdqa %xmm6,%xmm1 | 1319 movdqa %xmm6,%xmm1 |
1283 jmp L052xts_enc_done | 1320 jmp L054xts_enc_done |
1284 .align 4,0x90 | 1321 .align 4,0x90 |
1285 L047xts_enc_done6x: | 1322 L049xts_enc_done6x: |
1286 movl 112(%esp),%eax | 1323 movl 112(%esp),%eax |
1287 andl $15,%eax | 1324 andl $15,%eax |
1288 jz L054xts_enc_ret | 1325 jz L056xts_enc_ret |
1289 movdqa %xmm1,%xmm5 | 1326 movdqa %xmm1,%xmm5 |
1290 movl %eax,112(%esp) | 1327 movl %eax,112(%esp) |
1291 jmp L055xts_enc_steal | 1328 jmp L057xts_enc_steal |
1292 .align 4,0x90 | 1329 .align 4,0x90 |
1293 L052xts_enc_done: | 1330 L054xts_enc_done: |
1294 movl 112(%esp),%eax | 1331 movl 112(%esp),%eax |
1295 pxor %xmm0,%xmm0 | 1332 pxor %xmm0,%xmm0 |
1296 andl $15,%eax | 1333 andl $15,%eax |
1297 jz L054xts_enc_ret | 1334 jz L056xts_enc_ret |
1298 pcmpgtd %xmm1,%xmm0 | 1335 pcmpgtd %xmm1,%xmm0 |
1299 movl %eax,112(%esp) | 1336 movl %eax,112(%esp) |
1300 pshufd $19,%xmm0,%xmm5 | 1337 pshufd $19,%xmm0,%xmm5 |
1301 paddq %xmm1,%xmm1 | 1338 paddq %xmm1,%xmm1 |
1302 pand 96(%esp),%xmm5 | 1339 pand 96(%esp),%xmm5 |
1303 pxor %xmm1,%xmm5 | 1340 pxor %xmm1,%xmm5 |
1304 L055xts_enc_steal: | 1341 L057xts_enc_steal: |
1305 movzbl (%esi),%ecx | 1342 movzbl (%esi),%ecx |
1306 movzbl -16(%edi),%edx | 1343 movzbl -16(%edi),%edx |
1307 leal 1(%esi),%esi | 1344 leal 1(%esi),%esi |
1308 movb %cl,-16(%edi) | 1345 movb %cl,-16(%edi) |
1309 movb %dl,(%edi) | 1346 movb %dl,(%edi) |
1310 leal 1(%edi),%edi | 1347 leal 1(%edi),%edi |
1311 subl $1,%eax | 1348 subl $1,%eax |
1312 jnz L055xts_enc_steal | 1349 jnz L057xts_enc_steal |
1313 subl 112(%esp),%edi | 1350 subl 112(%esp),%edi |
1314 movl %ebp,%edx | 1351 movl %ebp,%edx |
1315 movl %ebx,%ecx | 1352 movl %ebx,%ecx |
1316 movups -16(%edi),%xmm2 | 1353 movups -16(%edi),%xmm2 |
1317 xorps %xmm5,%xmm2 | 1354 xorps %xmm5,%xmm2 |
1318 movups (%edx),%xmm0 | 1355 movups (%edx),%xmm0 |
1319 movups 16(%edx),%xmm1 | 1356 movups 16(%edx),%xmm1 |
1320 leal 32(%edx),%edx | 1357 leal 32(%edx),%edx |
1321 xorps %xmm0,%xmm2 | 1358 xorps %xmm0,%xmm2 |
1322 L056enc1_loop_10: | 1359 L058enc1_loop_10: |
1323 .byte 102,15,56,220,209 | 1360 .byte 102,15,56,220,209 |
1324 decl %ecx | 1361 decl %ecx |
1325 movups (%edx),%xmm1 | 1362 movups (%edx),%xmm1 |
1326 leal 16(%edx),%edx | 1363 leal 16(%edx),%edx |
1327 jnz L056enc1_loop_10 | 1364 jnz L058enc1_loop_10 |
1328 .byte 102,15,56,221,209 | 1365 .byte 102,15,56,221,209 |
1329 xorps %xmm5,%xmm2 | 1366 xorps %xmm5,%xmm2 |
1330 movups %xmm2,-16(%edi) | 1367 movups %xmm2,-16(%edi) |
1331 L054xts_enc_ret: | 1368 L056xts_enc_ret: |
| 1369 pxor %xmm0,%xmm0 |
| 1370 pxor %xmm1,%xmm1 |
| 1371 pxor %xmm2,%xmm2 |
| 1372 movdqa %xmm0,(%esp) |
| 1373 pxor %xmm3,%xmm3 |
| 1374 movdqa %xmm0,16(%esp) |
| 1375 pxor %xmm4,%xmm4 |
| 1376 movdqa %xmm0,32(%esp) |
| 1377 pxor %xmm5,%xmm5 |
| 1378 movdqa %xmm0,48(%esp) |
| 1379 pxor %xmm6,%xmm6 |
| 1380 movdqa %xmm0,64(%esp) |
| 1381 pxor %xmm7,%xmm7 |
| 1382 movdqa %xmm0,80(%esp) |
1332 movl 116(%esp),%esp | 1383 movl 116(%esp),%esp |
1333 popl %edi | 1384 popl %edi |
1334 popl %esi | 1385 popl %esi |
1335 popl %ebx | 1386 popl %ebx |
1336 popl %ebp | 1387 popl %ebp |
1337 ret | 1388 ret |
1338 .globl _aesni_xts_decrypt | 1389 .globl _aesni_xts_decrypt |
1339 .private_extern _aesni_xts_decrypt | 1390 .private_extern _aesni_xts_decrypt |
1340 .align 4 | 1391 .align 4 |
1341 _aesni_xts_decrypt: | 1392 _aesni_xts_decrypt: |
1342 L_aesni_xts_decrypt_begin: | 1393 L_aesni_xts_decrypt_begin: |
1343 pushl %ebp | 1394 pushl %ebp |
1344 pushl %ebx | 1395 pushl %ebx |
1345 pushl %esi | 1396 pushl %esi |
1346 pushl %edi | 1397 pushl %edi |
1347 movl 36(%esp),%edx | 1398 movl 36(%esp),%edx |
1348 movl 40(%esp),%esi | 1399 movl 40(%esp),%esi |
1349 movl 240(%edx),%ecx | 1400 movl 240(%edx),%ecx |
1350 movups (%esi),%xmm2 | 1401 movups (%esi),%xmm2 |
1351 movups (%edx),%xmm0 | 1402 movups (%edx),%xmm0 |
1352 movups 16(%edx),%xmm1 | 1403 movups 16(%edx),%xmm1 |
1353 leal 32(%edx),%edx | 1404 leal 32(%edx),%edx |
1354 xorps %xmm0,%xmm2 | 1405 xorps %xmm0,%xmm2 |
1355 L057enc1_loop_11: | 1406 L059enc1_loop_11: |
1356 .byte 102,15,56,220,209 | 1407 .byte 102,15,56,220,209 |
1357 decl %ecx | 1408 decl %ecx |
1358 movups (%edx),%xmm1 | 1409 movups (%edx),%xmm1 |
1359 leal 16(%edx),%edx | 1410 leal 16(%edx),%edx |
1360 jnz L057enc1_loop_11 | 1411 jnz L059enc1_loop_11 |
1361 .byte 102,15,56,221,209 | 1412 .byte 102,15,56,221,209 |
1362 movl 20(%esp),%esi | 1413 movl 20(%esp),%esi |
1363 movl 24(%esp),%edi | 1414 movl 24(%esp),%edi |
1364 movl 28(%esp),%eax | 1415 movl 28(%esp),%eax |
1365 movl 32(%esp),%edx | 1416 movl 32(%esp),%edx |
1366 movl %esp,%ebp | 1417 movl %esp,%ebp |
1367 subl $120,%esp | 1418 subl $120,%esp |
1368 andl $-16,%esp | 1419 andl $-16,%esp |
1369 xorl %ebx,%ebx | 1420 xorl %ebx,%ebx |
1370 testl $15,%eax | 1421 testl $15,%eax |
1371 setnz %bl | 1422 setnz %bl |
1372 shll $4,%ebx | 1423 shll $4,%ebx |
1373 subl %ebx,%eax | 1424 subl %ebx,%eax |
1374 movl $135,96(%esp) | 1425 movl $135,96(%esp) |
1375 movl $0,100(%esp) | 1426 movl $0,100(%esp) |
1376 movl $1,104(%esp) | 1427 movl $1,104(%esp) |
1377 movl $0,108(%esp) | 1428 movl $0,108(%esp) |
1378 movl %eax,112(%esp) | 1429 movl %eax,112(%esp) |
1379 movl %ebp,116(%esp) | 1430 movl %ebp,116(%esp) |
1380 movl 240(%edx),%ecx | 1431 movl 240(%edx),%ecx |
1381 movl %edx,%ebp | 1432 movl %edx,%ebp |
1382 movl %ecx,%ebx | 1433 movl %ecx,%ebx |
1383 movdqa %xmm2,%xmm1 | 1434 movdqa %xmm2,%xmm1 |
1384 pxor %xmm0,%xmm0 | 1435 pxor %xmm0,%xmm0 |
1385 movdqa 96(%esp),%xmm3 | 1436 movdqa 96(%esp),%xmm3 |
1386 pcmpgtd %xmm1,%xmm0 | 1437 pcmpgtd %xmm1,%xmm0 |
1387 andl $-16,%eax | 1438 andl $-16,%eax |
1388 subl $96,%eax | 1439 subl $96,%eax |
1389 jc L058xts_dec_short | 1440 jc L060xts_dec_short |
1390 shll $4,%ecx | 1441 shll $4,%ecx |
1391 movl $16,%ebx | 1442 movl $16,%ebx |
1392 subl %ecx,%ebx | 1443 subl %ecx,%ebx |
1393 leal 32(%edx,%ecx,1),%edx | 1444 leal 32(%edx,%ecx,1),%edx |
1394 jmp L059xts_dec_loop6 | 1445 jmp L061xts_dec_loop6 |
1395 .align 4,0x90 | 1446 .align 4,0x90 |
1396 L059xts_dec_loop6: | 1447 L061xts_dec_loop6: |
1397 pshufd $19,%xmm0,%xmm2 | 1448 pshufd $19,%xmm0,%xmm2 |
1398 pxor %xmm0,%xmm0 | 1449 pxor %xmm0,%xmm0 |
1399 movdqa %xmm1,(%esp) | 1450 movdqa %xmm1,(%esp) |
1400 paddq %xmm1,%xmm1 | 1451 paddq %xmm1,%xmm1 |
1401 pand %xmm3,%xmm2 | 1452 pand %xmm3,%xmm2 |
1402 pcmpgtd %xmm1,%xmm0 | 1453 pcmpgtd %xmm1,%xmm0 |
1403 pxor %xmm2,%xmm1 | 1454 pxor %xmm2,%xmm1 |
1404 pshufd $19,%xmm0,%xmm2 | 1455 pshufd $19,%xmm0,%xmm2 |
1405 pxor %xmm0,%xmm0 | 1456 pxor %xmm0,%xmm0 |
1406 movdqa %xmm1,16(%esp) | 1457 movdqa %xmm1,16(%esp) |
(...skipping 68 matching lines...) |
1475 pshufd $19,%xmm0,%xmm2 | 1526 pshufd $19,%xmm0,%xmm2 |
1476 movups %xmm7,80(%edi) | 1527 movups %xmm7,80(%edi) |
1477 leal 96(%edi),%edi | 1528 leal 96(%edi),%edi |
1478 movdqa 96(%esp),%xmm3 | 1529 movdqa 96(%esp),%xmm3 |
1479 pxor %xmm0,%xmm0 | 1530 pxor %xmm0,%xmm0 |
1480 paddq %xmm1,%xmm1 | 1531 paddq %xmm1,%xmm1 |
1481 pand %xmm3,%xmm2 | 1532 pand %xmm3,%xmm2 |
1482 pcmpgtd %xmm1,%xmm0 | 1533 pcmpgtd %xmm1,%xmm0 |
1483 pxor %xmm2,%xmm1 | 1534 pxor %xmm2,%xmm1 |
1484 subl $96,%eax | 1535 subl $96,%eax |
1485 jnc L059xts_dec_loop6 | 1536 jnc L061xts_dec_loop6 |
1486 movl 240(%ebp),%ecx | 1537 movl 240(%ebp),%ecx |
1487 movl %ebp,%edx | 1538 movl %ebp,%edx |
1488 movl %ecx,%ebx | 1539 movl %ecx,%ebx |
1489 L058xts_dec_short: | 1540 L060xts_dec_short: |
1490 addl $96,%eax | 1541 addl $96,%eax |
1491 jz L060xts_dec_done6x | 1542 jz L062xts_dec_done6x |
1492 movdqa %xmm1,%xmm5 | 1543 movdqa %xmm1,%xmm5 |
1493 cmpl $32,%eax | 1544 cmpl $32,%eax |
1494 jb L061xts_dec_one | 1545 jb L063xts_dec_one |
1495 pshufd $19,%xmm0,%xmm2 | 1546 pshufd $19,%xmm0,%xmm2 |
1496 pxor %xmm0,%xmm0 | 1547 pxor %xmm0,%xmm0 |
1497 paddq %xmm1,%xmm1 | 1548 paddq %xmm1,%xmm1 |
1498 pand %xmm3,%xmm2 | 1549 pand %xmm3,%xmm2 |
1499 pcmpgtd %xmm1,%xmm0 | 1550 pcmpgtd %xmm1,%xmm0 |
1500 pxor %xmm2,%xmm1 | 1551 pxor %xmm2,%xmm1 |
1501 je L062xts_dec_two | 1552 je L064xts_dec_two |
1502 pshufd $19,%xmm0,%xmm2 | 1553 pshufd $19,%xmm0,%xmm2 |
1503 pxor %xmm0,%xmm0 | 1554 pxor %xmm0,%xmm0 |
1504 movdqa %xmm1,%xmm6 | 1555 movdqa %xmm1,%xmm6 |
1505 paddq %xmm1,%xmm1 | 1556 paddq %xmm1,%xmm1 |
1506 pand %xmm3,%xmm2 | 1557 pand %xmm3,%xmm2 |
1507 pcmpgtd %xmm1,%xmm0 | 1558 pcmpgtd %xmm1,%xmm0 |
1508 pxor %xmm2,%xmm1 | 1559 pxor %xmm2,%xmm1 |
1509 cmpl $64,%eax | 1560 cmpl $64,%eax |
1510 » jb» L063xts_dec_three | 1561 » jb» L065xts_dec_three |
1511 pshufd $19,%xmm0,%xmm2 | 1562 pshufd $19,%xmm0,%xmm2 |
1512 pxor %xmm0,%xmm0 | 1563 pxor %xmm0,%xmm0 |
1513 movdqa %xmm1,%xmm7 | 1564 movdqa %xmm1,%xmm7 |
1514 paddq %xmm1,%xmm1 | 1565 paddq %xmm1,%xmm1 |
1515 pand %xmm3,%xmm2 | 1566 pand %xmm3,%xmm2 |
1516 pcmpgtd %xmm1,%xmm0 | 1567 pcmpgtd %xmm1,%xmm0 |
1517 pxor %xmm2,%xmm1 | 1568 pxor %xmm2,%xmm1 |
1518 movdqa %xmm5,(%esp) | 1569 movdqa %xmm5,(%esp) |
1519 movdqa %xmm6,16(%esp) | 1570 movdqa %xmm6,16(%esp) |
1520 » je» L064xts_dec_four | 1571 » je» L066xts_dec_four |
1521 movdqa %xmm7,32(%esp) | 1572 movdqa %xmm7,32(%esp) |
1522 pshufd $19,%xmm0,%xmm7 | 1573 pshufd $19,%xmm0,%xmm7 |
1523 movdqa %xmm1,48(%esp) | 1574 movdqa %xmm1,48(%esp) |
1524 paddq %xmm1,%xmm1 | 1575 paddq %xmm1,%xmm1 |
1525 pand %xmm3,%xmm7 | 1576 pand %xmm3,%xmm7 |
1526 pxor %xmm1,%xmm7 | 1577 pxor %xmm1,%xmm7 |
1527 movdqu (%esi),%xmm2 | 1578 movdqu (%esi),%xmm2 |
1528 movdqu 16(%esi),%xmm3 | 1579 movdqu 16(%esi),%xmm3 |
1529 movdqu 32(%esi),%xmm4 | 1580 movdqu 32(%esi),%xmm4 |
1530 pxor (%esp),%xmm2 | 1581 pxor (%esp),%xmm2 |
(...skipping 11 matching lines...)
1542 xorps 16(%esp),%xmm3 | 1593 xorps 16(%esp),%xmm3 |
1543 xorps 32(%esp),%xmm4 | 1594 xorps 32(%esp),%xmm4 |
1544 movups %xmm2,(%edi) | 1595 movups %xmm2,(%edi) |
1545 xorps 48(%esp),%xmm5 | 1596 xorps 48(%esp),%xmm5 |
1546 movups %xmm3,16(%edi) | 1597 movups %xmm3,16(%edi) |
1547 xorps %xmm1,%xmm6 | 1598 xorps %xmm1,%xmm6 |
1548 movups %xmm4,32(%edi) | 1599 movups %xmm4,32(%edi) |
1549 movups %xmm5,48(%edi) | 1600 movups %xmm5,48(%edi) |
1550 movups %xmm6,64(%edi) | 1601 movups %xmm6,64(%edi) |
1551 leal 80(%edi),%edi | 1602 leal 80(%edi),%edi |
1552 » jmp» L065xts_dec_done | 1603 » jmp» L067xts_dec_done |
1553 .align 4,0x90 | 1604 .align 4,0x90 |
1554 L061xts_dec_one: | 1605 L063xts_dec_one: |
1555 movups (%esi),%xmm2 | 1606 movups (%esi),%xmm2 |
1556 leal 16(%esi),%esi | 1607 leal 16(%esi),%esi |
1557 xorps %xmm5,%xmm2 | 1608 xorps %xmm5,%xmm2 |
1558 movups (%edx),%xmm0 | 1609 movups (%edx),%xmm0 |
1559 movups 16(%edx),%xmm1 | 1610 movups 16(%edx),%xmm1 |
1560 leal 32(%edx),%edx | 1611 leal 32(%edx),%edx |
1561 xorps %xmm0,%xmm2 | 1612 xorps %xmm0,%xmm2 |
1562 L066dec1_loop_12: | 1613 L068dec1_loop_12: |
1563 .byte 102,15,56,222,209 | 1614 .byte 102,15,56,222,209 |
1564 decl %ecx | 1615 decl %ecx |
1565 movups (%edx),%xmm1 | 1616 movups (%edx),%xmm1 |
1566 leal 16(%edx),%edx | 1617 leal 16(%edx),%edx |
1567 » jnz» L066dec1_loop_12 | 1618 » jnz» L068dec1_loop_12 |
1568 .byte 102,15,56,223,209 | 1619 .byte 102,15,56,223,209 |
1569 xorps %xmm5,%xmm2 | 1620 xorps %xmm5,%xmm2 |
1570 movups %xmm2,(%edi) | 1621 movups %xmm2,(%edi) |
1571 leal 16(%edi),%edi | 1622 leal 16(%edi),%edi |
1572 movdqa %xmm5,%xmm1 | 1623 movdqa %xmm5,%xmm1 |
1573 » jmp» L065xts_dec_done | 1624 » jmp» L067xts_dec_done |
1574 .align 4,0x90 | 1625 .align 4,0x90 |
1575 L062xts_dec_two: | 1626 L064xts_dec_two: |
1576 movaps %xmm1,%xmm6 | 1627 movaps %xmm1,%xmm6 |
1577 movups (%esi),%xmm2 | 1628 movups (%esi),%xmm2 |
1578 movups 16(%esi),%xmm3 | 1629 movups 16(%esi),%xmm3 |
1579 leal 32(%esi),%esi | 1630 leal 32(%esi),%esi |
1580 xorps %xmm5,%xmm2 | 1631 xorps %xmm5,%xmm2 |
1581 xorps %xmm6,%xmm3 | 1632 xorps %xmm6,%xmm3 |
1582 call __aesni_decrypt2 | 1633 call __aesni_decrypt2 |
1583 xorps %xmm5,%xmm2 | 1634 xorps %xmm5,%xmm2 |
1584 xorps %xmm6,%xmm3 | 1635 xorps %xmm6,%xmm3 |
1585 movups %xmm2,(%edi) | 1636 movups %xmm2,(%edi) |
1586 movups %xmm3,16(%edi) | 1637 movups %xmm3,16(%edi) |
1587 leal 32(%edi),%edi | 1638 leal 32(%edi),%edi |
1588 movdqa %xmm6,%xmm1 | 1639 movdqa %xmm6,%xmm1 |
1589 » jmp» L065xts_dec_done | 1640 » jmp» L067xts_dec_done |
1590 .align 4,0x90 | 1641 .align 4,0x90 |
1591 L063xts_dec_three: | 1642 L065xts_dec_three: |
1592 movaps %xmm1,%xmm7 | 1643 movaps %xmm1,%xmm7 |
1593 movups (%esi),%xmm2 | 1644 movups (%esi),%xmm2 |
1594 movups 16(%esi),%xmm3 | 1645 movups 16(%esi),%xmm3 |
1595 movups 32(%esi),%xmm4 | 1646 movups 32(%esi),%xmm4 |
1596 leal 48(%esi),%esi | 1647 leal 48(%esi),%esi |
1597 xorps %xmm5,%xmm2 | 1648 xorps %xmm5,%xmm2 |
1598 xorps %xmm6,%xmm3 | 1649 xorps %xmm6,%xmm3 |
1599 xorps %xmm7,%xmm4 | 1650 xorps %xmm7,%xmm4 |
1600 call __aesni_decrypt3 | 1651 call __aesni_decrypt3 |
1601 xorps %xmm5,%xmm2 | 1652 xorps %xmm5,%xmm2 |
1602 xorps %xmm6,%xmm3 | 1653 xorps %xmm6,%xmm3 |
1603 xorps %xmm7,%xmm4 | 1654 xorps %xmm7,%xmm4 |
1604 movups %xmm2,(%edi) | 1655 movups %xmm2,(%edi) |
1605 movups %xmm3,16(%edi) | 1656 movups %xmm3,16(%edi) |
1606 movups %xmm4,32(%edi) | 1657 movups %xmm4,32(%edi) |
1607 leal 48(%edi),%edi | 1658 leal 48(%edi),%edi |
1608 movdqa %xmm7,%xmm1 | 1659 movdqa %xmm7,%xmm1 |
1609 » jmp» L065xts_dec_done | 1660 » jmp» L067xts_dec_done |
1610 .align 4,0x90 | 1661 .align 4,0x90 |
1611 L064xts_dec_four: | 1662 L066xts_dec_four: |
1612 movaps %xmm1,%xmm6 | 1663 movaps %xmm1,%xmm6 |
1613 movups (%esi),%xmm2 | 1664 movups (%esi),%xmm2 |
1614 movups 16(%esi),%xmm3 | 1665 movups 16(%esi),%xmm3 |
1615 movups 32(%esi),%xmm4 | 1666 movups 32(%esi),%xmm4 |
1616 xorps (%esp),%xmm2 | 1667 xorps (%esp),%xmm2 |
1617 movups 48(%esi),%xmm5 | 1668 movups 48(%esi),%xmm5 |
1618 leal 64(%esi),%esi | 1669 leal 64(%esi),%esi |
1619 xorps 16(%esp),%xmm3 | 1670 xorps 16(%esp),%xmm3 |
1620 xorps %xmm7,%xmm4 | 1671 xorps %xmm7,%xmm4 |
1621 xorps %xmm6,%xmm5 | 1672 xorps %xmm6,%xmm5 |
1622 call __aesni_decrypt4 | 1673 call __aesni_decrypt4 |
1623 xorps (%esp),%xmm2 | 1674 xorps (%esp),%xmm2 |
1624 xorps 16(%esp),%xmm3 | 1675 xorps 16(%esp),%xmm3 |
1625 xorps %xmm7,%xmm4 | 1676 xorps %xmm7,%xmm4 |
1626 movups %xmm2,(%edi) | 1677 movups %xmm2,(%edi) |
1627 xorps %xmm6,%xmm5 | 1678 xorps %xmm6,%xmm5 |
1628 movups %xmm3,16(%edi) | 1679 movups %xmm3,16(%edi) |
1629 movups %xmm4,32(%edi) | 1680 movups %xmm4,32(%edi) |
1630 movups %xmm5,48(%edi) | 1681 movups %xmm5,48(%edi) |
1631 leal 64(%edi),%edi | 1682 leal 64(%edi),%edi |
1632 movdqa %xmm6,%xmm1 | 1683 movdqa %xmm6,%xmm1 |
1633 » jmp» L065xts_dec_done | 1684 » jmp» L067xts_dec_done |
1634 .align 4,0x90 | 1685 .align 4,0x90 |
1635 L060xts_dec_done6x: | 1686 L062xts_dec_done6x: |
1636 movl 112(%esp),%eax | 1687 movl 112(%esp),%eax |
1637 andl $15,%eax | 1688 andl $15,%eax |
1638 » jz» L067xts_dec_ret | 1689 » jz» L069xts_dec_ret |
1639 movl %eax,112(%esp) | 1690 movl %eax,112(%esp) |
1640 » jmp» L068xts_dec_only_one_more | 1691 » jmp» L070xts_dec_only_one_more |
1641 .align 4,0x90 | 1692 .align 4,0x90 |
1642 L065xts_dec_done: | 1693 L067xts_dec_done: |
1643 movl 112(%esp),%eax | 1694 movl 112(%esp),%eax |
1644 pxor %xmm0,%xmm0 | 1695 pxor %xmm0,%xmm0 |
1645 andl $15,%eax | 1696 andl $15,%eax |
1646 » jz» L067xts_dec_ret | 1697 » jz» L069xts_dec_ret |
1647 pcmpgtd %xmm1,%xmm0 | 1698 pcmpgtd %xmm1,%xmm0 |
1648 movl %eax,112(%esp) | 1699 movl %eax,112(%esp) |
1649 pshufd $19,%xmm0,%xmm2 | 1700 pshufd $19,%xmm0,%xmm2 |
1650 pxor %xmm0,%xmm0 | 1701 pxor %xmm0,%xmm0 |
1651 movdqa 96(%esp),%xmm3 | 1702 movdqa 96(%esp),%xmm3 |
1652 paddq %xmm1,%xmm1 | 1703 paddq %xmm1,%xmm1 |
1653 pand %xmm3,%xmm2 | 1704 pand %xmm3,%xmm2 |
1654 pcmpgtd %xmm1,%xmm0 | 1705 pcmpgtd %xmm1,%xmm0 |
1655 pxor %xmm2,%xmm1 | 1706 pxor %xmm2,%xmm1 |
1656 L068xts_dec_only_one_more: | 1707 L070xts_dec_only_one_more: |
1657 pshufd $19,%xmm0,%xmm5 | 1708 pshufd $19,%xmm0,%xmm5 |
1658 movdqa %xmm1,%xmm6 | 1709 movdqa %xmm1,%xmm6 |
1659 paddq %xmm1,%xmm1 | 1710 paddq %xmm1,%xmm1 |
1660 pand %xmm3,%xmm5 | 1711 pand %xmm3,%xmm5 |
1661 pxor %xmm1,%xmm5 | 1712 pxor %xmm1,%xmm5 |
1662 movl %ebp,%edx | 1713 movl %ebp,%edx |
1663 movl %ebx,%ecx | 1714 movl %ebx,%ecx |
1664 movups (%esi),%xmm2 | 1715 movups (%esi),%xmm2 |
1665 xorps %xmm5,%xmm2 | 1716 xorps %xmm5,%xmm2 |
1666 movups (%edx),%xmm0 | 1717 movups (%edx),%xmm0 |
1667 movups 16(%edx),%xmm1 | 1718 movups 16(%edx),%xmm1 |
1668 leal 32(%edx),%edx | 1719 leal 32(%edx),%edx |
1669 xorps %xmm0,%xmm2 | 1720 xorps %xmm0,%xmm2 |
1670 L069dec1_loop_13: | 1721 L071dec1_loop_13: |
1671 .byte 102,15,56,222,209 | 1722 .byte 102,15,56,222,209 |
1672 decl %ecx | 1723 decl %ecx |
1673 movups (%edx),%xmm1 | 1724 movups (%edx),%xmm1 |
1674 leal 16(%edx),%edx | 1725 leal 16(%edx),%edx |
1675 » jnz» L069dec1_loop_13 | 1726 » jnz» L071dec1_loop_13 |
1676 .byte 102,15,56,223,209 | 1727 .byte 102,15,56,223,209 |
1677 xorps %xmm5,%xmm2 | 1728 xorps %xmm5,%xmm2 |
1678 movups %xmm2,(%edi) | 1729 movups %xmm2,(%edi) |
1679 L070xts_dec_steal: | 1730 L072xts_dec_steal: |
1680 movzbl 16(%esi),%ecx | 1731 movzbl 16(%esi),%ecx |
1681 movzbl (%edi),%edx | 1732 movzbl (%edi),%edx |
1682 leal 1(%esi),%esi | 1733 leal 1(%esi),%esi |
1683 movb %cl,(%edi) | 1734 movb %cl,(%edi) |
1684 movb %dl,16(%edi) | 1735 movb %dl,16(%edi) |
1685 leal 1(%edi),%edi | 1736 leal 1(%edi),%edi |
1686 subl $1,%eax | 1737 subl $1,%eax |
1687 » jnz» L070xts_dec_steal | 1738 » jnz» L072xts_dec_steal |
1688 subl 112(%esp),%edi | 1739 subl 112(%esp),%edi |
1689 movl %ebp,%edx | 1740 movl %ebp,%edx |
1690 movl %ebx,%ecx | 1741 movl %ebx,%ecx |
1691 movups (%edi),%xmm2 | 1742 movups (%edi),%xmm2 |
1692 xorps %xmm6,%xmm2 | 1743 xorps %xmm6,%xmm2 |
1693 movups (%edx),%xmm0 | 1744 movups (%edx),%xmm0 |
1694 movups 16(%edx),%xmm1 | 1745 movups 16(%edx),%xmm1 |
1695 leal 32(%edx),%edx | 1746 leal 32(%edx),%edx |
1696 xorps %xmm0,%xmm2 | 1747 xorps %xmm0,%xmm2 |
1697 L071dec1_loop_14: | 1748 L073dec1_loop_14: |
1698 .byte 102,15,56,222,209 | 1749 .byte 102,15,56,222,209 |
1699 decl %ecx | 1750 decl %ecx |
1700 movups (%edx),%xmm1 | 1751 movups (%edx),%xmm1 |
1701 leal 16(%edx),%edx | 1752 leal 16(%edx),%edx |
1702 » jnz» L071dec1_loop_14 | 1753 » jnz» L073dec1_loop_14 |
1703 .byte 102,15,56,223,209 | 1754 .byte 102,15,56,223,209 |
1704 xorps %xmm6,%xmm2 | 1755 xorps %xmm6,%xmm2 |
1705 movups %xmm2,(%edi) | 1756 movups %xmm2,(%edi) |
1706 L067xts_dec_ret: | 1757 L069xts_dec_ret: |
| 1758 » pxor» %xmm0,%xmm0 |
| 1759 » pxor» %xmm1,%xmm1 |
| 1760 » pxor» %xmm2,%xmm2 |
| 1761 » movdqa» %xmm0,(%esp) |
| 1762 » pxor» %xmm3,%xmm3 |
| 1763 » movdqa» %xmm0,16(%esp) |
| 1764 » pxor» %xmm4,%xmm4 |
| 1765 » movdqa» %xmm0,32(%esp) |
| 1766 » pxor» %xmm5,%xmm5 |
| 1767 » movdqa» %xmm0,48(%esp) |
| 1768 » pxor» %xmm6,%xmm6 |
| 1769 » movdqa» %xmm0,64(%esp) |
| 1770 » pxor» %xmm7,%xmm7 |
| 1771 » movdqa» %xmm0,80(%esp) |
1707 movl 116(%esp),%esp | 1772 movl 116(%esp),%esp |
1708 popl %edi | 1773 popl %edi |
1709 popl %esi | 1774 popl %esi |
1710 popl %ebx | 1775 popl %ebx |
1711 popl %ebp | 1776 popl %ebp |
1712 ret | 1777 ret |
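The seven-instruction pshufd/pcmpgtd/pand/paddq/pxor cluster that recurs throughout the XTS path above doubles the 128-bit tweak in GF(2^128), folding the x^128 = x^7 + x^2 + x + 1 reduction (the 135 = 0x87 stored at 96(%esp)) back in without branching on secret data. A minimal C sketch of the same update, assuming the tweak is kept as two little-endian 64-bit halves the way the xmm register holds it (xts_double_tweak is an illustrative name, not a routine in this file):

#include <stdint.h>

/* Branchless GF(2^128) doubling of an XTS tweak, mirroring the
 * pcmpgtd (extract sign bit) / pand (select 0x87 and the carry) /
 * paddq (shift each half left) / pxor (fold back) sequence. */
static void xts_double_tweak(uint64_t t[2]) {
    uint64_t msb   = t[1] >> 63;             /* top bit of the whole tweak    */
    uint64_t carry = t[0] >> 63;             /* bit moving into the high half */
    t[1] = (t[1] << 1) | carry;
    t[0] = (t[0] << 1) ^ (0x87 & (0 - msb)); /* reduction via mask, no branch */
}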
1713 .globl _aesni_cbc_encrypt | 1778 .globl _aesni_cbc_encrypt |
1714 .private_extern _aesni_cbc_encrypt | 1779 .private_extern _aesni_cbc_encrypt |
1715 .align 4 | 1780 .align 4 |
1716 _aesni_cbc_encrypt: | 1781 _aesni_cbc_encrypt: |
1717 L_aesni_cbc_encrypt_begin: | 1782 L_aesni_cbc_encrypt_begin: |
1718 pushl %ebp | 1783 pushl %ebp |
1719 pushl %ebx | 1784 pushl %ebx |
1720 pushl %esi | 1785 pushl %esi |
1721 pushl %edi | 1786 pushl %edi |
1722 movl 20(%esp),%esi | 1787 movl 20(%esp),%esi |
1723 movl %esp,%ebx | 1788 movl %esp,%ebx |
1724 movl 24(%esp),%edi | 1789 movl 24(%esp),%edi |
1725 subl $24,%ebx | 1790 subl $24,%ebx |
1726 movl 28(%esp),%eax | 1791 movl 28(%esp),%eax |
1727 andl $-16,%ebx | 1792 andl $-16,%ebx |
1728 movl 32(%esp),%edx | 1793 movl 32(%esp),%edx |
1729 movl 36(%esp),%ebp | 1794 movl 36(%esp),%ebp |
1730 testl %eax,%eax | 1795 testl %eax,%eax |
1731 » jz» L072cbc_abort | 1796 » jz» L074cbc_abort |
1732 cmpl $0,40(%esp) | 1797 cmpl $0,40(%esp) |
1733 xchgl %esp,%ebx | 1798 xchgl %esp,%ebx |
1734 movups (%ebp),%xmm7 | 1799 movups (%ebp),%xmm7 |
1735 movl 240(%edx),%ecx | 1800 movl 240(%edx),%ecx |
1736 movl %edx,%ebp | 1801 movl %edx,%ebp |
1737 movl %ebx,16(%esp) | 1802 movl %ebx,16(%esp) |
1738 movl %ecx,%ebx | 1803 movl %ecx,%ebx |
1739 » je» L073cbc_decrypt | 1804 » je» L075cbc_decrypt |
1740 movaps %xmm7,%xmm2 | 1805 movaps %xmm7,%xmm2 |
1741 cmpl $16,%eax | 1806 cmpl $16,%eax |
1742 » jb» L074cbc_enc_tail | 1807 » jb» L076cbc_enc_tail |
1743 subl $16,%eax | 1808 subl $16,%eax |
1744 » jmp» L075cbc_enc_loop | 1809 » jmp» L077cbc_enc_loop |
1745 .align 4,0x90 | 1810 .align 4,0x90 |
1746 L075cbc_enc_loop: | 1811 L077cbc_enc_loop: |
1747 movups (%esi),%xmm7 | 1812 movups (%esi),%xmm7 |
1748 leal 16(%esi),%esi | 1813 leal 16(%esi),%esi |
1749 movups (%edx),%xmm0 | 1814 movups (%edx),%xmm0 |
1750 movups 16(%edx),%xmm1 | 1815 movups 16(%edx),%xmm1 |
1751 xorps %xmm0,%xmm7 | 1816 xorps %xmm0,%xmm7 |
1752 leal 32(%edx),%edx | 1817 leal 32(%edx),%edx |
1753 xorps %xmm7,%xmm2 | 1818 xorps %xmm7,%xmm2 |
1754 L076enc1_loop_15: | 1819 L078enc1_loop_15: |
1755 .byte 102,15,56,220,209 | 1820 .byte 102,15,56,220,209 |
1756 decl %ecx | 1821 decl %ecx |
1757 movups (%edx),%xmm1 | 1822 movups (%edx),%xmm1 |
1758 leal 16(%edx),%edx | 1823 leal 16(%edx),%edx |
1759 » jnz» L076enc1_loop_15 | 1824 » jnz» L078enc1_loop_15 |
1760 .byte 102,15,56,221,209 | 1825 .byte 102,15,56,221,209 |
1761 movl %ebx,%ecx | 1826 movl %ebx,%ecx |
1762 movl %ebp,%edx | 1827 movl %ebp,%edx |
1763 movups %xmm2,(%edi) | 1828 movups %xmm2,(%edi) |
1764 leal 16(%edi),%edi | 1829 leal 16(%edi),%edi |
1765 subl $16,%eax | 1830 subl $16,%eax |
1766 » jnc» L075cbc_enc_loop | 1831 » jnc» L077cbc_enc_loop |
1767 addl $16,%eax | 1832 addl $16,%eax |
1768 » jnz» L074cbc_enc_tail | 1833 » jnz» L076cbc_enc_tail |
1769 movaps %xmm2,%xmm7 | 1834 movaps %xmm2,%xmm7 |
1770 » jmp» L077cbc_ret | 1835 » pxor» %xmm2,%xmm2 |
1771 L074cbc_enc_tail: | 1836 » jmp» L079cbc_ret |
| 1837 L076cbc_enc_tail: |
1772 movl %eax,%ecx | 1838 movl %eax,%ecx |
1773 .long 2767451785 | 1839 .long 2767451785 |
1774 movl $16,%ecx | 1840 movl $16,%ecx |
1775 subl %eax,%ecx | 1841 subl %eax,%ecx |
1776 xorl %eax,%eax | 1842 xorl %eax,%eax |
1777 .long 2868115081 | 1843 .long 2868115081 |
1778 leal -16(%edi),%edi | 1844 leal -16(%edi),%edi |
1779 movl %ebx,%ecx | 1845 movl %ebx,%ecx |
1780 movl %edi,%esi | 1846 movl %edi,%esi |
1781 movl %ebp,%edx | 1847 movl %ebp,%edx |
1782 » jmp» L075cbc_enc_loop | 1848 » jmp» L077cbc_enc_loop |
1783 .align 4,0x90 | 1849 .align 4,0x90 |
1784 L073cbc_decrypt: | 1850 L075cbc_decrypt: |
1785 cmpl $80,%eax | 1851 cmpl $80,%eax |
1786 » jbe» L078cbc_dec_tail | 1852 » jbe» L080cbc_dec_tail |
1787 movaps %xmm7,(%esp) | 1853 movaps %xmm7,(%esp) |
1788 subl $80,%eax | 1854 subl $80,%eax |
1789 » jmp» L079cbc_dec_loop6_enter | 1855 » jmp» L081cbc_dec_loop6_enter |
1790 .align 4,0x90 | 1856 .align 4,0x90 |
1791 L080cbc_dec_loop6: | 1857 L082cbc_dec_loop6: |
1792 movaps %xmm0,(%esp) | 1858 movaps %xmm0,(%esp) |
1793 movups %xmm7,(%edi) | 1859 movups %xmm7,(%edi) |
1794 leal 16(%edi),%edi | 1860 leal 16(%edi),%edi |
1795 L079cbc_dec_loop6_enter: | 1861 L081cbc_dec_loop6_enter: |
1796 movdqu (%esi),%xmm2 | 1862 movdqu (%esi),%xmm2 |
1797 movdqu 16(%esi),%xmm3 | 1863 movdqu 16(%esi),%xmm3 |
1798 movdqu 32(%esi),%xmm4 | 1864 movdqu 32(%esi),%xmm4 |
1799 movdqu 48(%esi),%xmm5 | 1865 movdqu 48(%esi),%xmm5 |
1800 movdqu 64(%esi),%xmm6 | 1866 movdqu 64(%esi),%xmm6 |
1801 movdqu 80(%esi),%xmm7 | 1867 movdqu 80(%esi),%xmm7 |
1802 call __aesni_decrypt6 | 1868 call __aesni_decrypt6 |
1803 movups (%esi),%xmm1 | 1869 movups (%esi),%xmm1 |
1804 movups 16(%esi),%xmm0 | 1870 movups 16(%esi),%xmm0 |
1805 xorps (%esp),%xmm2 | 1871 xorps (%esp),%xmm2 |
1806 xorps %xmm1,%xmm3 | 1872 xorps %xmm1,%xmm3 |
1807 movups 32(%esi),%xmm1 | 1873 movups 32(%esi),%xmm1 |
1808 xorps %xmm0,%xmm4 | 1874 xorps %xmm0,%xmm4 |
1809 movups 48(%esi),%xmm0 | 1875 movups 48(%esi),%xmm0 |
1810 xorps %xmm1,%xmm5 | 1876 xorps %xmm1,%xmm5 |
1811 movups 64(%esi),%xmm1 | 1877 movups 64(%esi),%xmm1 |
1812 xorps %xmm0,%xmm6 | 1878 xorps %xmm0,%xmm6 |
1813 movups 80(%esi),%xmm0 | 1879 movups 80(%esi),%xmm0 |
1814 xorps %xmm1,%xmm7 | 1880 xorps %xmm1,%xmm7 |
1815 movups %xmm2,(%edi) | 1881 movups %xmm2,(%edi) |
1816 movups %xmm3,16(%edi) | 1882 movups %xmm3,16(%edi) |
1817 leal 96(%esi),%esi | 1883 leal 96(%esi),%esi |
1818 movups %xmm4,32(%edi) | 1884 movups %xmm4,32(%edi) |
1819 movl %ebx,%ecx | 1885 movl %ebx,%ecx |
1820 movups %xmm5,48(%edi) | 1886 movups %xmm5,48(%edi) |
1821 movl %ebp,%edx | 1887 movl %ebp,%edx |
1822 movups %xmm6,64(%edi) | 1888 movups %xmm6,64(%edi) |
1823 leal 80(%edi),%edi | 1889 leal 80(%edi),%edi |
1824 subl $96,%eax | 1890 subl $96,%eax |
1825 » ja» L080cbc_dec_loop6 | 1891 » ja» L082cbc_dec_loop6 |
1826 movaps %xmm7,%xmm2 | 1892 movaps %xmm7,%xmm2 |
1827 movaps %xmm0,%xmm7 | 1893 movaps %xmm0,%xmm7 |
1828 addl $80,%eax | 1894 addl $80,%eax |
1829 » jle» L081cbc_dec_tail_collected | 1895 » jle» L083cbc_dec_clear_tail_collected |
1830 movups %xmm2,(%edi) | 1896 movups %xmm2,(%edi) |
1831 leal 16(%edi),%edi | 1897 leal 16(%edi),%edi |
1832 L078cbc_dec_tail: | 1898 L080cbc_dec_tail: |
1833 movups (%esi),%xmm2 | 1899 movups (%esi),%xmm2 |
1834 movaps %xmm2,%xmm6 | 1900 movaps %xmm2,%xmm6 |
1835 cmpl $16,%eax | 1901 cmpl $16,%eax |
1836 » jbe» L082cbc_dec_one | 1902 » jbe» L084cbc_dec_one |
1837 movups 16(%esi),%xmm3 | 1903 movups 16(%esi),%xmm3 |
1838 movaps %xmm3,%xmm5 | 1904 movaps %xmm3,%xmm5 |
1839 cmpl $32,%eax | 1905 cmpl $32,%eax |
1840 » jbe» L083cbc_dec_two | 1906 » jbe» L085cbc_dec_two |
1841 movups 32(%esi),%xmm4 | 1907 movups 32(%esi),%xmm4 |
1842 cmpl $48,%eax | 1908 cmpl $48,%eax |
1843 » jbe» L084cbc_dec_three | 1909 » jbe» L086cbc_dec_three |
1844 movups 48(%esi),%xmm5 | 1910 movups 48(%esi),%xmm5 |
1845 cmpl $64,%eax | 1911 cmpl $64,%eax |
1846 » jbe» L085cbc_dec_four | 1912 » jbe» L087cbc_dec_four |
1847 movups 64(%esi),%xmm6 | 1913 movups 64(%esi),%xmm6 |
1848 movaps %xmm7,(%esp) | 1914 movaps %xmm7,(%esp) |
1849 movups (%esi),%xmm2 | 1915 movups (%esi),%xmm2 |
1850 xorps %xmm7,%xmm7 | 1916 xorps %xmm7,%xmm7 |
1851 call __aesni_decrypt6 | 1917 call __aesni_decrypt6 |
1852 movups (%esi),%xmm1 | 1918 movups (%esi),%xmm1 |
1853 movups 16(%esi),%xmm0 | 1919 movups 16(%esi),%xmm0 |
1854 xorps (%esp),%xmm2 | 1920 xorps (%esp),%xmm2 |
1855 xorps %xmm1,%xmm3 | 1921 xorps %xmm1,%xmm3 |
1856 movups 32(%esi),%xmm1 | 1922 movups 32(%esi),%xmm1 |
1857 xorps %xmm0,%xmm4 | 1923 xorps %xmm0,%xmm4 |
1858 movups 48(%esi),%xmm0 | 1924 movups 48(%esi),%xmm0 |
1859 xorps %xmm1,%xmm5 | 1925 xorps %xmm1,%xmm5 |
1860 movups 64(%esi),%xmm7 | 1926 movups 64(%esi),%xmm7 |
1861 xorps %xmm0,%xmm6 | 1927 xorps %xmm0,%xmm6 |
1862 movups %xmm2,(%edi) | 1928 movups %xmm2,(%edi) |
1863 movups %xmm3,16(%edi) | 1929 movups %xmm3,16(%edi) |
| 1930 pxor %xmm3,%xmm3 |
1864 movups %xmm4,32(%edi) | 1931 movups %xmm4,32(%edi) |
| 1932 pxor %xmm4,%xmm4 |
1865 movups %xmm5,48(%edi) | 1933 movups %xmm5,48(%edi) |
| 1934 pxor %xmm5,%xmm5 |
1866 leal 64(%edi),%edi | 1935 leal 64(%edi),%edi |
1867 movaps %xmm6,%xmm2 | 1936 movaps %xmm6,%xmm2 |
| 1937 pxor %xmm6,%xmm6 |
1868 subl $80,%eax | 1938 subl $80,%eax |
1869 » jmp» L081cbc_dec_tail_collected | 1939 » jmp» L088cbc_dec_tail_collected |
1870 .align 4,0x90 | 1940 .align 4,0x90 |
1871 L082cbc_dec_one: | 1941 L084cbc_dec_one: |
1872 movups (%edx),%xmm0 | 1942 movups (%edx),%xmm0 |
1873 movups 16(%edx),%xmm1 | 1943 movups 16(%edx),%xmm1 |
1874 leal 32(%edx),%edx | 1944 leal 32(%edx),%edx |
1875 xorps %xmm0,%xmm2 | 1945 xorps %xmm0,%xmm2 |
1876 L086dec1_loop_16: | 1946 L089dec1_loop_16: |
1877 .byte 102,15,56,222,209 | 1947 .byte 102,15,56,222,209 |
1878 decl %ecx | 1948 decl %ecx |
1879 movups (%edx),%xmm1 | 1949 movups (%edx),%xmm1 |
1880 leal 16(%edx),%edx | 1950 leal 16(%edx),%edx |
1881 » jnz» L086dec1_loop_16 | 1951 » jnz» L089dec1_loop_16 |
1882 .byte 102,15,56,223,209 | 1952 .byte 102,15,56,223,209 |
1883 xorps %xmm7,%xmm2 | 1953 xorps %xmm7,%xmm2 |
1884 movaps %xmm6,%xmm7 | 1954 movaps %xmm6,%xmm7 |
1885 subl $16,%eax | 1955 subl $16,%eax |
1886 » jmp» L081cbc_dec_tail_collected | 1956 » jmp» L088cbc_dec_tail_collected |
1887 .align 4,0x90 | 1957 .align 4,0x90 |
1888 L083cbc_dec_two: | 1958 L085cbc_dec_two: |
1889 call __aesni_decrypt2 | 1959 call __aesni_decrypt2 |
1890 xorps %xmm7,%xmm2 | 1960 xorps %xmm7,%xmm2 |
1891 xorps %xmm6,%xmm3 | 1961 xorps %xmm6,%xmm3 |
1892 movups %xmm2,(%edi) | 1962 movups %xmm2,(%edi) |
1893 movaps %xmm3,%xmm2 | 1963 movaps %xmm3,%xmm2 |
| 1964 pxor %xmm3,%xmm3 |
1894 leal 16(%edi),%edi | 1965 leal 16(%edi),%edi |
1895 movaps %xmm5,%xmm7 | 1966 movaps %xmm5,%xmm7 |
1896 subl $32,%eax | 1967 subl $32,%eax |
1897 » jmp» L081cbc_dec_tail_collected | 1968 » jmp» L088cbc_dec_tail_collected |
1898 .align 4,0x90 | 1969 .align 4,0x90 |
1899 L084cbc_dec_three: | 1970 L086cbc_dec_three: |
1900 call __aesni_decrypt3 | 1971 call __aesni_decrypt3 |
1901 xorps %xmm7,%xmm2 | 1972 xorps %xmm7,%xmm2 |
1902 xorps %xmm6,%xmm3 | 1973 xorps %xmm6,%xmm3 |
1903 xorps %xmm5,%xmm4 | 1974 xorps %xmm5,%xmm4 |
1904 movups %xmm2,(%edi) | 1975 movups %xmm2,(%edi) |
1905 movaps %xmm4,%xmm2 | 1976 movaps %xmm4,%xmm2 |
| 1977 pxor %xmm4,%xmm4 |
1906 movups %xmm3,16(%edi) | 1978 movups %xmm3,16(%edi) |
| 1979 pxor %xmm3,%xmm3 |
1907 leal 32(%edi),%edi | 1980 leal 32(%edi),%edi |
1908 movups 32(%esi),%xmm7 | 1981 movups 32(%esi),%xmm7 |
1909 subl $48,%eax | 1982 subl $48,%eax |
1910 » jmp» L081cbc_dec_tail_collected | 1983 » jmp» L088cbc_dec_tail_collected |
1911 .align 4,0x90 | 1984 .align 4,0x90 |
1912 L085cbc_dec_four: | 1985 L087cbc_dec_four: |
1913 call __aesni_decrypt4 | 1986 call __aesni_decrypt4 |
1914 movups 16(%esi),%xmm1 | 1987 movups 16(%esi),%xmm1 |
1915 movups 32(%esi),%xmm0 | 1988 movups 32(%esi),%xmm0 |
1916 xorps %xmm7,%xmm2 | 1989 xorps %xmm7,%xmm2 |
1917 movups 48(%esi),%xmm7 | 1990 movups 48(%esi),%xmm7 |
1918 xorps %xmm6,%xmm3 | 1991 xorps %xmm6,%xmm3 |
1919 movups %xmm2,(%edi) | 1992 movups %xmm2,(%edi) |
1920 xorps %xmm1,%xmm4 | 1993 xorps %xmm1,%xmm4 |
1921 movups %xmm3,16(%edi) | 1994 movups %xmm3,16(%edi) |
| 1995 pxor %xmm3,%xmm3 |
1922 xorps %xmm0,%xmm5 | 1996 xorps %xmm0,%xmm5 |
1923 movups %xmm4,32(%edi) | 1997 movups %xmm4,32(%edi) |
| 1998 pxor %xmm4,%xmm4 |
1924 leal 48(%edi),%edi | 1999 leal 48(%edi),%edi |
1925 movaps %xmm5,%xmm2 | 2000 movaps %xmm5,%xmm2 |
| 2001 pxor %xmm5,%xmm5 |
1926 subl $64,%eax | 2002 subl $64,%eax |
1927 L081cbc_dec_tail_collected: | 2003 » jmp» L088cbc_dec_tail_collected |
| 2004 .align» 4,0x90 |
| 2005 L083cbc_dec_clear_tail_collected: |
| 2006 » pxor» %xmm3,%xmm3 |
| 2007 » pxor» %xmm4,%xmm4 |
| 2008 » pxor» %xmm5,%xmm5 |
| 2009 » pxor» %xmm6,%xmm6 |
| 2010 L088cbc_dec_tail_collected: |
1928 andl $15,%eax | 2011 andl $15,%eax |
1929 » jnz» L087cbc_dec_tail_partial | 2012 » jnz» L090cbc_dec_tail_partial |
1930 movups %xmm2,(%edi) | 2013 movups %xmm2,(%edi) |
1931 » jmp» L077cbc_ret | 2014 » pxor» %xmm0,%xmm0 |
| 2015 » jmp» L079cbc_ret |
1932 .align 4,0x90 | 2016 .align 4,0x90 |
1933 L087cbc_dec_tail_partial: | 2017 L090cbc_dec_tail_partial: |
1934 movaps %xmm2,(%esp) | 2018 movaps %xmm2,(%esp) |
| 2019 pxor %xmm0,%xmm0 |
1935 movl $16,%ecx | 2020 movl $16,%ecx |
1936 movl %esp,%esi | 2021 movl %esp,%esi |
1937 subl %eax,%ecx | 2022 subl %eax,%ecx |
1938 .long 2767451785 | 2023 .long 2767451785 |
1939 L077cbc_ret: | 2024 » movdqa» %xmm2,(%esp) |
| 2025 L079cbc_ret: |
1940 movl 16(%esp),%esp | 2026 movl 16(%esp),%esp |
1941 movl 36(%esp),%ebp | 2027 movl 36(%esp),%ebp |
| 2028 pxor %xmm2,%xmm2 |
| 2029 pxor %xmm1,%xmm1 |
1942 movups %xmm7,(%ebp) | 2030 movups %xmm7,(%ebp) |
1943 L072cbc_abort: | 2031 » pxor» %xmm7,%xmm7 |
| 2032 L074cbc_abort: |
1944 popl %edi | 2033 popl %edi |
1945 popl %esi | 2034 popl %esi |
1946 popl %ebx | 2035 popl %ebx |
1947 popl %ebp | 2036 popl %ebp |
1948 ret | 2037 ret |
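The pxor rows added on the NEW side of the XTS and CBC exit paths clear every xmm register that touched key or data material, and the movdqa stores of the freshly zeroed %xmm0 across the stack frame in the XTS epilogue above overwrite the spilled tweak blocks as well. A C analogue needs a zeroing call the optimizer cannot prove dead; routing memset through a volatile function pointer is one common pattern (OPENSSL_cleanse plays this role in the library proper). A minimal sketch, with cleanse as a hypothetical helper name:

#include <string.h>

typedef void *(*memset_t)(void *, int, size_t);

/* Reading the pointer is a volatile access, so the compiler may not
 * assume it still points at memset and delete the "dead" store. */
static volatile memset_t secure_memset = memset;

static void cleanse(void *p, size_t n) {
    secure_memset(p, 0, n);
}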
1949 .private_extern __aesni_set_encrypt_key | 2038 .private_extern __aesni_set_encrypt_key |
1950 .align 4 | 2039 .align 4 |
1951 __aesni_set_encrypt_key: | 2040 __aesni_set_encrypt_key: |
| 2041 pushl %ebp |
| 2042 pushl %ebx |
1952 testl %eax,%eax | 2043 testl %eax,%eax |
1953 » jz» L088bad_pointer | 2044 » jz» L091bad_pointer |
1954 testl %edx,%edx | 2045 testl %edx,%edx |
1955 » jz» L088bad_pointer | 2046 » jz» L091bad_pointer |
| 2047 » call» L092pic |
| 2048 L092pic: |
| 2049 » popl» %ebx |
| 2050 » leal» Lkey_const-L092pic(%ebx),%ebx |
| 2051 » movl» L_OPENSSL_ia32cap_P$non_lazy_ptr-Lkey_const(%ebx),%ebp |
1956 movups (%eax),%xmm0 | 2052 movups (%eax),%xmm0 |
1957 xorps %xmm4,%xmm4 | 2053 xorps %xmm4,%xmm4 |
| 2054 movl 4(%ebp),%ebp |
1958 leal 16(%edx),%edx | 2055 leal 16(%edx),%edx |
| 2056 andl $268437504,%ebp |
1959 cmpl $256,%ecx | 2057 cmpl $256,%ecx |
1960 » je» L08914rounds | 2058 » je» L09314rounds |
1961 cmpl $192,%ecx | 2059 cmpl $192,%ecx |
1962 » je» L09012rounds | 2060 » je» L09412rounds |
1963 cmpl $128,%ecx | 2061 cmpl $128,%ecx |
1964 » jne» L091bad_keybits | 2062 » jne» L095bad_keybits |
1965 .align 4,0x90 | 2063 .align 4,0x90 |
1966 L09210rounds: | 2064 L09610rounds: |
| 2065 » cmpl» $268435456,%ebp |
| 2066 » je» L09710rounds_alt |
1967 movl $9,%ecx | 2067 movl $9,%ecx |
1968 movups %xmm0,-16(%edx) | 2068 movups %xmm0,-16(%edx) |
1969 .byte 102,15,58,223,200,1 | 2069 .byte 102,15,58,223,200,1 |
1970 » call» L093key_128_cold | 2070 » call» L098key_128_cold |
1971 .byte 102,15,58,223,200,2 | 2071 .byte 102,15,58,223,200,2 |
1972 » call» L094key_128 | 2072 » call» L099key_128 |
1973 .byte 102,15,58,223,200,4 | 2073 .byte 102,15,58,223,200,4 |
1974 » call» L094key_128 | 2074 » call» L099key_128 |
1975 .byte 102,15,58,223,200,8 | 2075 .byte 102,15,58,223,200,8 |
1976 » call» L094key_128 | 2076 » call» L099key_128 |
1977 .byte 102,15,58,223,200,16 | 2077 .byte 102,15,58,223,200,16 |
1978 » call» L094key_128 | 2078 » call» L099key_128 |
1979 .byte 102,15,58,223,200,32 | 2079 .byte 102,15,58,223,200,32 |
1980 » call» L094key_128 | 2080 » call» L099key_128 |
1981 .byte 102,15,58,223,200,64 | 2081 .byte 102,15,58,223,200,64 |
1982 » call» L094key_128 | 2082 » call» L099key_128 |
1983 .byte 102,15,58,223,200,128 | 2083 .byte 102,15,58,223,200,128 |
1984 » call» L094key_128 | 2084 » call» L099key_128 |
1985 .byte 102,15,58,223,200,27 | 2085 .byte 102,15,58,223,200,27 |
1986 » call» L094key_128 | 2086 » call» L099key_128 |
1987 .byte 102,15,58,223,200,54 | 2087 .byte 102,15,58,223,200,54 |
1988 » call» L094key_128 | 2088 » call» L099key_128 |
1989 movups %xmm0,(%edx) | 2089 movups %xmm0,(%edx) |
1990 movl %ecx,80(%edx) | 2090 movl %ecx,80(%edx) |
1991 » xorl» %eax,%eax | 2091 » jmp» L100good_key |
1992 » ret | |
1993 .align 4,0x90 | 2092 .align 4,0x90 |
1994 L094key_128: | 2093 L099key_128: |
1995 movups %xmm0,(%edx) | 2094 movups %xmm0,(%edx) |
1996 leal 16(%edx),%edx | 2095 leal 16(%edx),%edx |
1997 L093key_128_cold: | 2096 L098key_128_cold: |
1998 shufps $16,%xmm0,%xmm4 | 2097 shufps $16,%xmm0,%xmm4 |
1999 xorps %xmm4,%xmm0 | 2098 xorps %xmm4,%xmm0 |
2000 shufps $140,%xmm0,%xmm4 | 2099 shufps $140,%xmm0,%xmm4 |
2001 xorps %xmm4,%xmm0 | 2100 xorps %xmm4,%xmm0 |
2002 shufps $255,%xmm1,%xmm1 | 2101 shufps $255,%xmm1,%xmm1 |
2003 xorps %xmm1,%xmm0 | 2102 xorps %xmm1,%xmm0 |
2004 ret | 2103 ret |
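In the schedule above, each ".byte 102,15,58,223,200,N" row encodes aeskeygenassist $N,%xmm0,%xmm1, and the shufps/xorps tail of the key_128 helper accumulates the running prefix-xor of the four round-key words before folding in the broadcast SubWord(RotWord(w3)) ^ rcon. A sketch of one expansion step with intrinsics (compile with -maes; the pslldq form below is the usual equivalent of the shufps dance, and expand_step128 is an illustrative name):

#include <emmintrin.h>
#include <wmmintrin.h>

static __m128i expand_step128(__m128i key, __m128i kga) {
    kga = _mm_shuffle_epi32(kga, 0xff);               /* shufps $255: broadcast */
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4)); /* prefix-xor of the four */
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4)); /* 32-bit words, as the   */
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4)); /* two shufps/xorps pairs */
    return _mm_xor_si128(key, kga);
}

/* e.g. rk1 = expand_step128(rk0, _mm_aeskeygenassist_si128(rk0, 0x01)); */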
2005 .align 4,0x90 | 2104 .align 4,0x90 |
2006 L09012rounds: | 2105 L09710rounds_alt: |
| 2106 » movdqa» (%ebx),%xmm5 |
| 2107 » movl» $8,%ecx |
| 2108 » movdqa» 32(%ebx),%xmm4 |
| 2109 » movdqa» %xmm0,%xmm2 |
| 2110 » movdqu» %xmm0,-16(%edx) |
| 2111 L101loop_key128: |
| 2112 .byte» 102,15,56,0,197 |
| 2113 .byte» 102,15,56,221,196 |
| 2114 » pslld» $1,%xmm4 |
| 2115 » leal» 16(%edx),%edx |
| 2116 » movdqa» %xmm2,%xmm3 |
| 2117 » pslldq» $4,%xmm2 |
| 2118 » pxor» %xmm2,%xmm3 |
| 2119 » pslldq» $4,%xmm2 |
| 2120 » pxor» %xmm2,%xmm3 |
| 2121 » pslldq» $4,%xmm2 |
| 2122 » pxor» %xmm3,%xmm2 |
| 2123 » pxor» %xmm2,%xmm0 |
| 2124 » movdqu» %xmm0,-16(%edx) |
| 2125 » movdqa» %xmm0,%xmm2 |
| 2126 » decl» %ecx |
| 2127 » jnz» L101loop_key128 |
| 2128 » movdqa» 48(%ebx),%xmm4 |
| 2129 .byte» 102,15,56,0,197 |
| 2130 .byte» 102,15,56,221,196 |
| 2131 » pslld» $1,%xmm4 |
| 2132 » movdqa» %xmm2,%xmm3 |
| 2133 » pslldq» $4,%xmm2 |
| 2134 » pxor» %xmm2,%xmm3 |
| 2135 » pslldq» $4,%xmm2 |
| 2136 » pxor» %xmm2,%xmm3 |
| 2137 » pslldq» $4,%xmm2 |
| 2138 » pxor» %xmm3,%xmm2 |
| 2139 » pxor» %xmm2,%xmm0 |
| 2140 » movdqu» %xmm0,(%edx) |
| 2141 » movdqa» %xmm0,%xmm2 |
| 2142 .byte» 102,15,56,0,197 |
| 2143 .byte» 102,15,56,221,196 |
| 2144 » movdqa» %xmm2,%xmm3 |
| 2145 » pslldq» $4,%xmm2 |
| 2146 » pxor» %xmm2,%xmm3 |
| 2147 » pslldq» $4,%xmm2 |
| 2148 » pxor» %xmm2,%xmm3 |
| 2149 » pslldq» $4,%xmm2 |
| 2150 » pxor» %xmm3,%xmm2 |
| 2151 » pxor» %xmm2,%xmm0 |
| 2152 » movdqu» %xmm0,16(%edx) |
| 2153 » movl» $9,%ecx |
| 2154 » movl» %ecx,96(%edx) |
| 2155 » jmp» L100good_key |
| 2156 .align» 4,0x90 |
| 2157 L09412rounds: |
2007 movq 16(%eax),%xmm2 | 2158 movq 16(%eax),%xmm2 |
| 2159 cmpl $268435456,%ebp |
| 2160 je L10212rounds_alt |
2008 movl $11,%ecx | 2161 movl $11,%ecx |
2009 movups %xmm0,-16(%edx) | 2162 movups %xmm0,-16(%edx) |
2010 .byte 102,15,58,223,202,1 | 2163 .byte 102,15,58,223,202,1 |
2011 » call» L095key_192a_cold | 2164 » call» L103key_192a_cold |
2012 .byte 102,15,58,223,202,2 | 2165 .byte 102,15,58,223,202,2 |
2013 » call» L096key_192b | 2166 » call» L104key_192b |
2014 .byte 102,15,58,223,202,4 | 2167 .byte 102,15,58,223,202,4 |
2015 » call» L097key_192a | 2168 » call» L105key_192a |
2016 .byte 102,15,58,223,202,8 | 2169 .byte 102,15,58,223,202,8 |
2017 » call» L096key_192b | 2170 » call» L104key_192b |
2018 .byte 102,15,58,223,202,16 | 2171 .byte 102,15,58,223,202,16 |
2019 » call» L097key_192a | 2172 » call» L105key_192a |
2020 .byte 102,15,58,223,202,32 | 2173 .byte 102,15,58,223,202,32 |
2021 » call» L096key_192b | 2174 » call» L104key_192b |
2022 .byte 102,15,58,223,202,64 | 2175 .byte 102,15,58,223,202,64 |
2023 » call» L097key_192a | 2176 » call» L105key_192a |
2024 .byte 102,15,58,223,202,128 | 2177 .byte 102,15,58,223,202,128 |
2025 » call» L096key_192b | 2178 » call» L104key_192b |
2026 movups %xmm0,(%edx) | 2179 movups %xmm0,(%edx) |
2027 movl %ecx,48(%edx) | 2180 movl %ecx,48(%edx) |
2028 » xorl» %eax,%eax | 2181 » jmp» L100good_key |
2029 » ret | |
2030 .align 4,0x90 | 2182 .align 4,0x90 |
2031 L097key_192a: | 2183 L105key_192a: |
2032 movups %xmm0,(%edx) | 2184 movups %xmm0,(%edx) |
2033 leal 16(%edx),%edx | 2185 leal 16(%edx),%edx |
2034 .align 4,0x90 | 2186 .align 4,0x90 |
2035 L095key_192a_cold: | 2187 L103key_192a_cold: |
2036 movaps %xmm2,%xmm5 | 2188 movaps %xmm2,%xmm5 |
2037 L098key_192b_warm: | 2189 L106key_192b_warm: |
2038 shufps $16,%xmm0,%xmm4 | 2190 shufps $16,%xmm0,%xmm4 |
2039 movdqa %xmm2,%xmm3 | 2191 movdqa %xmm2,%xmm3 |
2040 xorps %xmm4,%xmm0 | 2192 xorps %xmm4,%xmm0 |
2041 shufps $140,%xmm0,%xmm4 | 2193 shufps $140,%xmm0,%xmm4 |
2042 pslldq $4,%xmm3 | 2194 pslldq $4,%xmm3 |
2043 xorps %xmm4,%xmm0 | 2195 xorps %xmm4,%xmm0 |
2044 pshufd $85,%xmm1,%xmm1 | 2196 pshufd $85,%xmm1,%xmm1 |
2045 pxor %xmm3,%xmm2 | 2197 pxor %xmm3,%xmm2 |
2046 pxor %xmm1,%xmm0 | 2198 pxor %xmm1,%xmm0 |
2047 pshufd $255,%xmm0,%xmm3 | 2199 pshufd $255,%xmm0,%xmm3 |
2048 pxor %xmm3,%xmm2 | 2200 pxor %xmm3,%xmm2 |
2049 ret | 2201 ret |
2050 .align 4,0x90 | 2202 .align 4,0x90 |
2051 L096key_192b: | 2203 L104key_192b: |
2052 movaps %xmm0,%xmm3 | 2204 movaps %xmm0,%xmm3 |
2053 shufps $68,%xmm0,%xmm5 | 2205 shufps $68,%xmm0,%xmm5 |
2054 movups %xmm5,(%edx) | 2206 movups %xmm5,(%edx) |
2055 shufps $78,%xmm2,%xmm3 | 2207 shufps $78,%xmm2,%xmm3 |
2056 movups %xmm3,16(%edx) | 2208 movups %xmm3,16(%edx) |
2057 leal 32(%edx),%edx | 2209 leal 32(%edx),%edx |
2058 » jmp» L098key_192b_warm | 2210 » jmp» L106key_192b_warm |
2059 .align 4,0x90 | 2211 .align 4,0x90 |
2060 L08914rounds: | 2212 L10212rounds_alt: |
| 2213 » movdqa» 16(%ebx),%xmm5 |
| 2214 » movdqa» 32(%ebx),%xmm4 |
| 2215 » movl» $8,%ecx |
| 2216 » movdqu» %xmm0,-16(%edx) |
| 2217 L107loop_key192: |
| 2218 » movq» %xmm2,(%edx) |
| 2219 » movdqa» %xmm2,%xmm1 |
| 2220 .byte» 102,15,56,0,213 |
| 2221 .byte» 102,15,56,221,212 |
| 2222 » pslld» $1,%xmm4 |
| 2223 » leal» 24(%edx),%edx |
| 2224 » movdqa» %xmm0,%xmm3 |
| 2225 » pslldq» $4,%xmm0 |
| 2226 » pxor» %xmm0,%xmm3 |
| 2227 » pslldq» $4,%xmm0 |
| 2228 » pxor» %xmm0,%xmm3 |
| 2229 » pslldq» $4,%xmm0 |
| 2230 » pxor» %xmm3,%xmm0 |
| 2231 » pshufd» $255,%xmm0,%xmm3 |
| 2232 » pxor» %xmm1,%xmm3 |
| 2233 » pslldq» $4,%xmm1 |
| 2234 » pxor» %xmm1,%xmm3 |
| 2235 » pxor» %xmm2,%xmm0 |
| 2236 » pxor» %xmm3,%xmm2 |
| 2237 » movdqu» %xmm0,-16(%edx) |
| 2238 » decl» %ecx |
| 2239 » jnz» L107loop_key192 |
| 2240 » movl» $11,%ecx |
| 2241 » movl» %ecx,32(%edx) |
| 2242 » jmp» L100good_key |
| 2243 .align» 4,0x90 |
| 2244 L09314rounds: |
2061 movups 16(%eax),%xmm2 | 2245 movups 16(%eax),%xmm2 |
| 2246 leal 16(%edx),%edx |
| 2247 cmpl $268435456,%ebp |
| 2248 je L10814rounds_alt |
2062 movl $13,%ecx | 2249 movl $13,%ecx |
2063 leal 16(%edx),%edx | |
2064 movups %xmm0,-32(%edx) | 2250 movups %xmm0,-32(%edx) |
2065 movups %xmm2,-16(%edx) | 2251 movups %xmm2,-16(%edx) |
2066 .byte 102,15,58,223,202,1 | 2252 .byte 102,15,58,223,202,1 |
2067 » call» L099key_256a_cold | 2253 » call» L109key_256a_cold |
2068 .byte 102,15,58,223,200,1 | 2254 .byte 102,15,58,223,200,1 |
2069 » call» L100key_256b | 2255 » call» L110key_256b |
2070 .byte 102,15,58,223,202,2 | 2256 .byte 102,15,58,223,202,2 |
2071 » call» L101key_256a | 2257 » call» L111key_256a |
2072 .byte 102,15,58,223,200,2 | 2258 .byte 102,15,58,223,200,2 |
2073 » call» L100key_256b | 2259 » call» L110key_256b |
2074 .byte 102,15,58,223,202,4 | 2260 .byte 102,15,58,223,202,4 |
2075 » call» L101key_256a | 2261 » call» L111key_256a |
2076 .byte 102,15,58,223,200,4 | 2262 .byte 102,15,58,223,200,4 |
2077 » call» L100key_256b | 2263 » call» L110key_256b |
2078 .byte 102,15,58,223,202,8 | 2264 .byte 102,15,58,223,202,8 |
2079 » call» L101key_256a | 2265 » call» L111key_256a |
2080 .byte 102,15,58,223,200,8 | 2266 .byte 102,15,58,223,200,8 |
2081 » call» L100key_256b | 2267 » call» L110key_256b |
2082 .byte 102,15,58,223,202,16 | 2268 .byte 102,15,58,223,202,16 |
2083 » call» L101key_256a | 2269 » call» L111key_256a |
2084 .byte 102,15,58,223,200,16 | 2270 .byte 102,15,58,223,200,16 |
2085 » call» L100key_256b | 2271 » call» L110key_256b |
2086 .byte 102,15,58,223,202,32 | 2272 .byte 102,15,58,223,202,32 |
2087 » call» L101key_256a | 2273 » call» L111key_256a |
2088 .byte 102,15,58,223,200,32 | 2274 .byte 102,15,58,223,200,32 |
2089 » call» L100key_256b | 2275 » call» L110key_256b |
2090 .byte 102,15,58,223,202,64 | 2276 .byte 102,15,58,223,202,64 |
2091 » call» L101key_256a | 2277 » call» L111key_256a |
2092 movups %xmm0,(%edx) | 2278 movups %xmm0,(%edx) |
2093 movl %ecx,16(%edx) | 2279 movl %ecx,16(%edx) |
2094 xorl %eax,%eax | 2280 xorl %eax,%eax |
2095 » ret | 2281 » jmp» L100good_key |
2096 .align 4,0x90 | 2282 .align 4,0x90 |
2097 L101key_256a: | 2283 L111key_256a: |
2098 movups %xmm2,(%edx) | 2284 movups %xmm2,(%edx) |
2099 leal 16(%edx),%edx | 2285 leal 16(%edx),%edx |
2100 L099key_256a_cold: | 2286 L109key_256a_cold: |
2101 shufps $16,%xmm0,%xmm4 | 2287 shufps $16,%xmm0,%xmm4 |
2102 xorps %xmm4,%xmm0 | 2288 xorps %xmm4,%xmm0 |
2103 shufps $140,%xmm0,%xmm4 | 2289 shufps $140,%xmm0,%xmm4 |
2104 xorps %xmm4,%xmm0 | 2290 xorps %xmm4,%xmm0 |
2105 shufps $255,%xmm1,%xmm1 | 2291 shufps $255,%xmm1,%xmm1 |
2106 xorps %xmm1,%xmm0 | 2292 xorps %xmm1,%xmm0 |
2107 ret | 2293 ret |
2108 .align 4,0x90 | 2294 .align 4,0x90 |
2109 L100key_256b: | 2295 L110key_256b: |
2110 movups %xmm0,(%edx) | 2296 movups %xmm0,(%edx) |
2111 leal 16(%edx),%edx | 2297 leal 16(%edx),%edx |
2112 shufps $16,%xmm2,%xmm4 | 2298 shufps $16,%xmm2,%xmm4 |
2113 xorps %xmm4,%xmm2 | 2299 xorps %xmm4,%xmm2 |
2114 shufps $140,%xmm2,%xmm4 | 2300 shufps $140,%xmm2,%xmm4 |
2115 xorps %xmm4,%xmm2 | 2301 xorps %xmm4,%xmm2 |
2116 shufps $170,%xmm1,%xmm1 | 2302 shufps $170,%xmm1,%xmm1 |
2117 xorps %xmm1,%xmm2 | 2303 xorps %xmm1,%xmm2 |
2118 ret | 2304 ret |
2119 .align» 2,0x90 | 2305 .align» 4,0x90 |
2120 L088bad_pointer: | 2306 L10814rounds_alt: |
2121 » movl» $-1,%eax | 2307 » movdqa» (%ebx),%xmm5 |
| 2308 » movdqa» 32(%ebx),%xmm4 |
| 2309 » movl» $7,%ecx |
| 2310 » movdqu» %xmm0,-32(%edx) |
| 2311 » movdqa» %xmm2,%xmm1 |
| 2312 » movdqu» %xmm2,-16(%edx) |
| 2313 L112loop_key256: |
| 2314 .byte» 102,15,56,0,213 |
| 2315 .byte» 102,15,56,221,212 |
| 2316 » movdqa» %xmm0,%xmm3 |
| 2317 » pslldq» $4,%xmm0 |
| 2318 » pxor» %xmm0,%xmm3 |
| 2319 » pslldq» $4,%xmm0 |
| 2320 » pxor» %xmm0,%xmm3 |
| 2321 » pslldq» $4,%xmm0 |
| 2322 » pxor» %xmm3,%xmm0 |
| 2323 » pslld» $1,%xmm4 |
| 2324 » pxor» %xmm2,%xmm0 |
| 2325 » movdqu» %xmm0,(%edx) |
| 2326 » decl» %ecx |
| 2327 » jz» L113done_key256 |
| 2328 » pshufd» $255,%xmm0,%xmm2 |
| 2329 » pxor» %xmm3,%xmm3 |
| 2330 .byte» 102,15,56,221,211 |
| 2331 » movdqa» %xmm1,%xmm3 |
| 2332 » pslldq» $4,%xmm1 |
| 2333 » pxor» %xmm1,%xmm3 |
| 2334 » pslldq» $4,%xmm1 |
| 2335 » pxor» %xmm1,%xmm3 |
| 2336 » pslldq» $4,%xmm1 |
| 2337 » pxor» %xmm3,%xmm1 |
| 2338 » pxor» %xmm1,%xmm2 |
| 2339 » movdqu» %xmm2,16(%edx) |
| 2340 » leal» 32(%edx),%edx |
| 2341 » movdqa» %xmm2,%xmm1 |
| 2342 » jmp» L112loop_key256 |
| 2343 L113done_key256: |
| 2344 » movl» $13,%ecx |
| 2345 » movl» %ecx,16(%edx) |
| 2346 L100good_key: |
| 2347 » pxor» %xmm0,%xmm0 |
| 2348 » pxor» %xmm1,%xmm1 |
| 2349 » pxor» %xmm2,%xmm2 |
| 2350 » pxor» %xmm3,%xmm3 |
| 2351 » pxor» %xmm4,%xmm4 |
| 2352 » pxor» %xmm5,%xmm5 |
| 2353 » xorl» %eax,%eax |
| 2354 » popl» %ebx |
| 2355 » popl» %ebp |
2122 ret | 2356 ret |
2123 .align 2,0x90 | 2357 .align 2,0x90 |
2124 L091bad_keybits: | 2358 L091bad_pointer: |
| 2359 » movl» $-1,%eax |
| 2360 » popl» %ebx |
| 2361 » popl» %ebp |
| 2362 » ret |
| 2363 .align» 2,0x90 |
| 2364 L095bad_keybits: |
| 2365 » pxor» %xmm0,%xmm0 |
2125 movl $-2,%eax | 2366 movl $-2,%eax |
| 2367 popl %ebx |
| 2368 popl %ebp |
2126 ret | 2369 ret |
2127 .globl _aesni_set_encrypt_key | 2370 .globl _aesni_set_encrypt_key |
2128 .private_extern _aesni_set_encrypt_key | 2371 .private_extern _aesni_set_encrypt_key |
2129 .align 4 | 2372 .align 4 |
2130 _aesni_set_encrypt_key: | 2373 _aesni_set_encrypt_key: |
2131 L_aesni_set_encrypt_key_begin: | 2374 L_aesni_set_encrypt_key_begin: |
2132 movl 4(%esp),%eax | 2375 movl 4(%esp),%eax |
2133 movl 8(%esp),%ecx | 2376 movl 8(%esp),%ecx |
2134 movl 12(%esp),%edx | 2377 movl 12(%esp),%edx |
2135 call __aesni_set_encrypt_key | 2378 call __aesni_set_encrypt_key |
2136 ret | 2379 ret |
2137 .globl _aesni_set_decrypt_key | 2380 .globl _aesni_set_decrypt_key |
2138 .private_extern _aesni_set_decrypt_key | 2381 .private_extern _aesni_set_decrypt_key |
2139 .align 4 | 2382 .align 4 |
2140 _aesni_set_decrypt_key: | 2383 _aesni_set_decrypt_key: |
2141 L_aesni_set_decrypt_key_begin: | 2384 L_aesni_set_decrypt_key_begin: |
2142 movl 4(%esp),%eax | 2385 movl 4(%esp),%eax |
2143 movl 8(%esp),%ecx | 2386 movl 8(%esp),%ecx |
2144 movl 12(%esp),%edx | 2387 movl 12(%esp),%edx |
2145 call __aesni_set_encrypt_key | 2388 call __aesni_set_encrypt_key |
2146 movl 12(%esp),%edx | 2389 movl 12(%esp),%edx |
2147 shll $4,%ecx | 2390 shll $4,%ecx |
2148 testl %eax,%eax | 2391 testl %eax,%eax |
2149 » jnz» L102dec_key_ret | 2392 » jnz» L114dec_key_ret |
2150 leal 16(%edx,%ecx,1),%eax | 2393 leal 16(%edx,%ecx,1),%eax |
2151 movups (%edx),%xmm0 | 2394 movups (%edx),%xmm0 |
2152 movups (%eax),%xmm1 | 2395 movups (%eax),%xmm1 |
2153 movups %xmm0,(%eax) | 2396 movups %xmm0,(%eax) |
2154 movups %xmm1,(%edx) | 2397 movups %xmm1,(%edx) |
2155 leal 16(%edx),%edx | 2398 leal 16(%edx),%edx |
2156 leal -16(%eax),%eax | 2399 leal -16(%eax),%eax |
2157 L103dec_key_inverse: | 2400 L115dec_key_inverse: |
2158 movups (%edx),%xmm0 | 2401 movups (%edx),%xmm0 |
2159 movups (%eax),%xmm1 | 2402 movups (%eax),%xmm1 |
2160 .byte 102,15,56,219,192 | 2403 .byte 102,15,56,219,192 |
2161 .byte 102,15,56,219,201 | 2404 .byte 102,15,56,219,201 |
2162 leal 16(%edx),%edx | 2405 leal 16(%edx),%edx |
2163 leal -16(%eax),%eax | 2406 leal -16(%eax),%eax |
2164 movups %xmm0,16(%eax) | 2407 movups %xmm0,16(%eax) |
2165 movups %xmm1,-16(%edx) | 2408 movups %xmm1,-16(%edx) |
2166 cmpl %edx,%eax | 2409 cmpl %edx,%eax |
2167 » ja» L103dec_key_inverse | 2410 » ja» L115dec_key_inverse |
2168 movups (%edx),%xmm0 | 2411 movups (%edx),%xmm0 |
2169 .byte 102,15,56,219,192 | 2412 .byte 102,15,56,219,192 |
2170 movups %xmm0,(%edx) | 2413 movups %xmm0,(%edx) |
| 2414 pxor %xmm0,%xmm0 |
| 2415 pxor %xmm1,%xmm1 |
2171 xorl %eax,%eax | 2416 xorl %eax,%eax |
2172 L102dec_key_ret: | 2417 L114dec_key_ret: |
2173 ret | 2418 ret |
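_aesni_set_decrypt_key builds the encryption schedule, then walks it from both ends: the ".byte 102,15,56,219,.." rows are aesimc, which applies InvMixColumns so the reordered keys fit the equivalent-inverse-cipher aesdec/aesdeclast path (the first and last keys are only swapped, never transformed). A sketch under the assumption that rk[] holds the nr+1 encryption round keys contiguously:

#include <wmmintrin.h>

static void invert_key_schedule(__m128i *rk, int nr) {
    for (int i = 0, j = nr; i < j; i++, j--) { /* swap ends, as the movups pairs do */
        __m128i t = rk[i];
        rk[i] = rk[j];
        rk[j] = t;
    }
    for (int i = 1; i < nr; i++)               /* aesimc every interior key */
        rk[i] = _mm_aesimc_si128(rk[i]);
}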
| 2419 .align 6,0x90 |
| 2420 Lkey_const: |
| 2421 .long 202313229,202313229,202313229,202313229 |
| 2422 .long 67569157,67569157,67569157,67569157 |
| 2423 .long 1,1,1,1 |
| 2424 .long 27,27,27,27 |
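The Lkey_const table feeds the new _alt key-schedule paths, which __aesni_set_encrypt_key selects by testing the second OPENSSL_ia32cap_P word for AVX (bit 28) without bit 11 (repurposed by OpenSSL for AMD XOP). The first two rows are pshufb masks (0x0c0f0e0d rotates word 3 into every dword, 0x04070605 does the same for word 1), and the last two seed the rcon sequence: 1 is doubled by pslld $1 up to 0x80, then 27 supplies 0x1b and, after one more doubling, 0x36. Broadcasting RotWord(w3) into all four dwords makes ShiftRows a no-op, so aesenclast against an all-rcon "round key" reproduces what aeskeygenassist would compute. One such aeskeygenassist-free step might look like this sketch (illustrative names; compile with -maes -mssse3):

#include <emmintrin.h>
#include <tmmintrin.h>
#include <wmmintrin.h>

static __m128i keygen_core_alt(__m128i key, __m128i rcon4) {
    const __m128i rot_w3 = _mm_set1_epi32(0x0c0f0e0d); /* first Lkey_const row */
    __m128i t = _mm_shuffle_epi8(key, rot_w3);  /* RotWord(w3) in every dword  */
    return _mm_aesenclast_si128(t, rcon4);      /* SubBytes then ^rcon; ShiftRows
                                                   is identity on equal dwords */
}

/* rcon4 would be _mm_set1_epi32(rcon), stepping 1,2,4,... as pslld $1 does. */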
2174 .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 | 2425 .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69 |
2175 .byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 | 2426 .byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83 |
2176 .byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 | 2427 .byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115 |
2177 .byte 115,108,46,111,114,103,62,0 | 2428 .byte 115,108,46,111,114,103,62,0 |
| 2429 .section __IMPORT,__pointers,non_lazy_symbol_pointers |
| 2430 L_OPENSSL_ia32cap_P$non_lazy_ptr: |
| 2431 .indirect_symbol _OPENSSL_ia32cap_P |
| 2432 .long 0 |
2178 #endif | 2433 #endif |