.file "vpaes-x86.s"
.text
.align 6,0x90
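# L_vpaes_consts: constant pool for the vector-permutation (SSSE3 pshufb) AES
# implementation: nibble-inversion tables, the 0x0F low-nibble mask, input/output
# transforms, S-box and (inverse) MixColumns lookup tables, ShiftRows permutations,
# round constants and the key-schedule transform tables. Code addresses this pool
# PC-relative through %ebp, biased by +0x30.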
L_vpaes_consts:
.long 218628480,235210255,168496130,67568393
.long 252381056,17041926,33884169,51187212
.long 252645135,252645135,252645135,252645135
.long 1512730624,3266504856,1377990664,3401244816
.long 830229760,1275146365,2969422977,3447763452
.long 3411033600,2979783055,338359620,2782886510
.long 4209124096,907596821,221174255,1006095553
.long 191964160,3799684038,3164090317,1589111125
.long 182528256,1777043520,2877432650,3265356744
.long 1874708224,3503451415,3305285752,363511674
.long 1606117888,3487855781,1093350906,2384367825
.long 197121,67569157,134941193,202313229
.long 67569157,134941193,202313229,197121
.long 134941193,202313229,197121,67569157
.long 202313229,197121,67569157,134941193
.long 33619971,100992007,168364043,235736079
.long 235736079,33619971,100992007,168364043
.long 168364043,235736079,33619971,100992007
.long 100992007,168364043,235736079,33619971
.long 50462976,117835012,185207048,252579084
.long 252314880,51251460,117574920,184942860
.long 184682752,252054788,50987272,118359308
.long 118099200,185467140,251790600,50727180
.long 2946363062,528716217,1300004225,1881839624
.long 1532713819,1532713819,1532713819,1532713819
.long 3602276352,4288629033,3737020424,4153884961
.long 1354558464,32357713,2958822624,3775749553
.long 1201988352,132424512,1572796698,503232858
.long 2213177600,1597421020,4103937655,675398315
.long 2749646592,4273543773,1511898873,121693092
.long 3040248576,1103263732,2871565598,1608280554
.long 2236667136,2588920351,482954393,64377734
.long 3069987328,291237287,2117370568,3650299247
.long 533321216,3573750986,2572112006,1401264716
.long 1339849704,2721158661,548607111,3445553514
.long 2128193280,3054596040,2183486460,1257083700
.long 655635200,1165381986,3923443150,2344132524
.long 190078720,256924420,290342170,357187870
.long 1610966272,2263057382,4103205268,309794674
.long 2592527872,2233205587,1335446729,3402964816
.long 3973531904,3225098121,3002836325,1918774430
.long 3870401024,2102906079,2284471353,4117666579
.long 617007872,1021508343,366931923,691083277
.long 2528395776,3491914898,2968704004,1613121270
.long 3445188352,3247741094,844474987,4093578302
.long 651481088,1190302358,1689581232,574775300
.long 4289380608,206939853,2555985458,2489840491
.long 2130264064,327674451,3566485037,3349835193
.long 2470714624,316102159,3636825756,3393945945
.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
.byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
.byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
.byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
.byte 118,101,114,115,105,116,121,41,0
.align 6,0x90
.align 4
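# __vpaes_preheat: adds the return address to the pre-biased %ebp so %ebp ends
# up pointing at L_vpaes_consts+0x30 (PIC), then preloads the constants the
# cores keep live: %xmm7 = inversion table, %xmm6 = 0x0F low-nibble mask.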
__vpaes_preheat:
addl (%esp),%ebp
movdqa -48(%ebp),%xmm7
movdqa -16(%ebp),%xmm6
ret
.align 4
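# __vpaes_encrypt_core: encrypts the block in %xmm0 in place using the key
# schedule at (%edx) (round count at 240(%edx)). Expects %ebp and %xmm6/%xmm7
# as set up by __vpaes_preheat; clobbers %eax, %ebx, %ecx, %edx and %xmm1-%xmm5.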
__vpaes_encrypt_core:
movl $16,%ecx
movl 240(%edx),%eax
movdqa %xmm6,%xmm1
movdqa (%ebp),%xmm2
pandn %xmm0,%xmm1
movdqu (%edx),%xmm5
psrld $4,%xmm1
pand %xmm6,%xmm0
.byte 102,15,56,0,208
movdqa 16(%ebp),%xmm0
.byte 102,15,56,0,193
pxor %xmm5,%xmm2
pxor %xmm2,%xmm0
addl $16,%edx
leal 192(%ebp),%ebx
jmp L000enc_entry
.align 4,0x90
L001enc_loop:
movdqa 32(%ebp),%xmm4
.byte 102,15,56,0,226
pxor %xmm5,%xmm4
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
movdqa 64(%ebp),%xmm5
.byte 102,15,56,0,234
movdqa -64(%ebx,%ecx,1),%xmm1
movdqa 80(%ebp),%xmm2
.byte 102,15,56,0,211
pxor %xmm5,%xmm2
movdqa (%ebx,%ecx,1),%xmm4
movdqa %xmm0,%xmm3
.byte 102,15,56,0,193
addl $16,%edx
pxor %xmm2,%xmm0
.byte 102,15,56,0,220
addl $16,%ecx
pxor %xmm0,%xmm3
.byte 102,15,56,0,193
andl $48,%ecx
pxor %xmm3,%xmm0
subl $1,%eax
L000enc_entry:
movdqa %xmm6,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm6,%xmm0
movdqa -32(%ebp),%xmm5
.byte 102,15,56,0,232
pxor %xmm1,%xmm0
movdqa %xmm7,%xmm3
.byte 102,15,56,0,217
pxor %xmm5,%xmm3
movdqa %xmm7,%xmm4
.byte 102,15,56,0,224
pxor %xmm5,%xmm4
movdqa %xmm7,%xmm2
.byte 102,15,56,0,211
pxor %xmm0,%xmm2
movdqa %xmm7,%xmm3
movdqu (%edx),%xmm5
.byte 102,15,56,0,220
pxor %xmm1,%xmm3
jnz L001enc_loop
movdqa 96(%ebp),%xmm4
movdqa 112(%ebp),%xmm0
.byte 102,15,56,0,226
pxor %xmm5,%xmm4
.byte 102,15,56,0,195
movdqa 64(%ebx,%ecx,1),%xmm1
pxor %xmm4,%xmm0
.byte 102,15,56,0,193
ret
.align 4
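# __vpaes_decrypt_core: decrypts the block in %xmm0 in place using the key
# schedule at (%edx); same conventions as __vpaes_encrypt_core.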
__vpaes_decrypt_core:
movl 240(%edx),%eax
leal 608(%ebp),%ebx
movdqa %xmm6,%xmm1
movdqa -64(%ebx),%xmm2
pandn %xmm0,%xmm1
movl %eax,%ecx
psrld $4,%xmm1
movdqu (%edx),%xmm5
shll $4,%ecx
pand %xmm6,%xmm0
.byte 102,15,56,0,208
movdqa -48(%ebx),%xmm0
xorl $48,%ecx
.byte 102,15,56,0,193
andl $48,%ecx
pxor %xmm5,%xmm2
movdqa 176(%ebp),%xmm5
pxor %xmm2,%xmm0
addl $16,%edx
leal -352(%ebx,%ecx,1),%ecx
jmp L002dec_entry
.align 4,0x90
L003dec_loop:
movdqa -32(%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa -16(%ebx),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
addl $16,%edx
.byte 102,15,56,0,197
movdqa (%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa 16(%ebx),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
subl $1,%eax
.byte 102,15,56,0,197
movdqa 32(%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa 48(%ebx),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
.byte 102,15,56,0,197
movdqa 64(%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa 80(%ebx),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
.byte 102,15,58,15,237,12
L002dec_entry:
movdqa %xmm6,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm6,%xmm0
movdqa -32(%ebp),%xmm2
.byte 102,15,56,0,208
pxor %xmm1,%xmm0
movdqa %xmm7,%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
movdqa %xmm7,%xmm4
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm7,%xmm2
.byte 102,15,56,0,211
pxor %xmm0,%xmm2
movdqa %xmm7,%xmm3
.byte 102,15,56,0,220
pxor %xmm1,%xmm3
movdqu (%edx),%xmm0
jnz L003dec_loop
movdqa 96(%ebx),%xmm4
.byte 102,15,56,0,226
pxor %xmm0,%xmm4
movdqa 112(%ebx),%xmm0
movdqa (%ecx),%xmm2
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
.byte 102,15,56,0,194
ret
.align 4
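# __vpaes_schedule_core: expands the user key at (%esi) into the schedule at
# (%edx). %eax = key size in bits (128/192/256), %edi != 0 for a decryption
# schedule, %ecx = initial ShiftRows rotation state. %ebp is pre-biased by the
# caller so that adding the return address yields the constants base.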
__vpaes_schedule_core:
addl (%esp),%ebp
movdqu (%esi),%xmm0
movdqa 320(%ebp),%xmm2
movdqa %xmm0,%xmm3
leal (%ebp),%ebx
movdqa %xmm2,4(%esp)
call __vpaes_schedule_transform
movdqa %xmm0,%xmm7
testl %edi,%edi
jnz L004schedule_am_decrypting
movdqu %xmm0,(%edx)
jmp L005schedule_go
L004schedule_am_decrypting:
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
movdqu %xmm3,(%edx)
xorl $48,%ecx
L005schedule_go:
cmpl $192,%eax
ja L006schedule_256
je L007schedule_192
L008schedule_128:
movl $10,%eax
L009loop_schedule_128:
call __vpaes_schedule_round
decl %eax
jz L010schedule_mangle_last
call __vpaes_schedule_mangle
jmp L009loop_schedule_128
.align 4,0x90
L007schedule_192:
movdqu 8(%esi),%xmm0
call __vpaes_schedule_transform
movdqa %xmm0,%xmm6
pxor %xmm4,%xmm4
movhlps %xmm4,%xmm6
movl $4,%eax
L011loop_schedule_192:
call __vpaes_schedule_round
.byte 102,15,58,15,198,8
call __vpaes_schedule_mangle
call __vpaes_schedule_192_smear
call __vpaes_schedule_mangle
call __vpaes_schedule_round
decl %eax
jz L010schedule_mangle_last
call __vpaes_schedule_mangle
call __vpaes_schedule_192_smear
jmp L011loop_schedule_192
.align 4,0x90
L006schedule_256:
movdqu 16(%esi),%xmm0
call __vpaes_schedule_transform
movl $7,%eax
L012loop_schedule_256:
call __vpaes_schedule_mangle
movdqa %xmm0,%xmm6
call __vpaes_schedule_round
decl %eax
jz L010schedule_mangle_last
call __vpaes_schedule_mangle
pshufd $255,%xmm0,%xmm0
movdqa %xmm7,20(%esp)
movdqa %xmm6,%xmm7
call L_vpaes_schedule_low_round
movdqa 20(%esp),%xmm7
jmp L012loop_schedule_256
.align 4,0x90
L010schedule_mangle_last:
leal 384(%ebp),%ebx
testl %edi,%edi
jnz L013schedule_mangle_last_dec
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,193
leal 352(%ebp),%ebx
addl $32,%edx
L013schedule_mangle_last_dec:
addl $-16,%edx
pxor 336(%ebp),%xmm0
call __vpaes_schedule_transform
movdqu %xmm0,(%edx)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
ret
.align 4
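# __vpaes_schedule_192_smear: smears the short, low half of the 192-bit schedule
# state in %xmm6 with data taken from %xmm7, leaving the result in %xmm0/%xmm6.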
__vpaes_schedule_192_smear:
pshufd $128,%xmm6,%xmm0
pxor %xmm0,%xmm6
pshufd $254,%xmm7,%xmm0
pxor %xmm0,%xmm6
movdqa %xmm6,%xmm0
pxor %xmm1,%xmm1
movhlps %xmm1,%xmm6
ret
.align 4
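# __vpaes_schedule_round: one full round of the key schedule: rotates the round
# constant kept at 8(%esp) into %xmm7, broadcasts and rotates the high word of
# %xmm0, then falls into L_vpaes_schedule_low_round, which smears %xmm7, applies
# the S-box to %xmm0 and xors the result into the schedule state (%xmm7/%xmm0).
# The 256-bit schedule calls L_vpaes_schedule_low_round directly.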
__vpaes_schedule_round:
movdqa 8(%esp),%xmm2
pxor %xmm1,%xmm1
.byte 102,15,58,15,202,15
.byte 102,15,58,15,210,15
pxor %xmm1,%xmm7
pshufd $255,%xmm0,%xmm0
.byte 102,15,58,15,192,1
movdqa %xmm2,8(%esp)
L_vpaes_schedule_low_round:
movdqa %xmm7,%xmm1
pslldq $4,%xmm7
pxor %xmm1,%xmm7
movdqa %xmm7,%xmm1
pslldq $8,%xmm7
pxor %xmm1,%xmm7
pxor 336(%ebp),%xmm7
movdqa -16(%ebp),%xmm4
movdqa -48(%ebp),%xmm5
movdqa %xmm4,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm4,%xmm0
movdqa -32(%ebp),%xmm2
.byte 102,15,56,0,208
pxor %xmm1,%xmm0
movdqa %xmm5,%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
movdqa %xmm5,%xmm4
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm5,%xmm2
.byte 102,15,56,0,211
pxor %xmm0,%xmm2
movdqa %xmm5,%xmm3
.byte 102,15,56,0,220
pxor %xmm1,%xmm3
movdqa 32(%ebp),%xmm4
.byte 102,15,56,0,226
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
pxor %xmm7,%xmm0
movdqa %xmm0,%xmm7
ret
.align 4
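# __vpaes_schedule_transform: applies the linear transform whose low/high-nibble
# tables sit at (%ebx) and 16(%ebx) to %xmm0 (used for the input transform and
# the final output/deskew transforms).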
__vpaes_schedule_transform:
movdqa -16(%ebp),%xmm2
movdqa %xmm2,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm2,%xmm0
movdqa (%ebx),%xmm2
.byte 102,15,56,0,208
movdqa 16(%ebx),%xmm0
.byte 102,15,56,0,193
pxor %xmm2,%xmm0
ret
.align 4
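# __vpaes_schedule_mangle: derives the next stored round key from %xmm0 and
# writes it to (%edx). Encrypting: xor in the constant at 336(%ebp) and fold the
# result through the byte-rotation table at 128(%ebp) three times. Decrypting:
# run the key through the inverse key-schedule tables at 416(%ebp). Both paths
# then apply the ShiftRows permutation selected by %ecx and advance %ecx/%edx.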
__vpaes_schedule_mangle:
movdqa %xmm0,%xmm4
movdqa 128(%ebp),%xmm5
testl %edi,%edi
jnz L014schedule_mangle_dec
addl $16,%edx
pxor 336(%ebp),%xmm4
.byte 102,15,56,0,229
movdqa %xmm4,%xmm3
.byte 102,15,56,0,229
pxor %xmm4,%xmm3
.byte 102,15,56,0,229
pxor %xmm4,%xmm3
jmp L015schedule_mangle_both
.align 4,0x90
L014schedule_mangle_dec:
movdqa -16(%ebp),%xmm2
leal 416(%ebp),%esi
movdqa %xmm2,%xmm1
pandn %xmm4,%xmm1
psrld $4,%xmm1
pand %xmm2,%xmm4
movdqa (%esi),%xmm2
.byte 102,15,56,0,212
movdqa 16(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 32(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 48(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 64(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 80(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 96(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 112(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
addl $-16,%edx
L015schedule_mangle_both:
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
addl $-16,%ecx
andl $48,%ecx
movdqu %xmm3,(%edx)
ret
.globl _vpaes_set_encrypt_key
.align 4
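# vpaes_set_encrypt_key(const unsigned char *userKey, int bits, AES_KEY *key)
# cdecl entry point: switches to an aligned scratch frame, stores the round
# count (bits/32 + 5) at 240(key), and runs __vpaes_schedule_core forward
# (%edi = 0). Returns 0 in %eax.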
_vpaes_set_encrypt_key:
L_vpaes_set_encrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%eax
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx
movl %ebx,240(%edx)
movl $48,%ecx
movl $0,%edi
leal L_vpaes_consts+0x30-L016pic_point,%ebp
call __vpaes_schedule_core
L016pic_point:
movl 48(%esp),%esp
xorl %eax,%eax
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_set_decrypt_key
.align 4
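# vpaes_set_decrypt_key(const unsigned char *userKey, int bits, AES_KEY *key)
# cdecl entry point: like the encrypt-key routine, but points %edx at the end
# of the schedule, sets %edi = 1 and derives the initial rotation in %ecx from
# the key size so the schedule is written in reverse for decryption.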
_vpaes_set_decrypt_key:
L_vpaes_set_decrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%eax
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx
movl %ebx,240(%edx)
shll $4,%ebx
leal 16(%edx,%ebx,1),%edx
movl $1,%edi
movl %eax,%ecx
shrl $1,%ecx
andl $32,%ecx
xorl $32,%ecx
leal L_vpaes_consts+0x30-L017pic_point,%ebp
call __vpaes_schedule_core
L017pic_point:
movl 48(%esp),%esp
xorl %eax,%eax
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_encrypt
.align 4
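# vpaes_encrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key)
# cdecl entry point: preheats the constants, loads one 16-byte block from "in",
# runs __vpaes_encrypt_core with the schedule at "key" and stores the result to "out".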
_vpaes_encrypt:
L_vpaes_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
leal L_vpaes_consts+0x30-L018pic_point,%ebp
call __vpaes_preheat
L018pic_point:
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%edi
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call __vpaes_encrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_decrypt
.align 4
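# vpaes_decrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key)
# cdecl entry point: same frame setup as vpaes_encrypt, but runs __vpaes_decrypt_core.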
_vpaes_decrypt:
L_vpaes_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
leal L_vpaes_consts+0x30-L019pic_point,%ebp
call __vpaes_preheat
L019pic_point:
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%edi
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call __vpaes_decrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.globl _vpaes_cbc_encrypt
.align 4
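# vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out, size_t length,
#                   const AES_KEY *key, unsigned char *ivec, int enc)
# cdecl entry point: CBC-mode bulk routine. Returns immediately if length < 16,
# keeps the chaining value in %xmm1, selects the encrypt or decrypt loop based
# on "enc", processes whole 16-byte blocks and writes the final IV back to ivec.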
_vpaes_cbc_encrypt:
L_vpaes_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
subl $16,%eax
jc L020cbc_abort
leal -56(%esp),%ebx
movl 36(%esp),%ebp
andl $-16,%ebx
movl 40(%esp),%ecx
xchgl %esp,%ebx
movdqu (%ebp),%xmm1
subl %esi,%edi
movl %ebx,48(%esp)
movl %edi,(%esp)
movl %edx,4(%esp)
movl %ebp,8(%esp)
movl %eax,%edi
leal L_vpaes_consts+0x30-L021pic_point,%ebp
call __vpaes_preheat
L021pic_point:
cmpl $0,%ecx
je L022cbc_dec_loop
jmp L023cbc_enc_loop
.align 4,0x90
L023cbc_enc_loop:
movdqu (%esi),%xmm0
pxor %xmm1,%xmm0
call __vpaes_encrypt_core
movl (%esp),%ebx
movl 4(%esp),%edx
movdqa %xmm0,%xmm1
movdqu %xmm0,(%ebx,%esi,1)
leal 16(%esi),%esi
subl $16,%edi
jnc L023cbc_enc_loop
jmp L024cbc_done
.align 4,0x90
L022cbc_dec_loop:
movdqu (%esi),%xmm0
movdqa %xmm1,16(%esp)
movdqa %xmm0,32(%esp)
call __vpaes_decrypt_core
movl (%esp),%ebx
movl 4(%esp),%edx
pxor 16(%esp),%xmm0
movdqa 32(%esp),%xmm1
movdqu %xmm0,(%ebx,%esi,1)
leal 16(%esi),%esi
subl $16,%edi
jnc L022cbc_dec_loop
L024cbc_done:
movl 8(%esp),%ebx
movl 48(%esp),%esp
movdqu %xmm1,(%ebx)
L020cbc_abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret