OLD | NEW |
| (Empty) |
# ----------------------------------------------------------------------
# vpaes-x86.s — "Vector Permutation AES" for x86/SSSE3 (machine-generated
# perlasm output; do not hand-edit the instruction stream).
# .L_vpaes_consts: the 16-byte lookup tables driving the pshufb-based
# S-box/MixColumns/key-schedule arithmetic. All code below points %ebp at
# .L_vpaes_consts+0x30 (see the leal ...+0x30-.LNNNpic_point in the public
# entry points), so rows are addressed as -48(%ebp), -32(%ebp), ...,
# 336(%ebp), and the tail tables via %ebx = 192(%ebp) / 608(%ebp).
# NOTE(review): the exact role of each row (ipt/sb1/sb2/sbo, dipt/dsb*,
# rcon, key-mangle shuffles) is implied only by the offsets used below —
# confirm against the vpaes-x86.pl generator before renumbering anything.
# ----------------------------------------------------------------------
1 .file "vpaes-x86.s" | |
2 .text | |
3 .align 64 | |
4 .L_vpaes_consts: | |
5 .long 218628480,235210255,168496130,67568393 | |
6 .long 252381056,17041926,33884169,51187212 | |
# consts+0x20: 0x0F0F0F0F… — the low-nibble mask cached in %xmm6 (-16(%ebp))
7 .long 252645135,252645135,252645135,252645135 | |
8 .long 1512730624,3266504856,1377990664,3401244816 | |
9 .long 830229760,1275146365,2969422977,3447763452 | |
10 .long 3411033600,2979783055,338359620,2782886510 | |
11 .long 4209124096,907596821,221174255,1006095553 | |
12 .long 191964160,3799684038,3164090317,1589111125 | |
13 .long 182528256,1777043520,2877432650,3265356744 | |
14 .long 1874708224,3503451415,3305285752,363511674 | |
15 .long 1606117888,3487855781,1093350906,2384367825 | |
# consts+0xD0..0x14F: four rotated copies of two shuffle rows — indexed by
# the %ecx counter that cycles through 0,16,32,48 in the encrypt loop
16 .long 197121,67569157,134941193,202313229 | |
17 .long 67569157,134941193,202313229,197121 | |
18 .long 134941193,202313229,197121,67569157 | |
19 .long 202313229,197121,67569157,134941193 | |
20 .long 33619971,100992007,168364043,235736079 | |
21 .long 235736079,33619971,100992007,168364043 | |
22 .long 168364043,235736079,33619971,100992007 | |
23 .long 100992007,168364043,235736079,33619971 | |
24 .long 50462976,117835012,185207048,252579084 | |
25 .long 252314880,51251460,117574920,184942860 | |
26 .long 184682752,252054788,50987272,118359308 | |
27 .long 118099200,185467140,251790600,50727180 | |
28 .long 2946363062,528716217,1300004225,1881839624 | |
29 .long 1532713819,1532713819,1532713819,1532713819 | |
30 .long 3602276352,4288629033,3737020424,4153884961 | |
31 .long 1354558464,32357713,2958822624,3775749553 | |
32 .long 1201988352,132424512,1572796698,503232858 | |
33 .long 2213177600,1597421020,4103937655,675398315 | |
34 .long 2749646592,4273543773,1511898873,121693092 | |
35 .long 3040248576,1103263732,2871565598,1608280554 | |
36 .long 2236667136,2588920351,482954393,64377734 | |
37 .long 3069987328,291237287,2117370568,3650299247 | |
38 .long 533321216,3573750986,2572112006,1401264716 | |
39 .long 1339849704,2721158661,548607111,3445553514 | |
40 .long 2128193280,3054596040,2183486460,1257083700 | |
41 .long 655635200,1165381986,3923443150,2344132524 | |
42 .long 190078720,256924420,290342170,357187870 | |
43 .long 1610966272,2263057382,4103205268,309794674 | |
44 .long 2592527872,2233205587,1335446729,3402964816 | |
45 .long 3973531904,3225098121,3002836325,1918774430 | |
46 .long 3870401024,2102906079,2284471353,4117666579 | |
47 .long 617007872,1021508343,366931923,691083277 | |
48 .long 2528395776,3491914898,2968704004,1613121270 | |
49 .long 3445188352,3247741094,844474987,4093578302 | |
50 .long 651481088,1190302358,1689581232,574775300 | |
51 .long 4289380608,206939853,2555985458,2489840491 | |
52 .long 2130264064,327674451,3566485037,3349835193 | |
53 .long 2470714624,316102159,3636825756,3393945945 | |
# NUL-terminated credit string; the byte values decode to:
# "Vector Permutation AES for x86/SSSE3, Mike Hamburg (Stanford University)"
54 .byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105 | |
55 .byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83 | |
56 .byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117 | |
57 .byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105 | |
58 .byte 118,101,114,115,105,116,121,41,0 | |
59 .align 64 | |
# ----------------------------------------------------------------------
# _vpaes_preheat — resolve the PIC base and preload shared tables.
# Callers do:  leal .L_vpaes_consts+0x30-.LNNNpic_point,%ebp
#              call _vpaes_preheat
#              .LNNNpic_point:
# so (%esp) — our return address — IS the pic_point label, and the addl
# below turns %ebp into the absolute address of .L_vpaes_consts+0x30.
# Out: %ebp = &consts+0x30;  %xmm7 = consts+0x00;
#      %xmm6 = consts+0x20 (the 0x0F0F0F0F low-nibble mask used by every
#      pand/pandn+psrld $4 pair in the cores below).
# ----------------------------------------------------------------------
60 .type _vpaes_preheat,@function | |
61 .align 16 | |
62 _vpaes_preheat: | |
# return address == caller's pic_point; makes %ebp position-independent
63 addl (%esp),%ebp | |
64 movdqa -48(%ebp),%xmm7 | |
65 movdqa -16(%ebp),%xmm6 | |
66 ret | |
67 .size _vpaes_preheat,.-_vpaes_preheat | |
# ----------------------------------------------------------------------
# _vpaes_encrypt_core — encrypt one 16-byte block, constant-time via
# pshufb table lookups (no data-dependent memory addresses).
# In:  %xmm0 = plaintext block; %edx = expanded key (240(%edx) = rounds);
#      %ebp = &.L_vpaes_consts+0x30; %xmm6 = 0x0F mask, %xmm7 preloaded
#      by _vpaes_preheat.
# Out: %xmm0 = ciphertext.  Clobbers %eax,%ebx,%ecx,%edx, xmm1-xmm5, flags.
# The ".byte 102,15,56,0,MODRM" lines are raw-encoded SSSE3 pshufb
# (66 0F 38 00 /r) — presumably emitted as bytes for assemblers that
# predate the SSSE3 mnemonics; TODO confirm against the perlasm source.
# ----------------------------------------------------------------------
68 .type _vpaes_encrypt_core,@function | |
69 .align 16 | |
70 _vpaes_encrypt_core: | |
# %ecx cycles 16,32,48,0 selecting rotated shuffle rows at %ebx=192(%ebp)
71 movl $16,%ecx | |
72 movl 240(%edx),%eax | |
73 movdqa %xmm6,%xmm1 | |
74 movdqa (%ebp),%xmm2 | |
75 pandn %xmm0,%xmm1 | |
76 movdqu (%edx),%xmm5 | |
# split state into high nibbles (%xmm1, shifted down) and low nibbles (%xmm0)
77 psrld $4,%xmm1 | |
78 pand %xmm6,%xmm0 | |
79 .byte 102,15,56,0,208 | |
80 movdqa 16(%ebp),%xmm0 | |
81 .byte 102,15,56,0,193 | |
# whiten with round key 0 (loaded to %xmm5 above)
82 pxor %xmm5,%xmm2 | |
83 pxor %xmm2,%xmm0 | |
84 addl $16,%edx | |
85 leal 192(%ebp),%ebx | |
86 jmp .L000enc_entry | |
87 .align 16 | |
# middle-round loop: S-box output combine + MixColumns-style shuffles
88 .L001enc_loop: | |
89 movdqa 32(%ebp),%xmm4 | |
90 .byte 102,15,56,0,226 | |
91 pxor %xmm5,%xmm4 | |
92 movdqa 48(%ebp),%xmm0 | |
93 .byte 102,15,56,0,195 | |
94 pxor %xmm4,%xmm0 | |
95 movdqa 64(%ebp),%xmm5 | |
96 .byte 102,15,56,0,234 | |
97 movdqa -64(%ebx,%ecx,1),%xmm1 | |
98 movdqa 80(%ebp),%xmm2 | |
99 .byte 102,15,56,0,211 | |
100 pxor %xmm5,%xmm2 | |
101 movdqa (%ebx,%ecx,1),%xmm4 | |
102 movdqa %xmm0,%xmm3 | |
103 .byte 102,15,56,0,193 | |
104 addl $16,%edx | |
105 pxor %xmm2,%xmm0 | |
106 .byte 102,15,56,0,220 | |
# advance the rotation index mod 64 (0,16,32,48)
107 addl $16,%ecx | |
108 pxor %xmm0,%xmm3 | |
109 .byte 102,15,56,0,193 | |
110 andl $48,%ecx | |
111 pxor %xmm3,%xmm0 | |
# decrement round counter; ZF is consumed by the jnz after .L000enc_entry
112 subl $1,%eax | |
# per-round entry: nibble split + inverse-table lookups feeding the S-box
113 .L000enc_entry: | |
114 movdqa %xmm6,%xmm1 | |
115 pandn %xmm0,%xmm1 | |
116 psrld $4,%xmm1 | |
117 pand %xmm6,%xmm0 | |
118 movdqa -32(%ebp),%xmm5 | |
119 .byte 102,15,56,0,232 | |
120 pxor %xmm1,%xmm0 | |
121 movdqa %xmm7,%xmm3 | |
122 .byte 102,15,56,0,217 | |
123 pxor %xmm5,%xmm3 | |
124 movdqa %xmm7,%xmm4 | |
125 .byte 102,15,56,0,224 | |
126 pxor %xmm5,%xmm4 | |
127 movdqa %xmm7,%xmm2 | |
128 .byte 102,15,56,0,211 | |
129 pxor %xmm0,%xmm2 | |
130 movdqa %xmm7,%xmm3 | |
# preload next round key; SSE ops do not touch EFLAGS, so the jnz below
# still tests the "subl $1,%eax" from the loop tail
131 movdqu (%edx),%xmm5 | |
132 .byte 102,15,56,0,220 | |
133 pxor %xmm1,%xmm3 | |
134 jnz .L001enc_loop | |
# last round: output S-box tables at 96/112(%ebp), final byte permutation
# selected by %ecx via 64(%ebx,%ecx)
135 movdqa 96(%ebp),%xmm4 | |
136 movdqa 112(%ebp),%xmm0 | |
137 .byte 102,15,56,0,226 | |
138 pxor %xmm5,%xmm4 | |
139 .byte 102,15,56,0,195 | |
140 movdqa 64(%ebx,%ecx,1),%xmm1 | |
141 pxor %xmm4,%xmm0 | |
142 .byte 102,15,56,0,193 | |
143 ret | |
144 .size _vpaes_encrypt_core,.-_vpaes_encrypt_core | |
# ----------------------------------------------------------------------
# _vpaes_decrypt_core — decrypt one 16-byte block (mirror of encrypt_core
# using the inverse tables at %ebx = 608(%ebp)).
# In:  %xmm0 = ciphertext; %edx = expanded key (240(%edx) = rounds);
#      %ebp = &.L_vpaes_consts+0x30; %xmm6 = 0x0F mask, %xmm7 preloaded.
# Out: %xmm0 = plaintext.  Clobbers %eax,%ebx,%ecx,%edx, xmm1-xmm5, flags.
# ".byte 102,15,56,0,*" = pshufb (66 0F 38 00 /r);
# ".byte 102,15,58,15,237,12" = palignr $12,%xmm5,%xmm5 (66 0F 3A 0F /r ib),
# rotating the per-round shuffle constant kept in %xmm5.
# ----------------------------------------------------------------------
145 .type _vpaes_decrypt_core,@function | |
146 .align 16 | |
147 _vpaes_decrypt_core: | |
148 movl 240(%edx),%eax | |
149 leal 608(%ebp),%ebx | |
150 movdqa %xmm6,%xmm1 | |
151 movdqa -64(%ebx),%xmm2 | |
152 pandn %xmm0,%xmm1 | |
153 movl %eax,%ecx | |
154 psrld $4,%xmm1 | |
155 movdqu (%edx),%xmm5 | |
# %ecx = ((rounds << 4) ^ 0x30) & 0x30 — selects the final output
# permutation row; resolved to a pointer by the leal below
156 shll $4,%ecx | |
157 pand %xmm6,%xmm0 | |
158 .byte 102,15,56,0,208 | |
159 movdqa -48(%ebx),%xmm0 | |
160 xorl $48,%ecx | |
161 .byte 102,15,56,0,193 | |
162 andl $48,%ecx | |
# whiten with the first round key
163 pxor %xmm5,%xmm2 | |
164 movdqa 176(%ebp),%xmm5 | |
165 pxor %xmm2,%xmm0 | |
166 addl $16,%edx | |
167 leal -352(%ebx,%ecx,1),%ecx | |
168 jmp .L002dec_entry | |
169 .align 16 | |
# middle-round loop: four table-pair lookups (offsets -32..80 off %ebx),
# each folded into the state, then %xmm5 rotated by palignr $12
170 .L003dec_loop: | |
171 movdqa -32(%ebx),%xmm4 | |
172 .byte 102,15,56,0,226 | |
173 pxor %xmm0,%xmm4 | |
174 movdqa -16(%ebx),%xmm0 | |
175 .byte 102,15,56,0,195 | |
176 pxor %xmm4,%xmm0 | |
177 addl $16,%edx | |
178 .byte 102,15,56,0,197 | |
179 movdqa (%ebx),%xmm4 | |
180 .byte 102,15,56,0,226 | |
181 pxor %xmm0,%xmm4 | |
182 movdqa 16(%ebx),%xmm0 | |
183 .byte 102,15,56,0,195 | |
184 pxor %xmm4,%xmm0 | |
# decrement round counter here; the jnz after .L002dec_entry consumes ZF
# (intervening SSE instructions leave EFLAGS untouched)
185 subl $1,%eax | |
186 .byte 102,15,56,0,197 | |
187 movdqa 32(%ebx),%xmm4 | |
188 .byte 102,15,56,0,226 | |
189 pxor %xmm0,%xmm4 | |
190 movdqa 48(%ebx),%xmm0 | |
191 .byte 102,15,56,0,195 | |
192 pxor %xmm4,%xmm0 | |
193 .byte 102,15,56,0,197 | |
194 movdqa 64(%ebx),%xmm4 | |
195 .byte 102,15,56,0,226 | |
196 pxor %xmm0,%xmm4 | |
197 movdqa 80(%ebx),%xmm0 | |
198 .byte 102,15,56,0,195 | |
199 pxor %xmm4,%xmm0 | |
200 .byte 102,15,58,15,237,12 | |
# per-round entry: nibble split + inverse S-box lookups (same shape as
# .L000enc_entry in _vpaes_encrypt_core)
201 .L002dec_entry: | |
202 movdqa %xmm6,%xmm1 | |
203 pandn %xmm0,%xmm1 | |
204 psrld $4,%xmm1 | |
205 pand %xmm6,%xmm0 | |
206 movdqa -32(%ebp),%xmm2 | |
207 .byte 102,15,56,0,208 | |
208 pxor %xmm1,%xmm0 | |
209 movdqa %xmm7,%xmm3 | |
210 .byte 102,15,56,0,217 | |
211 pxor %xmm2,%xmm3 | |
212 movdqa %xmm7,%xmm4 | |
213 .byte 102,15,56,0,224 | |
214 pxor %xmm2,%xmm4 | |
215 movdqa %xmm7,%xmm2 | |
216 .byte 102,15,56,0,211 | |
217 pxor %xmm0,%xmm2 | |
218 movdqa %xmm7,%xmm3 | |
219 .byte 102,15,56,0,220 | |
220 pxor %xmm1,%xmm3 | |
# preload next round key
221 movdqu (%edx),%xmm0 | |
222 jnz .L003dec_loop | |
# last round: output tables at 96/112(%ebx), then the key-dependent final
# permutation row loaded via the pointer computed into %ecx at entry
223 movdqa 96(%ebx),%xmm4 | |
224 .byte 102,15,56,0,226 | |
225 pxor %xmm0,%xmm4 | |
226 movdqa 112(%ebx),%xmm0 | |
227 movdqa (%ecx),%xmm2 | |
228 .byte 102,15,56,0,195 | |
229 pxor %xmm4,%xmm0 | |
230 .byte 102,15,56,0,194 | |
231 ret | |
232 .size _vpaes_decrypt_core,.-_vpaes_decrypt_core | |
# ----------------------------------------------------------------------
# _vpaes_schedule_core — expand a user key into the vpaes key schedule.
# In:  %esi = user key; %edx = schedule output cursor; %eax = key size in
#      bits (dispatched via cmpl $192 below: 128 / 192 / 256);
#      %edi = direction flag (0 = encrypt schedule, nonzero = decrypt —
#      set by vpaes_set_encrypt_key / vpaes_set_decrypt_key respectively);
#      %ecx = initial key-mangle rotation; %ebp = PIC displacement of
#      .L_vpaes_consts+0x30, resolved here exactly as in _vpaes_preheat.
# Uses stack scratch at 4(%esp) and 20(%esp) (caller reserved the area).
# Clears all xmm registers on exit so no key material leaks.
# ----------------------------------------------------------------------
233 .type _vpaes_schedule_core,@function | |
234 .align 16 | |
235 _vpaes_schedule_core: | |
# (%esp) = return address == caller's pic_point; make %ebp absolute
236 addl (%esp),%ebp | |
237 movdqu (%esi),%xmm0 | |
238 movdqa 320(%ebp),%xmm2 | |
239 movdqa %xmm0,%xmm3 | |
240 leal (%ebp),%ebx | |
# stash the 320(%ebp) constant in stack scratch for _vpaes_schedule_round
241 movdqa %xmm2,4(%esp) | |
242 call _vpaes_schedule_transform | |
243 movdqa %xmm0,%xmm7 | |
244 testl %edi,%edi | |
245 jnz .L004schedule_am_decrypting | |
# encrypting: store the transformed first round key as-is
246 movdqu %xmm0,(%edx) | |
247 jmp .L005schedule_go | |
248 .L004schedule_am_decrypting: | |
# decrypting: store the raw key, byte-permuted by the row at 256(%ebp,%ecx)
249 movdqa 256(%ebp,%ecx,1),%xmm1 | |
250 .byte 102,15,56,0,217 | |
251 movdqu %xmm3,(%edx) | |
252 xorl $48,%ecx | |
253 .L005schedule_go: | |
254 cmpl $192,%eax | |
255 ja .L006schedule_256 | |
256 je .L007schedule_192 | |
# --- AES-128: 10 plain round/mangle iterations -------------------------
257 .L008schedule_128: | |
258 movl $10,%eax | |
259 .L009loop_schedule_128: | |
260 call _vpaes_schedule_round | |
261 decl %eax | |
262 jz .L010schedule_mangle_last | |
263 call _vpaes_schedule_mangle | |
264 jmp .L009loop_schedule_128 | |
265 .align 16 | |
# --- AES-192: transform the high 8 key bytes, then 4 smear/round cycles;
# movhlps with the zeroed %xmm4 clears the low quadword of %xmm6 ---------
266 .L007schedule_192: | |
267 movdqu 8(%esi),%xmm0 | |
268 call _vpaes_schedule_transform | |
269 movdqa %xmm0,%xmm6 | |
270 pxor %xmm4,%xmm4 | |
271 movhlps %xmm4,%xmm6 | |
272 movl $4,%eax | |
273 .L011loop_schedule_192: | |
274 call _vpaes_schedule_round | |
# palignr $8,%xmm6,%xmm0 (raw-encoded 66 0F 3A 0F /r ib)
275 .byte 102,15,58,15,198,8 | |
276 call _vpaes_schedule_mangle | |
277 call _vpaes_schedule_192_smear | |
278 call _vpaes_schedule_mangle | |
279 call _vpaes_schedule_round | |
280 decl %eax | |
281 jz .L010schedule_mangle_last | |
282 call _vpaes_schedule_mangle | |
283 call _vpaes_schedule_192_smear | |
284 jmp .L011loop_schedule_192 | |
285 .align 16 | |
# --- AES-256: 7 iterations alternating a full round with a "low round"
# on the other key half (%xmm7 saved/restored via 20(%esp)) --------------
286 .L006schedule_256: | |
287 movdqu 16(%esi),%xmm0 | |
288 call _vpaes_schedule_transform | |
289 movl $7,%eax | |
290 .L012loop_schedule_256: | |
291 call _vpaes_schedule_mangle | |
292 movdqa %xmm0,%xmm6 | |
293 call _vpaes_schedule_round | |
294 decl %eax | |
295 jz .L010schedule_mangle_last | |
296 call _vpaes_schedule_mangle | |
# broadcast the high dword, then run the no-rcon half-round entry point
297 pshufd $255,%xmm0,%xmm0 | |
298 movdqa %xmm7,20(%esp) | |
299 movdqa %xmm6,%xmm7 | |
300 call .L_vpaes_schedule_low_round | |
301 movdqa 20(%esp),%xmm7 | |
302 jmp .L012loop_schedule_256 | |
303 .align 16 | |
# --- final round key: extra output transform, direction-dependent -------
304 .L010schedule_mangle_last: | |
305 leal 384(%ebp),%ebx | |
306 testl %edi,%edi | |
307 jnz .L013schedule_mangle_last_dec | |
# encrypting: permute by 256(%ebp,%ecx) and use the tables at 352(%ebp)
308 movdqa 256(%ebp,%ecx,1),%xmm1 | |
309 .byte 102,15,56,0,193 | |
310 leal 352(%ebp),%ebx | |
311 addl $32,%edx | |
312 .L013schedule_mangle_last_dec: | |
313 addl $-16,%edx | |
314 pxor 336(%ebp),%xmm0 | |
315 call _vpaes_schedule_transform | |
316 movdqu %xmm0,(%edx) | |
# wipe all xmm registers — they held key material
317 pxor %xmm0,%xmm0 | |
318 pxor %xmm1,%xmm1 | |
319 pxor %xmm2,%xmm2 | |
320 pxor %xmm3,%xmm3 | |
321 pxor %xmm4,%xmm4 | |
322 pxor %xmm5,%xmm5 | |
323 pxor %xmm6,%xmm6 | |
324 pxor %xmm7,%xmm7 | |
325 ret | |
326 .size _vpaes_schedule_core,.-_vpaes_schedule_core | |
# ----------------------------------------------------------------------
# _vpaes_schedule_192_smear — AES-192 helper: smear dwords of the partial
# key in %xmm6 (via pshufd broadcasts XORed in) and fold in the high half
# of %xmm7, then copy the result to %xmm0 and clear the low quadword of
# %xmm6 (movhlps from the zeroed %xmm1 sets xmm6[63:0] = xmm1[127:64] = 0).
# In/out: %xmm6, %xmm7; out: %xmm0; clobbers %xmm1.
# ----------------------------------------------------------------------
327 .type _vpaes_schedule_192_smear,@function | |
328 .align 16 | |
329 _vpaes_schedule_192_smear: | |
330 pshufd $128,%xmm6,%xmm0 | |
331 pxor %xmm0,%xmm6 | |
332 pshufd $254,%xmm7,%xmm0 | |
333 pxor %xmm0,%xmm6 | |
334 movdqa %xmm6,%xmm0 | |
335 pxor %xmm1,%xmm1 | |
336 movhlps %xmm1,%xmm6 | |
337 ret | |
338 .size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear | |
# ----------------------------------------------------------------------
# _vpaes_schedule_round — one key-schedule round: rotate in the round
# constant (kept in stack scratch 8(%esp)), rotate/broadcast the input
# word, then fall through to the "low round" (subkey smear + vector-
# permute S-box, no rcon), which is also called directly by the AES-256
# path as .L_vpaes_schedule_low_round.
# In:  %xmm0 = previous output; %xmm7 = current subkey; 8(%esp) = rcon.
# Out: %xmm0 = %xmm7 = new subkey.  Clobbers %xmm1-%xmm5, updates 8(%esp).
# Raw encodings: 102,15,58,15,202,15 = palignr $15,%xmm2,%xmm1
#                102,15,58,15,210,15 = palignr $15,%xmm2,%xmm2
#                102,15,58,15,192,1  = palignr $1,%xmm0,%xmm0
# ----------------------------------------------------------------------
339 .type _vpaes_schedule_round,@function | |
340 .align 16 | |
341 _vpaes_schedule_round: | |
# extract the next round-constant byte and XOR it into the subkey
342 movdqa 8(%esp),%xmm2 | |
343 pxor %xmm1,%xmm1 | |
344 .byte 102,15,58,15,202,15 | |
345 .byte 102,15,58,15,210,15 | |
346 pxor %xmm1,%xmm7 | |
# RotWord-equivalent: broadcast the high dword, rotate bytes by 1
347 pshufd $255,%xmm0,%xmm0 | |
348 .byte 102,15,58,15,192,1 | |
349 movdqa %xmm2,8(%esp) | |
# low round entry (no rcon) — used directly by the AES-256 schedule
350 .L_vpaes_schedule_low_round: | |
# smear the subkey: xmm7 ^= xmm7<<32 ^ xmm7<<64, then XOR 336(%ebp)
351 movdqa %xmm7,%xmm1 | |
352 pslldq $4,%xmm7 | |
353 pxor %xmm1,%xmm7 | |
354 movdqa %xmm7,%xmm1 | |
355 pslldq $8,%xmm7 | |
356 pxor %xmm1,%xmm7 | |
357 pxor 336(%ebp),%xmm7 | |
# vector-permute S-box on %xmm0: nibble split (%xmm4 = 0x0F mask reload),
# inverse-table lookups, then the output tables at 32/48(%ebp)
358 movdqa -16(%ebp),%xmm4 | |
359 movdqa -48(%ebp),%xmm5 | |
360 movdqa %xmm4,%xmm1 | |
361 pandn %xmm0,%xmm1 | |
362 psrld $4,%xmm1 | |
363 pand %xmm4,%xmm0 | |
364 movdqa -32(%ebp),%xmm2 | |
365 .byte 102,15,56,0,208 | |
366 pxor %xmm1,%xmm0 | |
367 movdqa %xmm5,%xmm3 | |
368 .byte 102,15,56,0,217 | |
369 pxor %xmm2,%xmm3 | |
370 movdqa %xmm5,%xmm4 | |
371 .byte 102,15,56,0,224 | |
372 pxor %xmm2,%xmm4 | |
373 movdqa %xmm5,%xmm2 | |
374 .byte 102,15,56,0,211 | |
375 pxor %xmm0,%xmm2 | |
376 movdqa %xmm5,%xmm3 | |
377 .byte 102,15,56,0,220 | |
378 pxor %xmm1,%xmm3 | |
379 movdqa 32(%ebp),%xmm4 | |
380 .byte 102,15,56,0,226 | |
381 movdqa 48(%ebp),%xmm0 | |
382 .byte 102,15,56,0,195 | |
383 pxor %xmm4,%xmm0 | |
# combine S-box output with the smeared subkey; result in both xmm0, xmm7
384 pxor %xmm7,%xmm0 | |
385 movdqa %xmm0,%xmm7 | |
386 ret | |
387 .size _vpaes_schedule_round,.-_vpaes_schedule_round | |
# ----------------------------------------------------------------------
# _vpaes_schedule_transform — linear basis transform of %xmm0 using the
# table pair at (%ebx)/16(%ebx):
#   xmm0 = table_lo[lo nibbles] ^ table_hi[hi nibbles]
# In:  %xmm0 = input; %ebx = table pair; -16(%ebp) = 0x0F nibble mask.
# Out: %xmm0.  Clobbers %xmm1, %xmm2.
# ----------------------------------------------------------------------
388 .type _vpaes_schedule_transform,@function | |
389 .align 16 | |
390 _vpaes_schedule_transform: | |
391 movdqa -16(%ebp),%xmm2 | |
392 movdqa %xmm2,%xmm1 | |
# high nibbles -> %xmm1 (shifted down), low nibbles stay in %xmm0
393 pandn %xmm0,%xmm1 | |
394 psrld $4,%xmm1 | |
395 pand %xmm2,%xmm0 | |
396 movdqa (%ebx),%xmm2 | |
397 .byte 102,15,56,0,208 | |
398 movdqa 16(%ebx),%xmm0 | |
399 .byte 102,15,56,0,193 | |
400 pxor %xmm2,%xmm0 | |
401 ret | |
402 .size _vpaes_schedule_transform,.-_vpaes_schedule_transform | |
# ----------------------------------------------------------------------
# _vpaes_schedule_mangle — write one mangled round key to the schedule.
# In:  %xmm0 = raw subkey (preserved); %edx = schedule cursor;
#      %edi = direction (0 = encrypt: forward store, %edx += 16;
#      nonzero = decrypt: inverse-MixColumns-style table chain through
#      the eight tables at 416(%ebp)..., %edx -= 16);
#      %ecx = mangle rotation index, stepped by -16 mod 48 on exit.
# Out: key stored at (%edx) (post-adjust); clobbers %xmm1-%xmm5, %esi
#      (dec path only), flags.
# ----------------------------------------------------------------------
403 .type _vpaes_schedule_mangle,@function | |
404 .align 16 | |
405 _vpaes_schedule_mangle: | |
406 movdqa %xmm0,%xmm4 | |
# %xmm5 = 128(%ebp): byte-rotation constant applied repeatedly below
407 movdqa 128(%ebp),%xmm5 | |
408 testl %edi,%edi | |
409 jnz .L014schedule_mangle_dec | |
# --- encrypt path: xor 336(%ebp), then fold three byte-rotations --------
410 addl $16,%edx | |
411 pxor 336(%ebp),%xmm4 | |
412 .byte 102,15,56,0,229 | |
413 movdqa %xmm4,%xmm3 | |
414 .byte 102,15,56,0,229 | |
415 pxor %xmm4,%xmm3 | |
416 .byte 102,15,56,0,229 | |
417 pxor %xmm4,%xmm3 | |
418 jmp .L015schedule_mangle_both | |
419 .align 16 | |
# --- decrypt path: nibble split, then chain through 8 tables at
# (%esi)=416(%ebp) .. 112(%esi), rotating with %xmm5 between pairs -------
420 .L014schedule_mangle_dec: | |
421 movdqa -16(%ebp),%xmm2 | |
422 leal 416(%ebp),%esi | |
423 movdqa %xmm2,%xmm1 | |
424 pandn %xmm4,%xmm1 | |
425 psrld $4,%xmm1 | |
426 pand %xmm2,%xmm4 | |
427 movdqa (%esi),%xmm2 | |
428 .byte 102,15,56,0,212 | |
429 movdqa 16(%esi),%xmm3 | |
430 .byte 102,15,56,0,217 | |
431 pxor %xmm2,%xmm3 | |
432 .byte 102,15,56,0,221 | |
433 movdqa 32(%esi),%xmm2 | |
434 .byte 102,15,56,0,212 | |
435 pxor %xmm3,%xmm2 | |
436 movdqa 48(%esi),%xmm3 | |
437 .byte 102,15,56,0,217 | |
438 pxor %xmm2,%xmm3 | |
439 .byte 102,15,56,0,221 | |
440 movdqa 64(%esi),%xmm2 | |
441 .byte 102,15,56,0,212 | |
442 pxor %xmm3,%xmm2 | |
443 movdqa 80(%esi),%xmm3 | |
444 .byte 102,15,56,0,217 | |
445 pxor %xmm2,%xmm3 | |
446 .byte 102,15,56,0,221 | |
447 movdqa 96(%esi),%xmm2 | |
448 .byte 102,15,56,0,212 | |
449 pxor %xmm3,%xmm2 | |
450 movdqa 112(%esi),%xmm3 | |
451 .byte 102,15,56,0,217 | |
452 pxor %xmm2,%xmm3 | |
# decrypt schedule is written backwards
453 addl $-16,%edx | |
454 .L015schedule_mangle_both: | |
# final byte permutation by the rotating row 256(%ebp,%ecx); then step
# %ecx backwards mod 48 (0,32,16,0,...) and store the key
455 movdqa 256(%ebp,%ecx,1),%xmm1 | |
456 .byte 102,15,56,0,217 | |
457 addl $-16,%ecx | |
458 andl $48,%ecx | |
459 movdqu %xmm3,(%edx) | |
460 ret | |
461 .size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle | |
# ----------------------------------------------------------------------
# int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
#                           AES_KEY *key)            — i386 cdecl.
# After the four pushes, args are at 20/24/28(%esp): key bytes, bit
# length, schedule. Allocates a 16-aligned scratch frame by xchg'ing
# %esp with a lowered, masked copy (old %esp saved at 48(%esp)).
# rounds = bits/32 + 5, stored at 240(%edx) (10/12/14 for 128/192/256).
# %edi = 0 selects the encrypt direction in _vpaes_schedule_core;
# %ebp carries the PIC displacement resolved inside the core (the
# pic_point label sits right after the call). Always returns 0 in %eax.
# ----------------------------------------------------------------------
462 .globl vpaes_set_encrypt_key | |
463 .type vpaes_set_encrypt_key,@function | |
464 .align 16 | |
465 vpaes_set_encrypt_key: | |
466 .L_vpaes_set_encrypt_key_begin: | |
467 pushl %ebp | |
468 pushl %ebx | |
469 pushl %esi | |
470 pushl %edi | |
471 movl 20(%esp),%esi | |
472 leal -56(%esp),%ebx | |
473 movl 24(%esp),%eax | |
# 16-byte align the scratch frame, swap it in, save the old %esp
474 andl $-16,%ebx | |
475 movl 28(%esp),%edx | |
476 xchgl %esp,%ebx | |
477 movl %ebx,48(%esp) | |
478 movl %eax,%ebx | |
479 shrl $5,%ebx | |
480 addl $5,%ebx | |
481 movl %ebx,240(%edx) | |
482 movl $48,%ecx | |
483 movl $0,%edi | |
484 leal .L_vpaes_consts+0x30-.L016pic_point,%ebp | |
485 call _vpaes_schedule_core | |
486 .L016pic_point: | |
# restore caller's stack and return success
487 movl 48(%esp),%esp | |
488 xorl %eax,%eax | |
489 popl %edi | |
490 popl %esi | |
491 popl %ebx | |
492 popl %ebp | |
493 ret | |
494 .size vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin | |
# ----------------------------------------------------------------------
# int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
#                           AES_KEY *key)            — i386 cdecl.
# Same frame/rounds setup as vpaes_set_encrypt_key, but: %edx is advanced
# to the END of the schedule (16 + rounds*16) since the decrypt schedule
# is written backwards; %edi = 1 selects the decrypt direction; and the
# initial mangle rotation %ecx = ((bits >> 1) & 32) ^ 32.
# Always returns 0 in %eax.
# ----------------------------------------------------------------------
495 .globl vpaes_set_decrypt_key | |
496 .type vpaes_set_decrypt_key,@function | |
497 .align 16 | |
498 vpaes_set_decrypt_key: | |
499 .L_vpaes_set_decrypt_key_begin: | |
500 pushl %ebp | |
501 pushl %ebx | |
502 pushl %esi | |
503 pushl %edi | |
504 movl 20(%esp),%esi | |
505 leal -56(%esp),%ebx | |
506 movl 24(%esp),%eax | |
507 andl $-16,%ebx | |
508 movl 28(%esp),%edx | |
509 xchgl %esp,%ebx | |
510 movl %ebx,48(%esp) | |
511 movl %eax,%ebx | |
512 shrl $5,%ebx | |
513 addl $5,%ebx | |
514 movl %ebx,240(%edx) | |
# point %edx past the last round key: schedule is filled back-to-front
515 shll $4,%ebx | |
516 leal 16(%edx,%ebx,1),%edx | |
517 movl $1,%edi | |
518 movl %eax,%ecx | |
519 shrl $1,%ecx | |
520 andl $32,%ecx | |
521 xorl $32,%ecx | |
522 leal .L_vpaes_consts+0x30-.L017pic_point,%ebp | |
523 call _vpaes_schedule_core | |
524 .L017pic_point: | |
525 movl 48(%esp),%esp | |
526 xorl %eax,%eax | |
527 popl %edi | |
528 popl %esi | |
529 popl %ebx | |
530 popl %ebp | |
531 ret | |
532 .size vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin | |
# ----------------------------------------------------------------------
# void vpaes_encrypt(const unsigned char *in, unsigned char *out,
#                    const AES_KEY *key)             — i386 cdecl.
# Preheats the constant tables (PIC base in %ebp), swaps in a 16-aligned
# scratch stack (old %esp saved at 48(%esp)), then runs
# _vpaes_encrypt_core on the single block: %xmm0 in/out, %edx = key.
# ----------------------------------------------------------------------
533 .globl vpaes_encrypt | |
534 .type vpaes_encrypt,@function | |
535 .align 16 | |
536 vpaes_encrypt: | |
537 .L_vpaes_encrypt_begin: | |
538 pushl %ebp | |
539 pushl %ebx | |
540 pushl %esi | |
541 pushl %edi | |
# pic_point immediately follows the call — _vpaes_preheat adds the
# return address to %ebp to form the absolute table pointer
542 leal .L_vpaes_consts+0x30-.L018pic_point,%ebp | |
543 call _vpaes_preheat | |
544 .L018pic_point: | |
545 movl 20(%esp),%esi | |
546 leal -56(%esp),%ebx | |
547 movl 24(%esp),%edi | |
548 andl $-16,%ebx | |
549 movl 28(%esp),%edx | |
550 xchgl %esp,%ebx | |
551 movl %ebx,48(%esp) | |
552 movdqu (%esi),%xmm0 | |
553 call _vpaes_encrypt_core | |
554 movdqu %xmm0,(%edi) | |
555 movl 48(%esp),%esp | |
556 popl %edi | |
557 popl %esi | |
558 popl %ebx | |
559 popl %ebp | |
560 ret | |
561 .size vpaes_encrypt,.-.L_vpaes_encrypt_begin | |
# ----------------------------------------------------------------------
# void vpaes_decrypt(const unsigned char *in, unsigned char *out,
#                    const AES_KEY *key)             — i386 cdecl.
# Mirror of vpaes_encrypt: preheat tables, align scratch stack, run
# _vpaes_decrypt_core on one block (%xmm0 in/out, %edx = key).
# ----------------------------------------------------------------------
562 .globl vpaes_decrypt | |
563 .type vpaes_decrypt,@function | |
564 .align 16 | |
565 vpaes_decrypt: | |
566 .L_vpaes_decrypt_begin: | |
567 pushl %ebp | |
568 pushl %ebx | |
569 pushl %esi | |
570 pushl %edi | |
571 leal .L_vpaes_consts+0x30-.L019pic_point,%ebp | |
572 call _vpaes_preheat | |
573 .L019pic_point: | |
574 movl 20(%esp),%esi | |
575 leal -56(%esp),%ebx | |
576 movl 24(%esp),%edi | |
577 andl $-16,%ebx | |
578 movl 28(%esp),%edx | |
579 xchgl %esp,%ebx | |
580 movl %ebx,48(%esp) | |
581 movdqu (%esi),%xmm0 | |
582 call _vpaes_decrypt_core | |
583 movdqu %xmm0,(%edi) | |
584 movl 48(%esp),%esp | |
585 popl %edi | |
586 popl %esi | |
587 popl %ebx | |
588 popl %ebp | |
589 ret | |
590 .size vpaes_decrypt,.-.L_vpaes_decrypt_begin | |
# ----------------------------------------------------------------------
# void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
#                        size_t length, const AES_KEY *key,
#                        unsigned char *ivec, int enc)  — i386 cdecl.
# Args at 20..40(%esp) after the pushes. length < 16 returns immediately
# (subl $16 / jc). Processes whole 16-byte blocks; the IV lives in %xmm1
# and the final chaining value is written back to ivec on exit.
# Output addressing trick: %edi is loaded with (out - in) and stacked at
# (%esp), so each store is (out-in)(,%esi,1) while only %esi advances.
# Scratch frame layout: (%esp)=out-in, 4(%esp)=key, 8(%esp)=ivec,
# 16(%esp)/32(%esp)=IV/ciphertext spill in the decrypt loop,
# 48(%esp)=saved caller %esp.
# ----------------------------------------------------------------------
591 .globl vpaes_cbc_encrypt | |
592 .type vpaes_cbc_encrypt,@function | |
593 .align 16 | |
594 vpaes_cbc_encrypt: | |
595 .L_vpaes_cbc_encrypt_begin: | |
596 pushl %ebp | |
597 pushl %ebx | |
598 pushl %esi | |
599 pushl %edi | |
600 movl 20(%esp),%esi | |
601 movl 24(%esp),%edi | |
602 movl 28(%esp),%eax | |
603 movl 32(%esp),%edx | |
# bail out for length < 16 before touching the stack frame
604 subl $16,%eax | |
605 jc .L020cbc_abort | |
606 leal -56(%esp),%ebx | |
607 movl 36(%esp),%ebp | |
608 andl $-16,%ebx | |
609 movl 40(%esp),%ecx | |
610 xchgl %esp,%ebx | |
# load IV; %edi becomes the out-in displacement, %edi then the counter
611 movdqu (%ebp),%xmm1 | |
612 subl %esi,%edi | |
613 movl %ebx,48(%esp) | |
614 movl %edi,(%esp) | |
615 movl %edx,4(%esp) | |
616 movl %ebp,8(%esp) | |
617 movl %eax,%edi | |
618 leal .L_vpaes_consts+0x30-.L021pic_point,%ebp | |
619 call _vpaes_preheat | |
620 .L021pic_point: | |
# enc != 0 -> encrypt loop; enc == 0 -> decrypt loop
621 cmpl $0,%ecx | |
622 je .L022cbc_dec_loop | |
623 jmp .L023cbc_enc_loop | |
624 .align 16 | |
# --- CBC encrypt: C[i] = E(P[i] ^ C[i-1]), chain kept in %xmm1 ---------
625 .L023cbc_enc_loop: | |
626 movdqu (%esi),%xmm0 | |
627 pxor %xmm1,%xmm0 | |
628 call _vpaes_encrypt_core | |
629 movl (%esp),%ebx | |
630 movl 4(%esp),%edx | |
631 movdqa %xmm0,%xmm1 | |
632 movdqu %xmm0,(%ebx,%esi,1) | |
633 leal 16(%esi),%esi | |
634 subl $16,%edi | |
635 jnc .L023cbc_enc_loop | |
636 jmp .L024cbc_done | |
637 .align 16 | |
# --- CBC decrypt: P[i] = D(C[i]) ^ C[i-1]; spill IV and the current
# ciphertext to 16/32(%esp) since the core clobbers the xmm registers ---
638 .L022cbc_dec_loop: | |
639 movdqu (%esi),%xmm0 | |
640 movdqa %xmm1,16(%esp) | |
641 movdqa %xmm0,32(%esp) | |
642 call _vpaes_decrypt_core | |
643 movl (%esp),%ebx | |
644 movl 4(%esp),%edx | |
645 pxor 16(%esp),%xmm0 | |
646 movdqa 32(%esp),%xmm1 | |
647 movdqu %xmm0,(%ebx,%esi,1) | |
648 leal 16(%esi),%esi | |
649 subl $16,%edi | |
650 jnc .L022cbc_dec_loop | |
651 .L024cbc_done: | |
# write the final chaining value back to ivec, restore caller's stack
652 movl 8(%esp),%ebx | |
653 movl 48(%esp),%esp | |
654 movdqu %xmm1,(%ebx) | |
655 .L020cbc_abort: | |
656 popl %edi | |
657 popl %esi | |
658 popl %ebx | |
659 popl %ebp | |
660 ret | |
661 .size vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin | |
OLD | NEW |