Chromium Code Reviews

Side by Side Diff: third_party/boringssl/linux-arm/crypto/sha/sha1-armv4-large.S

Issue 1128293004: Revert of Roll src/third_party/boringssl/src 68de407:de12d6c (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Thanks Rietveld, Thietveld. Created 5 years, 7 months ago
1 #include "arm_arch.h" 1 #include "arm_arch.h"
2 2
3 .text 3 .text
4 .code 32 4 .code 32
5 5
6 .globl» sha1_block_data_order 6 .global»sha1_block_data_order
7 .type sha1_block_data_order,%function 7 .type sha1_block_data_order,%function
8 8
9 .align 5 9 .align 5
10 sha1_block_data_order: 10 sha1_block_data_order:
11 #if __ARM_MAX_ARCH__>=7 11 #if __ARM_MAX_ARCH__>=7
12 sub r3,pc,#8 @ sha1_block_data_order 12 sub r3,pc,#8 @ sha1_block_data_order
13 ldr r12,.LOPENSSL_armcap 13 ldr r12,.LOPENSSL_armcap
14 ldr r12,[r3,r12] @ OPENSSL_armcap_P 14 ldr r12,[r3,r12] @ OPENSSL_armcap_P
15 #ifdef __APPLE__
16 ldr r12,[r12]
17 #endif
18 tst r12,#ARMV8_SHA1 15 tst r12,#ARMV8_SHA1
19 bne .LARMv8 16 bne .LARMv8
20 tst r12,#ARMV7_NEON 17 tst r12,#ARMV7_NEON
21 bne .LNEON 18 bne .LNEON
22 #endif 19 #endif
23 » stmdb» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} 20 » stmdb» sp!,{r4-r12,lr}
24 add r2,r1,r2,lsl#6 @ r2 to point at the end of r1 21 add r2,r1,r2,lsl#6 @ r2 to point at the end of r1
25 ldmia r0,{r3,r4,r5,r6,r7} 22 ldmia r0,{r3,r4,r5,r6,r7}
26 .Lloop: 23 .Lloop:
27 ldr r8,.LK_00_19 24 ldr r8,.LK_00_19
28 mov r14,sp 25 mov r14,sp
29 sub sp,sp,#15*4 26 sub sp,sp,#15*4
30 mov r5,r5,ror#30 27 mov r5,r5,ror#30
31 mov r6,r6,ror#30 28 mov r6,r6,ror#30
32 mov r7,r7,ror#30 @ [6] 29 mov r7,r7,ror#30 @ [6]
33 .L_00_15: 30 .L_00_15:
(...skipping 155 matching lines...)
189 ldr r11,[r14,#7*4] 186 ldr r11,[r14,#7*4]
190 add r6,r8,r6,ror#2 @ E+=K_xx_xx 187 add r6,r8,r6,ror#2 @ E+=K_xx_xx
191 ldr r12,[r14,#2*4] 188 ldr r12,[r14,#2*4]
192 eor r9,r9,r10 189 eor r9,r9,r10
193 eor r11,r11,r12 @ 1 cycle stall 190 eor r11,r11,r12 @ 1 cycle stall
194 eor r10,r4,r5 @ F_xx_xx 191 eor r10,r4,r5 @ F_xx_xx
195 mov r9,r9,ror#31 192 mov r9,r9,ror#31
196 add r6,r6,r7,ror#27 @ E+=ROR(A,27) 193 add r6,r6,r7,ror#27 @ E+=ROR(A,27)
197 eor r9,r9,r11,ror#31 194 eor r9,r9,r11,ror#31
198 str r9,[r14,#-4]! 195 str r9,[r14,#-4]!
199 » and» r10,r3,r10,ror#2» » » » » @ F_xx_xx 196 » and r10,r3,r10,ror#2» » » » » @ F_xx_xx
200 @ F_xx_xx 197 @ F_xx_xx
201 add r6,r6,r9 @ E+=X[i] 198 add r6,r6,r9 @ E+=X[i]
202 eor r10,r10,r5,ror#2 @ F_00_19(B,C,D) 199 eor r10,r10,r5,ror#2 @ F_00_19(B,C,D)
203 add r6,r6,r10 @ E+=F_00_19(B,C,D) 200 add r6,r6,r10 @ E+=F_00_19(B,C,D)
204 ldr r9,[r14,#15*4] 201 ldr r9,[r14,#15*4]
205 ldr r10,[r14,#13*4] 202 ldr r10,[r14,#13*4]
206 ldr r11,[r14,#7*4] 203 ldr r11,[r14,#7*4]
207 add r5,r8,r5,ror#2 @ E+=K_xx_xx 204 add r5,r8,r5,ror#2 @ E+=K_xx_xx
208 ldr r12,[r14,#2*4] 205 ldr r12,[r14,#2*4]
209 eor r9,r9,r10 206 eor r9,r9,r10
210 eor r11,r11,r12 @ 1 cycle stall 207 eor r11,r11,r12 @ 1 cycle stall
211 eor r10,r3,r4 @ F_xx_xx 208 eor r10,r3,r4 @ F_xx_xx
212 mov r9,r9,ror#31 209 mov r9,r9,ror#31
213 add r5,r5,r6,ror#27 @ E+=ROR(A,27) 210 add r5,r5,r6,ror#27 @ E+=ROR(A,27)
214 eor r9,r9,r11,ror#31 211 eor r9,r9,r11,ror#31
215 str r9,[r14,#-4]! 212 str r9,[r14,#-4]!
216 » and» r10,r7,r10,ror#2» » » » » @ F_xx_xx 213 » and r10,r7,r10,ror#2» » » » » @ F_xx_xx
217 @ F_xx_xx 214 @ F_xx_xx
218 add r5,r5,r9 @ E+=X[i] 215 add r5,r5,r9 @ E+=X[i]
219 eor r10,r10,r4,ror#2 @ F_00_19(B,C,D) 216 eor r10,r10,r4,ror#2 @ F_00_19(B,C,D)
220 add r5,r5,r10 @ E+=F_00_19(B,C,D) 217 add r5,r5,r10 @ E+=F_00_19(B,C,D)
221 ldr r9,[r14,#15*4] 218 ldr r9,[r14,#15*4]
222 ldr r10,[r14,#13*4] 219 ldr r10,[r14,#13*4]
223 ldr r11,[r14,#7*4] 220 ldr r11,[r14,#7*4]
224 add r4,r8,r4,ror#2 @ E+=K_xx_xx 221 add r4,r8,r4,ror#2 @ E+=K_xx_xx
225 ldr r12,[r14,#2*4] 222 ldr r12,[r14,#2*4]
226 eor r9,r9,r10 223 eor r9,r9,r10
227 eor r11,r11,r12 @ 1 cycle stall 224 eor r11,r11,r12 @ 1 cycle stall
228 eor r10,r7,r3 @ F_xx_xx 225 eor r10,r7,r3 @ F_xx_xx
229 mov r9,r9,ror#31 226 mov r9,r9,ror#31
230 add r4,r4,r5,ror#27 @ E+=ROR(A,27) 227 add r4,r4,r5,ror#27 @ E+=ROR(A,27)
231 eor r9,r9,r11,ror#31 228 eor r9,r9,r11,ror#31
232 str r9,[r14,#-4]! 229 str r9,[r14,#-4]!
233 » and» r10,r6,r10,ror#2» » » » » @ F_xx_xx 230 » and r10,r6,r10,ror#2» » » » » @ F_xx_xx
234 @ F_xx_xx 231 @ F_xx_xx
235 add r4,r4,r9 @ E+=X[i] 232 add r4,r4,r9 @ E+=X[i]
236 eor r10,r10,r3,ror#2 @ F_00_19(B,C,D) 233 eor r10,r10,r3,ror#2 @ F_00_19(B,C,D)
237 add r4,r4,r10 @ E+=F_00_19(B,C,D) 234 add r4,r4,r10 @ E+=F_00_19(B,C,D)
238 ldr r9,[r14,#15*4] 235 ldr r9,[r14,#15*4]
239 ldr r10,[r14,#13*4] 236 ldr r10,[r14,#13*4]
240 ldr r11,[r14,#7*4] 237 ldr r11,[r14,#7*4]
241 add r3,r8,r3,ror#2 @ E+=K_xx_xx 238 add r3,r8,r3,ror#2 @ E+=K_xx_xx
242 ldr r12,[r14,#2*4] 239 ldr r12,[r14,#2*4]
243 eor r9,r9,r10 240 eor r9,r9,r10
244 eor r11,r11,r12 @ 1 cycle stall 241 eor r11,r11,r12 @ 1 cycle stall
245 eor r10,r6,r7 @ F_xx_xx 242 eor r10,r6,r7 @ F_xx_xx
246 mov r9,r9,ror#31 243 mov r9,r9,ror#31
247 add r3,r3,r4,ror#27 @ E+=ROR(A,27) 244 add r3,r3,r4,ror#27 @ E+=ROR(A,27)
248 eor r9,r9,r11,ror#31 245 eor r9,r9,r11,ror#31
249 str r9,[r14,#-4]! 246 str r9,[r14,#-4]!
250 » and» r10,r5,r10,ror#2» » » » » @ F_xx_xx 247 » and r10,r5,r10,ror#2» » » » » @ F_xx_xx
251 @ F_xx_xx 248 @ F_xx_xx
252 add r3,r3,r9 @ E+=X[i] 249 add r3,r3,r9 @ E+=X[i]
253 eor r10,r10,r7,ror#2 @ F_00_19(B,C,D) 250 eor r10,r10,r7,ror#2 @ F_00_19(B,C,D)
254 add r3,r3,r10 @ E+=F_00_19(B,C,D) 251 add r3,r3,r10 @ E+=F_00_19(B,C,D)
255 252
256 ldr r8,.LK_20_39 @ [+15+16*4] 253 ldr r8,.LK_20_39 @ [+15+16*4]
257 cmn sp,#0 @ [+3], clear carry to denote 20_39 254 cmn sp,#0 @ [+3], clear carry to denote 20_39
258 .L_20_39_or_60_79: 255 .L_20_39_or_60_79:
259 ldr r9,[r14,#15*4] 256 ldr r9,[r14,#15*4]
260 ldr r10,[r14,#13*4] 257 ldr r10,[r14,#13*4]
261 ldr r11,[r14,#7*4] 258 ldr r11,[r14,#7*4]
262 add r7,r8,r7,ror#2 @ E+=K_xx_xx 259 add r7,r8,r7,ror#2 @ E+=K_xx_xx
263 ldr r12,[r14,#2*4] 260 ldr r12,[r14,#2*4]
264 eor r9,r9,r10 261 eor r9,r9,r10
265 eor r11,r11,r12 @ 1 cycle stall 262 eor r11,r11,r12 @ 1 cycle stall
266 eor r10,r5,r6 @ F_xx_xx 263 eor r10,r5,r6 @ F_xx_xx
267 mov r9,r9,ror#31 264 mov r9,r9,ror#31
268 add r7,r7,r3,ror#27 @ E+=ROR(A,27) 265 add r7,r7,r3,ror#27 @ E+=ROR(A,27)
269 eor r9,r9,r11,ror#31 266 eor r9,r9,r11,ror#31
270 str r9,[r14,#-4]! 267 str r9,[r14,#-4]!
271 » eor» r10,r4,r10,ror#2» » » » » @ F_xx_xx 268 » eor r10,r4,r10,ror#2» » » » » @ F_xx_xx
272 @ F_xx_xx 269 @ F_xx_xx
273 add r7,r7,r9 @ E+=X[i] 270 add r7,r7,r9 @ E+=X[i]
274 add r7,r7,r10 @ E+=F_20_39(B,C,D) 271 add r7,r7,r10 @ E+=F_20_39(B,C,D)
275 ldr r9,[r14,#15*4] 272 ldr r9,[r14,#15*4]
276 ldr r10,[r14,#13*4] 273 ldr r10,[r14,#13*4]
277 ldr r11,[r14,#7*4] 274 ldr r11,[r14,#7*4]
278 add r6,r8,r6,ror#2 @ E+=K_xx_xx 275 add r6,r8,r6,ror#2 @ E+=K_xx_xx
279 ldr r12,[r14,#2*4] 276 ldr r12,[r14,#2*4]
280 eor r9,r9,r10 277 eor r9,r9,r10
281 eor r11,r11,r12 @ 1 cycle stall 278 eor r11,r11,r12 @ 1 cycle stall
282 eor r10,r4,r5 @ F_xx_xx 279 eor r10,r4,r5 @ F_xx_xx
283 mov r9,r9,ror#31 280 mov r9,r9,ror#31
284 add r6,r6,r7,ror#27 @ E+=ROR(A,27) 281 add r6,r6,r7,ror#27 @ E+=ROR(A,27)
285 eor r9,r9,r11,ror#31 282 eor r9,r9,r11,ror#31
286 str r9,[r14,#-4]! 283 str r9,[r14,#-4]!
287 » eor» r10,r3,r10,ror#2» » » » » @ F_xx_xx 284 » eor r10,r3,r10,ror#2» » » » » @ F_xx_xx
288 @ F_xx_xx 285 @ F_xx_xx
289 add r6,r6,r9 @ E+=X[i] 286 add r6,r6,r9 @ E+=X[i]
290 add r6,r6,r10 @ E+=F_20_39(B,C,D) 287 add r6,r6,r10 @ E+=F_20_39(B,C,D)
291 ldr r9,[r14,#15*4] 288 ldr r9,[r14,#15*4]
292 ldr r10,[r14,#13*4] 289 ldr r10,[r14,#13*4]
293 ldr r11,[r14,#7*4] 290 ldr r11,[r14,#7*4]
294 add r5,r8,r5,ror#2 @ E+=K_xx_xx 291 add r5,r8,r5,ror#2 @ E+=K_xx_xx
295 ldr r12,[r14,#2*4] 292 ldr r12,[r14,#2*4]
296 eor r9,r9,r10 293 eor r9,r9,r10
297 eor r11,r11,r12 @ 1 cycle stall 294 eor r11,r11,r12 @ 1 cycle stall
298 eor r10,r3,r4 @ F_xx_xx 295 eor r10,r3,r4 @ F_xx_xx
299 mov r9,r9,ror#31 296 mov r9,r9,ror#31
300 add r5,r5,r6,ror#27 @ E+=ROR(A,27) 297 add r5,r5,r6,ror#27 @ E+=ROR(A,27)
301 eor r9,r9,r11,ror#31 298 eor r9,r9,r11,ror#31
302 str r9,[r14,#-4]! 299 str r9,[r14,#-4]!
303 » eor» r10,r7,r10,ror#2» » » » » @ F_xx_xx 300 » eor r10,r7,r10,ror#2» » » » » @ F_xx_xx
304 @ F_xx_xx 301 @ F_xx_xx
305 add r5,r5,r9 @ E+=X[i] 302 add r5,r5,r9 @ E+=X[i]
306 add r5,r5,r10 @ E+=F_20_39(B,C,D) 303 add r5,r5,r10 @ E+=F_20_39(B,C,D)
307 ldr r9,[r14,#15*4] 304 ldr r9,[r14,#15*4]
308 ldr r10,[r14,#13*4] 305 ldr r10,[r14,#13*4]
309 ldr r11,[r14,#7*4] 306 ldr r11,[r14,#7*4]
310 add r4,r8,r4,ror#2 @ E+=K_xx_xx 307 add r4,r8,r4,ror#2 @ E+=K_xx_xx
311 ldr r12,[r14,#2*4] 308 ldr r12,[r14,#2*4]
312 eor r9,r9,r10 309 eor r9,r9,r10
313 eor r11,r11,r12 @ 1 cycle stall 310 eor r11,r11,r12 @ 1 cycle stall
314 eor r10,r7,r3 @ F_xx_xx 311 eor r10,r7,r3 @ F_xx_xx
315 mov r9,r9,ror#31 312 mov r9,r9,ror#31
316 add r4,r4,r5,ror#27 @ E+=ROR(A,27) 313 add r4,r4,r5,ror#27 @ E+=ROR(A,27)
317 eor r9,r9,r11,ror#31 314 eor r9,r9,r11,ror#31
318 str r9,[r14,#-4]! 315 str r9,[r14,#-4]!
319 » eor» r10,r6,r10,ror#2» » » » » @ F_xx_xx 316 » eor r10,r6,r10,ror#2» » » » » @ F_xx_xx
320 @ F_xx_xx 317 @ F_xx_xx
321 add r4,r4,r9 @ E+=X[i] 318 add r4,r4,r9 @ E+=X[i]
322 add r4,r4,r10 @ E+=F_20_39(B,C,D) 319 add r4,r4,r10 @ E+=F_20_39(B,C,D)
323 ldr r9,[r14,#15*4] 320 ldr r9,[r14,#15*4]
324 ldr r10,[r14,#13*4] 321 ldr r10,[r14,#13*4]
325 ldr r11,[r14,#7*4] 322 ldr r11,[r14,#7*4]
326 add r3,r8,r3,ror#2 @ E+=K_xx_xx 323 add r3,r8,r3,ror#2 @ E+=K_xx_xx
327 ldr r12,[r14,#2*4] 324 ldr r12,[r14,#2*4]
328 eor r9,r9,r10 325 eor r9,r9,r10
329 eor r11,r11,r12 @ 1 cycle stall 326 eor r11,r11,r12 @ 1 cycle stall
330 eor r10,r6,r7 @ F_xx_xx 327 eor r10,r6,r7 @ F_xx_xx
331 mov r9,r9,ror#31 328 mov r9,r9,ror#31
332 add r3,r3,r4,ror#27 @ E+=ROR(A,27) 329 add r3,r3,r4,ror#27 @ E+=ROR(A,27)
333 eor r9,r9,r11,ror#31 330 eor r9,r9,r11,ror#31
334 str r9,[r14,#-4]! 331 str r9,[r14,#-4]!
335 » eor» r10,r5,r10,ror#2» » » » » @ F_xx_xx 332 » eor r10,r5,r10,ror#2» » » » » @ F_xx_xx
336 @ F_xx_xx 333 @ F_xx_xx
337 add r3,r3,r9 @ E+=X[i] 334 add r3,r3,r9 @ E+=X[i]
338 add r3,r3,r10 @ E+=F_20_39(B,C,D) 335 add r3,r3,r10 @ E+=F_20_39(B,C,D)
339 teq r14,sp @ preserve carry 336 teq r14,sp @ preserve carry
340 bne .L_20_39_or_60_79 @ [+((12+3)*5+2)*4] 337 bne .L_20_39_or_60_79 @ [+((12+3)*5+2)*4]
341 bcs .L_done @ [+((12+3)*5+2)*4], spare 300 bytes 338 bcs .L_done @ [+((12+3)*5+2)*4], spare 300 bytes
342 339
343 ldr r8,.LK_40_59 340 ldr r8,.LK_40_59
344 sub sp,sp,#20*4 @ [+2] 341 sub sp,sp,#20*4 @ [+2]
345 .L_40_59: 342 .L_40_59:
346 ldr r9,[r14,#15*4] 343 ldr r9,[r14,#15*4]
347 ldr r10,[r14,#13*4] 344 ldr r10,[r14,#13*4]
348 ldr r11,[r14,#7*4] 345 ldr r11,[r14,#7*4]
349 add r7,r8,r7,ror#2 @ E+=K_xx_xx 346 add r7,r8,r7,ror#2 @ E+=K_xx_xx
350 ldr r12,[r14,#2*4] 347 ldr r12,[r14,#2*4]
351 eor r9,r9,r10 348 eor r9,r9,r10
352 eor r11,r11,r12 @ 1 cycle stall 349 eor r11,r11,r12 @ 1 cycle stall
353 eor r10,r5,r6 @ F_xx_xx 350 eor r10,r5,r6 @ F_xx_xx
354 mov r9,r9,ror#31 351 mov r9,r9,ror#31
355 add r7,r7,r3,ror#27 @ E+=ROR(A,27) 352 add r7,r7,r3,ror#27 @ E+=ROR(A,27)
356 eor r9,r9,r11,ror#31 353 eor r9,r9,r11,ror#31
357 str r9,[r14,#-4]! 354 str r9,[r14,#-4]!
358 » and» r10,r4,r10,ror#2» » » » » @ F_xx_xx 355 » and r10,r4,r10,ror#2» » » » » @ F_xx_xx
359 » and» r11,r5,r6» » » » » @ F_xx_xx 356 » and r11,r5,r6» » » » » @ F_xx_xx
360 add r7,r7,r9 @ E+=X[i] 357 add r7,r7,r9 @ E+=X[i]
361 add r7,r7,r10 @ E+=F_40_59(B,C,D) 358 add r7,r7,r10 @ E+=F_40_59(B,C,D)
362 add r7,r7,r11,ror#2 359 add r7,r7,r11,ror#2
363 ldr r9,[r14,#15*4] 360 ldr r9,[r14,#15*4]
364 ldr r10,[r14,#13*4] 361 ldr r10,[r14,#13*4]
365 ldr r11,[r14,#7*4] 362 ldr r11,[r14,#7*4]
366 add r6,r8,r6,ror#2 @ E+=K_xx_xx 363 add r6,r8,r6,ror#2 @ E+=K_xx_xx
367 ldr r12,[r14,#2*4] 364 ldr r12,[r14,#2*4]
368 eor r9,r9,r10 365 eor r9,r9,r10
369 eor r11,r11,r12 @ 1 cycle stall 366 eor r11,r11,r12 @ 1 cycle stall
370 eor r10,r4,r5 @ F_xx_xx 367 eor r10,r4,r5 @ F_xx_xx
371 mov r9,r9,ror#31 368 mov r9,r9,ror#31
372 add r6,r6,r7,ror#27 @ E+=ROR(A,27) 369 add r6,r6,r7,ror#27 @ E+=ROR(A,27)
373 eor r9,r9,r11,ror#31 370 eor r9,r9,r11,ror#31
374 str r9,[r14,#-4]! 371 str r9,[r14,#-4]!
375 » and» r10,r3,r10,ror#2» » » » » @ F_xx_xx 372 » and r10,r3,r10,ror#2» » » » » @ F_xx_xx
376 » and» r11,r4,r5» » » » » @ F_xx_xx 373 » and r11,r4,r5» » » » » @ F_xx_xx
377 add r6,r6,r9 @ E+=X[i] 374 add r6,r6,r9 @ E+=X[i]
378 add r6,r6,r10 @ E+=F_40_59(B,C,D) 375 add r6,r6,r10 @ E+=F_40_59(B,C,D)
379 add r6,r6,r11,ror#2 376 add r6,r6,r11,ror#2
380 ldr r9,[r14,#15*4] 377 ldr r9,[r14,#15*4]
381 ldr r10,[r14,#13*4] 378 ldr r10,[r14,#13*4]
382 ldr r11,[r14,#7*4] 379 ldr r11,[r14,#7*4]
383 add r5,r8,r5,ror#2 @ E+=K_xx_xx 380 add r5,r8,r5,ror#2 @ E+=K_xx_xx
384 ldr r12,[r14,#2*4] 381 ldr r12,[r14,#2*4]
385 eor r9,r9,r10 382 eor r9,r9,r10
386 eor r11,r11,r12 @ 1 cycle stall 383 eor r11,r11,r12 @ 1 cycle stall
387 eor r10,r3,r4 @ F_xx_xx 384 eor r10,r3,r4 @ F_xx_xx
388 mov r9,r9,ror#31 385 mov r9,r9,ror#31
389 add r5,r5,r6,ror#27 @ E+=ROR(A,27) 386 add r5,r5,r6,ror#27 @ E+=ROR(A,27)
390 eor r9,r9,r11,ror#31 387 eor r9,r9,r11,ror#31
391 str r9,[r14,#-4]! 388 str r9,[r14,#-4]!
392 » and» r10,r7,r10,ror#2» » » » » @ F_xx_xx 389 » and r10,r7,r10,ror#2» » » » » @ F_xx_xx
393 » and» r11,r3,r4» » » » » @ F_xx_xx 390 » and r11,r3,r4» » » » » @ F_xx_xx
394 add r5,r5,r9 @ E+=X[i] 391 add r5,r5,r9 @ E+=X[i]
395 add r5,r5,r10 @ E+=F_40_59(B,C,D) 392 add r5,r5,r10 @ E+=F_40_59(B,C,D)
396 add r5,r5,r11,ror#2 393 add r5,r5,r11,ror#2
397 ldr r9,[r14,#15*4] 394 ldr r9,[r14,#15*4]
398 ldr r10,[r14,#13*4] 395 ldr r10,[r14,#13*4]
399 ldr r11,[r14,#7*4] 396 ldr r11,[r14,#7*4]
400 add r4,r8,r4,ror#2 @ E+=K_xx_xx 397 add r4,r8,r4,ror#2 @ E+=K_xx_xx
401 ldr r12,[r14,#2*4] 398 ldr r12,[r14,#2*4]
402 eor r9,r9,r10 399 eor r9,r9,r10
403 eor r11,r11,r12 @ 1 cycle stall 400 eor r11,r11,r12 @ 1 cycle stall
404 eor r10,r7,r3 @ F_xx_xx 401 eor r10,r7,r3 @ F_xx_xx
405 mov r9,r9,ror#31 402 mov r9,r9,ror#31
406 add r4,r4,r5,ror#27 @ E+=ROR(A,27) 403 add r4,r4,r5,ror#27 @ E+=ROR(A,27)
407 eor r9,r9,r11,ror#31 404 eor r9,r9,r11,ror#31
408 str r9,[r14,#-4]! 405 str r9,[r14,#-4]!
409 » and» r10,r6,r10,ror#2» » » » » @ F_xx_xx 406 » and r10,r6,r10,ror#2» » » » » @ F_xx_xx
410 » and» r11,r7,r3» » » » » @ F_xx_xx 407 » and r11,r7,r3» » » » » @ F_xx_xx
411 add r4,r4,r9 @ E+=X[i] 408 add r4,r4,r9 @ E+=X[i]
412 add r4,r4,r10 @ E+=F_40_59(B,C,D) 409 add r4,r4,r10 @ E+=F_40_59(B,C,D)
413 add r4,r4,r11,ror#2 410 add r4,r4,r11,ror#2
414 ldr r9,[r14,#15*4] 411 ldr r9,[r14,#15*4]
415 ldr r10,[r14,#13*4] 412 ldr r10,[r14,#13*4]
416 ldr r11,[r14,#7*4] 413 ldr r11,[r14,#7*4]
417 add r3,r8,r3,ror#2 @ E+=K_xx_xx 414 add r3,r8,r3,ror#2 @ E+=K_xx_xx
418 ldr r12,[r14,#2*4] 415 ldr r12,[r14,#2*4]
419 eor r9,r9,r10 416 eor r9,r9,r10
420 eor r11,r11,r12 @ 1 cycle stall 417 eor r11,r11,r12 @ 1 cycle stall
421 eor r10,r6,r7 @ F_xx_xx 418 eor r10,r6,r7 @ F_xx_xx
422 mov r9,r9,ror#31 419 mov r9,r9,ror#31
423 add r3,r3,r4,ror#27 @ E+=ROR(A,27) 420 add r3,r3,r4,ror#27 @ E+=ROR(A,27)
424 eor r9,r9,r11,ror#31 421 eor r9,r9,r11,ror#31
425 str r9,[r14,#-4]! 422 str r9,[r14,#-4]!
426 » and» r10,r5,r10,ror#2» » » » » @ F_xx_xx 423 » and r10,r5,r10,ror#2» » » » » @ F_xx_xx
427 » and» r11,r6,r7» » » » » @ F_xx_xx 424 » and r11,r6,r7» » » » » @ F_xx_xx
428 add r3,r3,r9 @ E+=X[i] 425 add r3,r3,r9 @ E+=X[i]
429 add r3,r3,r10 @ E+=F_40_59(B,C,D) 426 add r3,r3,r10 @ E+=F_40_59(B,C,D)
430 add r3,r3,r11,ror#2 427 add r3,r3,r11,ror#2
431 teq r14,sp 428 teq r14,sp
432 bne .L_40_59 @ [+((12+5)*5+2)*4] 429 bne .L_40_59 @ [+((12+5)*5+2)*4]
433 430
434 ldr r8,.LK_60_79 431 ldr r8,.LK_60_79
435 sub sp,sp,#20*4 432 sub sp,sp,#20*4
436 cmp sp,#0 @ set carry to denote 60_79 433 cmp sp,#0 @ set carry to denote 60_79
437 b .L_20_39_or_60_79 @ [+4], spare 300 bytes 434 b .L_20_39_or_60_79 @ [+4], spare 300 bytes
438 .L_done: 435 .L_done:
439 add sp,sp,#80*4 @ "deallocate" stack frame 436 add sp,sp,#80*4 @ "deallocate" stack frame
440 ldmia r0,{r8,r9,r10,r11,r12} 437 ldmia r0,{r8,r9,r10,r11,r12}
441 add r3,r8,r3 438 add r3,r8,r3
442 add r4,r9,r4 439 add r4,r9,r4
443 add r5,r10,r5,ror#2 440 add r5,r10,r5,ror#2
444 add r6,r11,r6,ror#2 441 add r6,r11,r6,ror#2
445 add r7,r12,r7,ror#2 442 add r7,r12,r7,ror#2
446 stmia r0,{r3,r4,r5,r6,r7} 443 stmia r0,{r3,r4,r5,r6,r7}
447 teq r1,r2 444 teq r1,r2
448 bne .Lloop @ [+18], total 1307 445 bne .Lloop @ [+18], total 1307
449 446
450 #if __ARM_ARCH__>=5 447 #if __ARM_ARCH__>=5
451 » ldmia» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} 448 » ldmia» sp!,{r4-r12,pc}
452 #else 449 #else
453 » ldmia» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} 450 » ldmia» sp!,{r4-r12,lr}
454 tst lr,#1 451 tst lr,#1
455 moveq pc,lr @ be binary compatible with V4, yet 452 moveq pc,lr @ be binary compatible with V4, yet
456 .word» 0xe12fff1e» » » @ interoperable with Thumb ISA:-) 453 » .word» 0xe12fff1e» » » @ interoperable with Thumb ISA:-)
457 #endif 454 #endif
458 .size sha1_block_data_order,.-sha1_block_data_order 455 .size sha1_block_data_order,.-sha1_block_data_order
459 456
460 .align 5 457 .align 5
461 .LK_00_19:.word»0x5a827999 458 .LK_00_19:» .word» 0x5a827999
462 .LK_20_39:.word»0x6ed9eba1 459 .LK_20_39:» .word» 0x6ed9eba1
463 .LK_40_59:.word»0x8f1bbcdc 460 .LK_40_59:» .word» 0x8f1bbcdc
464 .LK_60_79:.word»0xca62c1d6 461 .LK_60_79:» .word» 0xca62c1d6
465 #if __ARM_MAX_ARCH__>=7 462 #if __ARM_MAX_ARCH__>=7
466 .LOPENSSL_armcap: 463 .LOPENSSL_armcap:
467 .word OPENSSL_armcap_P-sha1_block_data_order 464 .word OPENSSL_armcap_P-sha1_block_data_order
468 #endif 465 #endif
469 .byte» 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,47,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 466 .asciz» "SHA1 block transform for ARMv4/NEON/ARMv8, CRYPTOGAMS by <appro@openssl.org>"
470 .align» 2
471 .align 5 467 .align 5
472 #if __ARM_MAX_ARCH__>=7 468 #if __ARM_MAX_ARCH__>=7
473 .arch armv7-a 469 .arch armv7-a
474 .fpu neon 470 .fpu neon
475 471
476 .type sha1_block_data_order_neon,%function 472 .type sha1_block_data_order_neon,%function
477 .align 4 473 .align 4
478 sha1_block_data_order_neon: 474 sha1_block_data_order_neon:
479 .LNEON: 475 .LNEON:
480 » stmdb» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} 476 » stmdb» sp!,{r4-r12,lr}
481 add r2,r1,r2,lsl#6 @ r2 to point at the end of r1 477 add r2,r1,r2,lsl#6 @ r2 to point at the end of r1
482 @ dmb @ errata #451034 on early Cortex A8 478 @ dmb @ errata #451034 on early Cortex A8
483 @ vstmdb sp!,{d8-d15} @ ABI specification says so 479 @ vstmdb sp!,{d8-d15} @ ABI specification says so
484 mov r14,sp 480 mov r14,sp
485 sub sp,sp,#64 @ alloca 481 sub sp,sp,#64 @ alloca
486 adr r8,.LK_00_19 482 adr r8,.LK_00_19
487 bic sp,sp,#15 @ align for 128-bit stores 483 bic sp,sp,#15 @ align for 128-bit stores
488 484
489 ldmia r0,{r3,r4,r5,r6,r7} @ load context 485 ldmia r0,{r3,r4,r5,r6,r7} @ load context
490 mov r12,sp 486 mov r12,sp
491 487
492 » vld1.8» {q0,q1},[r1]!» @ handles unaligned 488 » vld1.8» » {q0-q1},[r1]!» @ handles unaligned
493 » veor» q15,q15,q15 489 » veor» » q15,q15,q15
494 » vld1.8» {q2,q3},[r1]! 490 » vld1.8» » {q2-q3},[r1]!
495 » vld1.32»{d28[],d29[]},[r8,:32]!»@ load K_00_19 491 » vld1.32»» {d28[],d29[]},[r8,:32]!»@ load K_00_19
496 vrev32.8 q0,q0 @ yes, even on 492 vrev32.8 q0,q0 @ yes, even on
497 vrev32.8 q1,q1 @ big-endian... 493 vrev32.8 q1,q1 @ big-endian...
498 vrev32.8 q2,q2 494 vrev32.8 q2,q2
499 vadd.i32 q8,q0,q14 495 vadd.i32 q8,q0,q14
500 vrev32.8 q3,q3 496 vrev32.8 q3,q3
501 vadd.i32 q9,q1,q14 497 vadd.i32 q9,q1,q14
502 » vst1.32»{q8},[r12,:128]! 498 » vst1.32»» {q8},[r12,:128]!
503 vadd.i32 q10,q2,q14 499 vadd.i32 q10,q2,q14
504 » vst1.32»{q9},[r12,:128]! 500 » vst1.32»» {q9},[r12,:128]!
505 » vst1.32»{q10},[r12,:128]! 501 » vst1.32»» {q10},[r12,:128]!
506 » ldr» r9,[sp]»» » @ big RAW stall 502 » ldr» » r9,[sp]»» » @ big RAW stall
507 503
508 .Loop_neon: 504 .Loop_neon:
509 vext.8 q8,q0,q1,#8 505 vext.8 q8,q0,q1,#8
510 bic r10,r6,r4 506 bic r10,r6,r4
511 add r7,r7,r9 507 add r7,r7,r9
512 and r11,r5,r4 508 and r11,r5,r4
513 vadd.i32 q13,q3,q14 509 vadd.i32 q13,q3,q14
514 ldr r9,[sp,#4] 510 ldr r9,[sp,#4]
515 add r7,r7,r3,ror#27 511 add r7,r7,r3,ror#27
516 vext.8 q12,q3,q15,#4 512 vext.8 q12,q3,q15,#4
(...skipping 657 matching lines...)
1174 mov r6,r6,ror#2 1170 mov r6,r6,ror#2
1175 add r4,r4,r11 1171 add r4,r4,r11
1176 vadd.i32 q13,q3,q14 1172 vadd.i32 q13,q3,q14
1177 eor r10,r5,r7 1173 eor r10,r5,r7
1178 add r3,r3,r9 1174 add r3,r3,r9
1179 vst1.32 {q13},[r12,:128]! 1175 vst1.32 {q13},[r12,:128]!
1180 sub r12,r12,#64 1176 sub r12,r12,#64
1181 teq r1,r2 1177 teq r1,r2
1182 sub r8,r8,#16 1178 sub r8,r8,#16
1183 subeq r1,r1,#64 1179 subeq r1,r1,#64
1184 » vld1.8» {q0,q1},[r1]! 1180 » vld1.8» {q0-q1},[r1]!
1185 ldr r9,[sp,#4] 1181 ldr r9,[sp,#4]
1186 eor r11,r10,r6 1182 eor r11,r10,r6
1187 » vld1.8» {q2,q3},[r1]! 1183 » vld1.8» {q2-q3},[r1]!
1188 add r3,r3,r4,ror#27 1184 add r3,r3,r4,ror#27
1189 mov r5,r5,ror#2 1185 mov r5,r5,ror#2
1190 vld1.32 {d28[],d29[]},[r8,:32]! 1186 vld1.32 {d28[],d29[]},[r8,:32]!
1191 add r3,r3,r11 1187 add r3,r3,r11
1192 eor r10,r4,r6 1188 eor r10,r4,r6
1193 vrev32.8 q0,q0 1189 vrev32.8 q0,q0
1194 add r7,r7,r9 1190 add r7,r7,r9
1195 ldr r9,[sp,#8] 1191 ldr r9,[sp,#8]
1196 eor r11,r10,r5 1192 eor r11,r10,r5
1197 add r7,r7,r3,ror#27 1193 add r7,r7,r3,ror#27
(...skipping 112 matching lines...)
1310 add r5,r5,r11 1306 add r5,r5,r11
1311 add r6,r6,r12 1307 add r6,r6,r12
1312 moveq sp,r14 1308 moveq sp,r14
1313 add r7,r7,r9 1309 add r7,r7,r9
1314 ldrne r9,[sp] 1310 ldrne r9,[sp]
1315 stmia r0,{r3,r4,r5,r6,r7} 1311 stmia r0,{r3,r4,r5,r6,r7}
1316 addne r12,sp,#3*16 1312 addne r12,sp,#3*16
1317 bne .Loop_neon 1313 bne .Loop_neon
1318 1314
1319 @ vldmia sp!,{d8-d15} 1315 @ vldmia sp!,{d8-d15}
1320 » ldmia» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} 1316 » ldmia» sp!,{r4-r12,pc}
1321 .size sha1_block_data_order_neon,.-sha1_block_data_order_neon 1317 .size sha1_block_data_order_neon,.-sha1_block_data_order_neon
1322 #endif 1318 #endif
1323 #if __ARM_MAX_ARCH__>=7 1319 #if __ARM_MAX_ARCH__>=7
1324 .type sha1_block_data_order_armv8,%function 1320 .type sha1_block_data_order_armv8,%function
1325 .align 5 1321 .align 5
1326 sha1_block_data_order_armv8: 1322 sha1_block_data_order_armv8:
1327 .LARMv8: 1323 .LARMv8:
1328 » vstmdb» sp!,{d8,d9,d10,d11,d12,d13,d14,d15}» » @ ABI specificat ion says so 1324 » vstmdb» sp!,{d8-d15}» » @ ABI specification says so
1329 1325
1330 veor q1,q1,q1 1326 veor q1,q1,q1
1331 adr r3,.LK_00_19 1327 adr r3,.LK_00_19
1332 vld1.32 {q0},[r0]! 1328 vld1.32 {q0},[r0]!
1333 vld1.32 {d2[0]},[r0] 1329 vld1.32 {d2[0]},[r0]
1334 sub r0,r0,#16 1330 sub r0,r0,#16
1335 vld1.32 {d16[],d17[]},[r3,:32]! 1331 vld1.32 {d16[],d17[]},[r3,:32]!
1336 vld1.32 {d18[],d19[]},[r3,:32]! 1332 vld1.32 {d18[],d19[]},[r3,:32]!
1337 vld1.32 {d20[],d21[]},[r3,:32]! 1333 vld1.32 {d20[],d21[]},[r3,:32]!
1338 vld1.32 {d22[],d23[]},[r3,:32] 1334 vld1.32 {d22[],d23[]},[r3,:32]
1339 1335
1340 .Loop_v8: 1336 .Loop_v8:
1341 » vld1.8» {q4,q5},[r1]! 1337 » vld1.8» » {q4-q5},[r1]!
1342 » vld1.8» {q6,q7},[r1]! 1338 » vld1.8» » {q6-q7},[r1]!
1343 vrev32.8 q4,q4 1339 vrev32.8 q4,q4
1344 vrev32.8 q5,q5 1340 vrev32.8 q5,q5
1345 1341
1346 vadd.i32 q12,q8,q4 1342 vadd.i32 q12,q8,q4
1347 vrev32.8 q6,q6 1343 vrev32.8 q6,q6
1348 » vmov» q14,q0» @ offload 1344 » vmov» » q14,q0» @ offload
1349 » subs» r2,r2,#1 1345 » subs» » r2,r2,#1
1350 1346
1351 vadd.i32 q13,q8,q5 1347 vadd.i32 q13,q8,q5
1352 vrev32.8 q7,q7 1348 vrev32.8 q7,q7
1353 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 0 1349 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 0
1354 .byte» 0x68,0x0c,0x02,0xf2» @ sha1c q0,q1,q12 1350 » .byte» 0x68,0x0c,0x02,0xf2» @ sha1c q0,q1,q12
1355 vadd.i32 q12,q8,q6 1351 vadd.i32 q12,q8,q6
1356 .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6 1352 » .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6
1357 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 1 1353 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 1
1358 .byte» 0x6a,0x0c,0x06,0xf2» @ sha1c q0,q3,q13 1354 » .byte» 0x6a,0x0c,0x06,0xf2» @ sha1c q0,q3,q13
1359 vadd.i32 q13,q8,q7 1355 vadd.i32 q13,q8,q7
1360 .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7 1356 » .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7
1361 .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7 1357 » .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7
1362 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 2 1358 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 2
1363 .byte» 0x68,0x0c,0x04,0xf2» @ sha1c q0,q2,q12 1359 » .byte» 0x68,0x0c,0x04,0xf2» @ sha1c q0,q2,q12
1364 vadd.i32 q12,q8,q4 1360 vadd.i32 q12,q8,q4
1365 .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4 1361 » .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4
1366 .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4 1362 » .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4
1367 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 3 1363 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 3
1368 .byte» 0x6a,0x0c,0x06,0xf2» @ sha1c q0,q3,q13 1364 » .byte» 0x6a,0x0c,0x06,0xf2» @ sha1c q0,q3,q13
1369 vadd.i32 q13,q9,q5 1365 vadd.i32 q13,q9,q5
1370 .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5 1366 » .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5
1371 .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5 1367 » .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5
1372 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 4 1368 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 4
1373 .byte» 0x68,0x0c,0x04,0xf2» @ sha1c q0,q2,q12 1369 » .byte» 0x68,0x0c,0x04,0xf2» @ sha1c q0,q2,q12
1374 vadd.i32 q12,q9,q6 1370 vadd.i32 q12,q9,q6
1375 .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6 1371 » .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6
1376 .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6 1372 » .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6
1377 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 5 1373 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 5
1378 .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13 1374 » .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13
1379 vadd.i32 q13,q9,q7 1375 vadd.i32 q13,q9,q7
1380 .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7 1376 » .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7
1381 .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7 1377 » .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7
1382 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 6 1378 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 6
1383 .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12 1379 » .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12
1384 vadd.i32 q12,q9,q4 1380 vadd.i32 q12,q9,q4
1385 .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4 1381 » .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4
1386 .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4 1382 » .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4
1387 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 7 1383 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 7
1388 .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13 1384 » .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13
1389 vadd.i32 q13,q9,q5 1385 vadd.i32 q13,q9,q5
1390 .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5 1386 » .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5
1391 .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5 1387 » .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5
1392 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 8 1388 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 8
1393 .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12 1389 » .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12
1394 vadd.i32 q12,q10,q6 1390 vadd.i32 q12,q10,q6
1395 .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6 1391 » .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6
1396 .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6 1392 » .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6
1397 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 9 1393 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 9
1398 .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13 1394 » .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13
1399 vadd.i32 q13,q10,q7 1395 vadd.i32 q13,q10,q7
1400 .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7 1396 » .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7
1401 .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7 1397 » .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7
1402 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 10 1398 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 10
1403 .byte» 0x68,0x0c,0x24,0xf2» @ sha1m q0,q2,q12 1399 » .byte» 0x68,0x0c,0x24,0xf2» @ sha1m q0,q2,q12
1404 vadd.i32 q12,q10,q4 1400 vadd.i32 q12,q10,q4
1405 .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4 1401 » .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4
1406 .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4 1402 » .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4
1407 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 11 1403 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 11
1408 .byte» 0x6a,0x0c,0x26,0xf2» @ sha1m q0,q3,q13 1404 » .byte» 0x6a,0x0c,0x26,0xf2» @ sha1m q0,q3,q13
1409 vadd.i32 q13,q10,q5 1405 vadd.i32 q13,q10,q5
1410 .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5 1406 » .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5
1411 .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5 1407 » .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5
1412 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 12 1408 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 12
1413 .byte» 0x68,0x0c,0x24,0xf2» @ sha1m q0,q2,q12 1409 » .byte» 0x68,0x0c,0x24,0xf2» @ sha1m q0,q2,q12
1414 vadd.i32 q12,q10,q6 1410 vadd.i32 q12,q10,q6
1415 .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6 1411 » .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6
1416 .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6 1412 » .byte» 0x4c,0x8c,0x3a,0xf2» @ sha1su0 q4,q5,q6
1417 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 13 1413 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 13
1418 .byte» 0x6a,0x0c,0x26,0xf2» @ sha1m q0,q3,q13 1414 » .byte» 0x6a,0x0c,0x26,0xf2» @ sha1m q0,q3,q13
1419 vadd.i32 q13,q11,q7 1415 vadd.i32 q13,q11,q7
1420 .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7 1416 » .byte» 0x8e,0x83,0xba,0xf3» @ sha1su1 q4,q7
1421 .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7 1417 » .byte» 0x4e,0xac,0x3c,0xf2» @ sha1su0 q5,q6,q7
1422 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 14 1418 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 14
1423 .byte» 0x68,0x0c,0x24,0xf2» @ sha1m q0,q2,q12 1419 » .byte» 0x68,0x0c,0x24,0xf2» @ sha1m q0,q2,q12
1424 vadd.i32 q12,q11,q4 1420 vadd.i32 q12,q11,q4
1425 .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4 1421 » .byte» 0x88,0xa3,0xba,0xf3» @ sha1su1 q5,q4
1426 .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4 1422 » .byte» 0x48,0xcc,0x3e,0xf2» @ sha1su0 q6,q7,q4
1427 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 15 1423 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 15
1428 .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13 1424 » .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13
1429 vadd.i32 q13,q11,q5 1425 vadd.i32 q13,q11,q5
1430 .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5 1426 » .byte» 0x8a,0xc3,0xba,0xf3» @ sha1su1 q6,q5
1431 .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5 1427 » .byte» 0x4a,0xec,0x38,0xf2» @ sha1su0 q7,q4,q5
1432 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 16 1428 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 16
1433 .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12 1429 » .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12
1434 vadd.i32 q12,q11,q6 1430 vadd.i32 q12,q11,q6
1435 .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6 1431 » .byte» 0x8c,0xe3,0xba,0xf3» @ sha1su1 q7,q6
1436 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 17 1432 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 17
1437 .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13 1433 » .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13
1438 vadd.i32 q13,q11,q7 1434 vadd.i32 q13,q11,q7
1439 1435
1440 .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 18 1436 » .byte» 0xc0,0x62,0xb9,0xf3» @ sha1h q3,q0» » @ 18
1441 .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12 1437 » .byte» 0x68,0x0c,0x14,0xf2» @ sha1p q0,q2,q12
1442 1438
1443 .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 19 1439 » .byte» 0xc0,0x42,0xb9,0xf3» @ sha1h q2,q0» » @ 19
1444 .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13 1440 » .byte» 0x6a,0x0c,0x16,0xf2» @ sha1p q0,q3,q13
1445 1441
1446 vadd.i32 q1,q1,q2 1442 vadd.i32 q1,q1,q2
1447 vadd.i32 q0,q0,q14 1443 vadd.i32 q0,q0,q14
1448 » bne» .Loop_v8 1444 » bne» » .Loop_v8
1449 1445
1450 » vst1.32»{q0},[r0]! 1446 » vst1.32»» {q0},[r0]!
1451 » vst1.32»{d2[0]},[r0] 1447 » vst1.32»» {d2[0]},[r0]
1452 1448
1453 » vldmia» sp!,{d8,d9,d10,d11,d12,d13,d14,d15} 1449 » vldmia» sp!,{d8-d15}
1454 bx lr @ bx lr 1450 bx lr @ bx lr
1455 .size sha1_block_data_order_armv8,.-sha1_block_data_order_armv8 1451 .size sha1_block_data_order_armv8,.-sha1_block_data_order_armv8
1456 #endif 1452 #endif
1457 #if __ARM_MAX_ARCH__>=7 1453 #if __ARM_MAX_ARCH__>=7
1458 .comm OPENSSL_armcap_P,4,4 1454 .comm OPENSSL_armcap_P,4,4
1459 .hidden OPENSSL_armcap_P 1455 .hidden OPENSSL_armcap_P
1460 #endif 1456 #endif