Chromium Code Reviews

Side by Side Diff: third_party/boringssl/linux-arm/crypto/sha/sha256-armv4.S

Issue 1128293004: Revert of Roll src/third_party/boringssl/src 68de407:de12d6c (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Thanks Rietveld, Thietveld. Created 5 years, 7 months ago
1 1
2 @ ==================================================================== 2 @ ====================================================================
3 @ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL 3 @ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
4 @ project. The module is, however, dual licensed under OpenSSL and 4 @ project. The module is, however, dual licensed under OpenSSL and
5 @ CRYPTOGAMS licenses depending on where you obtain it. For further 5 @ CRYPTOGAMS licenses depending on where you obtain it. For further
6 @ details see http://www.openssl.org/~appro/cryptogams/. 6 @ details see http://www.openssl.org/~appro/cryptogams/.
7 @ 7 @
8 @ Permission to use under GPL terms is granted. 8 @ Permission to use under GPL terms is granted.
9 @ ==================================================================== 9 @ ====================================================================
10 10
(...skipping 29 matching lines...)
40 # include "arm_arch.h" 40 # include "arm_arch.h"
41 #else 41 #else
42 # define __ARM_ARCH__ __LINUX_ARM_ARCH__ 42 # define __ARM_ARCH__ __LINUX_ARM_ARCH__
43 # define __ARM_MAX_ARCH__ 7 43 # define __ARM_MAX_ARCH__ 7
44 #endif 44 #endif
45 45
46 .text 46 .text
47 #if __ARM_ARCH__<7 47 #if __ARM_ARCH__<7
48 .code 32 48 .code 32
49 #else 49 #else
50 .syntax»unified 50 .syntax unified
51 # if defined(__thumb2__) && !defined(__APPLE__) 51 # ifdef __thumb2__
52 # define adrl adr 52 # define adrl adr
53 .thumb 53 .thumb
54 # else 54 # else
55 .code» 32 55 .code 32
56 # endif 56 # endif
57 #endif 57 #endif
58 58
59 .type K256,%object 59 .type K256,%object
60 .align 5 60 .align 5
61 K256: 61 K256:
62 .word 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 62 .word 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
63 .word 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 63 .word 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
64 .word 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 64 .word 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
65 .word 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 65 .word 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
66 .word 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc 66 .word 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
67 .word 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da 67 .word 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
68 .word 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 68 .word 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
69 .word 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 69 .word 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
70 .word 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 70 .word 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
71 .word 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 71 .word 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
72 .word 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 72 .word 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
73 .word 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 73 .word 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
74 .word 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 74 .word 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
75 .word 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 75 .word 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
76 .word 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 76 .word 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
77 .word 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 77 .word 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
78 .size K256,.-K256 78 .size K256,.-K256
79 .word 0 @ terminator 79 .word 0 @ terminator
80 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 80 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
81 .LOPENSSL_armcap: 81 .LOPENSSL_armcap:
82 .word» OPENSSL_armcap_P-.Lsha256_block_data_order 82 .word» OPENSSL_armcap_P-sha256_block_data_order
83 #endif 83 #endif
84 .align 5 84 .align 5
85 85
86 .globl» sha256_block_data_order 86 .global»sha256_block_data_order
87 .type sha256_block_data_order,%function 87 .type sha256_block_data_order,%function
88 sha256_block_data_order: 88 sha256_block_data_order:
89 .Lsha256_block_data_order:
90 #if __ARM_ARCH__<7 89 #if __ARM_ARCH__<7
91 sub r3,pc,#8 @ sha256_block_data_order 90 sub r3,pc,#8 @ sha256_block_data_order
92 #else 91 #else
93 adr r3,sha256_block_data_order 92 adr r3,sha256_block_data_order
94 #endif 93 #endif
95 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 94 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
96 ldr r12,.LOPENSSL_armcap 95 ldr r12,.LOPENSSL_armcap
97 ldr r12,[r3,r12] @ OPENSSL_armcap_P 96 ldr r12,[r3,r12] @ OPENSSL_armcap_P
98 #ifdef __APPLE__
99 ldr r12,[r12]
100 #endif
101 tst r12,#ARMV8_SHA256 97 tst r12,#ARMV8_SHA256
102 bne .LARMv8 98 bne .LARMv8
103 tst r12,#ARMV7_NEON 99 tst r12,#ARMV7_NEON
104 bne .LNEON 100 bne .LNEON
105 #endif 101 #endif
106 add r2,r1,r2,lsl#6 @ len to point at the end of inp 102 add r2,r1,r2,lsl#6 @ len to point at the end of inp
107 stmdb sp!,{r0,r1,r2,r4-r11,lr} 103 stmdb sp!,{r0,r1,r2,r4-r11,lr}
108 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11} 104 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11}
109 sub r14,r3,#256+32 @ K256 105 sub r14,r3,#256+32 @ K256
110 sub sp,sp,#16*4 @ alloca(X[16]) 106 sub sp,sp,#16*4 @ alloca(X[16])
(...skipping 1743 matching lines...)
1854 ldr r12,[sp,#18*4] @ pull inp+len 1850 ldr r12,[sp,#18*4] @ pull inp+len
1855 add r10,r10,r0 1851 add r10,r10,r0
1856 add r11,r11,r2 1852 add r11,r11,r2
1857 stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11} 1853 stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11}
1858 cmp r1,r12 1854 cmp r1,r12
1859 sub r14,r14,#256 @ rewind Ktbl 1855 sub r14,r14,#256 @ rewind Ktbl
1860 bne .Loop 1856 bne .Loop
1861 1857
1862 add sp,sp,#19*4 @ destroy frame 1858 add sp,sp,#19*4 @ destroy frame
1863 #if __ARM_ARCH__>=5 1859 #if __ARM_ARCH__>=5
1864 » ldmia» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,pc} 1860 » ldmia» sp!,{r4-r11,pc}
1865 #else 1861 #else
1866 » ldmia» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,lr} 1862 » ldmia» sp!,{r4-r11,lr}
1867 tst lr,#1 1863 tst lr,#1
1868 moveq pc,lr @ be binary compatible with V4, yet 1864 moveq pc,lr @ be binary compatible with V4, yet
1869 » .word» 0xe12fff1e» » » @ interoperable with Thumb ISA:-) 1865 » .word» 0xe12fff1e» » » @ interoperable with Thumb ISA:-)
1870 #endif 1866 #endif
1871 .size sha256_block_data_order,.-sha256_block_data_order 1867 .size sha256_block_data_order,.-sha256_block_data_order
1872 #if __ARM_MAX_ARCH__>=7 1868 #if __ARM_MAX_ARCH__>=7
1873 .arch armv7-a 1869 .arch armv7-a
1874 .fpu neon 1870 .fpu neon
1875 1871
1876 .globl» sha256_block_data_order_neon 1872 .global»sha256_block_data_order_neon
1877 .type sha256_block_data_order_neon,%function 1873 .type sha256_block_data_order_neon,%function
1878 .align 4 1874 .align 4
1879 sha256_block_data_order_neon: 1875 sha256_block_data_order_neon:
1880 .LNEON: 1876 .LNEON:
1881 » stmdb» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr} 1877 » stmdb» sp!,{r4-r12,lr}
1882 1878
1883 sub r11,sp,#16*4+16 1879 sub r11,sp,#16*4+16
1884 » adr» r14,K256 1880 » adrl» r14,K256
1885 bic r11,r11,#15 @ align for 128-bit stores 1881 bic r11,r11,#15 @ align for 128-bit stores
1886 mov r12,sp 1882 mov r12,sp
1887 mov sp,r11 @ alloca 1883 mov sp,r11 @ alloca
1888 add r2,r1,r2,lsl#6 @ len to point at the end of inp 1884 add r2,r1,r2,lsl#6 @ len to point at the end of inp
1889 1885
1890 » vld1.8» {q0},[r1]! 1886 » vld1.8» » {q0},[r1]!
1891 » vld1.8» {q1},[r1]! 1887 » vld1.8» » {q1},[r1]!
1892 » vld1.8» {q2},[r1]! 1888 » vld1.8» » {q2},[r1]!
1893 » vld1.8» {q3},[r1]! 1889 » vld1.8» » {q3},[r1]!
1894 » vld1.32»{q8},[r14,:128]! 1890 » vld1.32»» {q8},[r14,:128]!
1895 » vld1.32»{q9},[r14,:128]! 1891 » vld1.32»» {q9},[r14,:128]!
1896 » vld1.32»{q10},[r14,:128]! 1892 » vld1.32»» {q10},[r14,:128]!
1897 » vld1.32»{q11},[r14,:128]! 1893 » vld1.32»» {q11},[r14,:128]!
1898 vrev32.8 q0,q0 @ yes, even on 1894 vrev32.8 q0,q0 @ yes, even on
1899 » str» r0,[sp,#64] 1895 » str» » r0,[sp,#64]
1900 vrev32.8 q1,q1 @ big-endian 1896 vrev32.8 q1,q1 @ big-endian
1901 » str» r1,[sp,#68] 1897 » str» » r1,[sp,#68]
1902 » mov» r1,sp 1898 » mov» » r1,sp
1903 vrev32.8 q2,q2 1899 vrev32.8 q2,q2
1904 » str» r2,[sp,#72] 1900 » str» » r2,[sp,#72]
1905 vrev32.8 q3,q3 1901 vrev32.8 q3,q3
1906 » str» r12,[sp,#76]» » @ save original sp 1902 » str» » r12,[sp,#76]» » @ save original sp
1907 vadd.i32 q8,q8,q0 1903 vadd.i32 q8,q8,q0
1908 vadd.i32 q9,q9,q1 1904 vadd.i32 q9,q9,q1
1909 » vst1.32»{q8},[r1,:128]! 1905 » vst1.32»» {q8},[r1,:128]!
1910 vadd.i32 q10,q10,q2 1906 vadd.i32 q10,q10,q2
1911 » vst1.32»{q9},[r1,:128]! 1907 » vst1.32»» {q9},[r1,:128]!
1912 vadd.i32 q11,q11,q3 1908 vadd.i32 q11,q11,q3
1913 » vst1.32»{q10},[r1,:128]! 1909 » vst1.32»» {q10},[r1,:128]!
1914 » vst1.32»{q11},[r1,:128]! 1910 » vst1.32»» {q11},[r1,:128]!
1915 1911
1916 » ldmia» r0,{r4,r5,r6,r7,r8,r9,r10,r11} 1912 » ldmia» » r0,{r4-r11}
1917 » sub» r1,r1,#64 1913 » sub» » r1,r1,#64
1918 » ldr» r2,[sp,#0] 1914 » ldr» » r2,[sp,#0]
1919 » eor» r12,r12,r12 1915 » eor» » r12,r12,r12
1920 » eor» r3,r5,r6 1916 » eor» » r3,r5,r6
1921 » b» .L_00_48 1917 » b» » .L_00_48
1922 1918
1923 .align 4 1919 .align 4
1924 .L_00_48: 1920 .L_00_48:
1925 vext.8 q8,q0,q1,#4 1921 vext.8 q8,q0,q1,#4
1926 add r11,r11,r2 1922 add r11,r11,r2
1927 eor r2,r9,r10 1923 eor r2,r9,r10
1928 eor r0,r8,r8,ror#5 1924 eor r0,r8,r8,ror#5
1929 vext.8 q9,q2,q3,#4 1925 vext.8 q9,q2,q3,#4
1930 add r4,r4,r12 1926 add r4,r4,r12
1931 and r2,r2,r8 1927 and r2,r2,r8
(...skipping 380 matching lines...)
2312 and r12,r12,r3 2308 and r12,r12,r3
2313 add r8,r8,r4 2309 add r8,r8,r4
2314 vst1.32 {q8},[r1,:128]! 2310 vst1.32 {q8},[r1,:128]!
2315 add r4,r4,r0,ror#2 2311 add r4,r4,r0,ror#2
2316 eor r12,r12,r6 2312 eor r12,r12,r6
2317 teq r2,#0 @ check for K256 terminator 2313 teq r2,#0 @ check for K256 terminator
2318 ldr r2,[sp,#0] 2314 ldr r2,[sp,#0]
2319 sub r1,r1,#64 2315 sub r1,r1,#64
2320 bne .L_00_48 2316 bne .L_00_48
2321 2317
2322 » ldr» r1,[sp,#68] 2318 » ldr» » r1,[sp,#68]
2323 » ldr» r0,[sp,#72] 2319 » ldr» » r0,[sp,#72]
2324 » sub» r14,r14,#256» @ rewind r14 2320 » sub» » r14,r14,#256» @ rewind r14
2325 » teq» r1,r0 2321 » teq» » r1,r0
2326 » it» eq 2322 » it» » eq
2327 » subeq» r1,r1,#64» » @ avoid SEGV 2323 » subeq» » r1,r1,#64» » @ avoid SEGV
2328 » vld1.8» {q0},[r1]!» » @ load next input block 2324 » vld1.8» » {q0},[r1]!» » @ load next input block
2329 » vld1.8» {q1},[r1]! 2325 » vld1.8» » {q1},[r1]!
2330 » vld1.8» {q2},[r1]! 2326 » vld1.8» » {q2},[r1]!
2331 » vld1.8» {q3},[r1]! 2327 » vld1.8» » {q3},[r1]!
2332 » it» ne 2328 » it» » ne
2333 » strne» r1,[sp,#68] 2329 » strne» » r1,[sp,#68]
2334 » mov» r1,sp 2330 » mov» » r1,sp
2335 add r11,r11,r2 2331 add r11,r11,r2
2336 eor r2,r9,r10 2332 eor r2,r9,r10
2337 eor r0,r8,r8,ror#5 2333 eor r0,r8,r8,ror#5
2338 add r4,r4,r12 2334 add r4,r4,r12
2339 vld1.32 {q8},[r14,:128]! 2335 vld1.32 {q8},[r14,:128]!
2340 and r2,r2,r8 2336 and r2,r2,r8
2341 eor r12,r0,r8,ror#19 2337 eor r12,r0,r8,ror#19
2342 eor r0,r4,r4,ror#11 2338 eor r0,r4,r4,ror#11
2343 eor r2,r2,r10 2339 eor r2,r2,r10
2344 vrev32.8 q0,q0 2340 vrev32.8 q0,q0
(...skipping 289 matching lines...)
2634 add r7,r7,r1 2630 add r7,r7,r1
2635 ldr r1,[r2,#28] 2631 ldr r1,[r2,#28]
2636 add r8,r8,r0 2632 add r8,r8,r0
2637 str r4,[r2],#4 2633 str r4,[r2],#4
2638 add r9,r9,r12 2634 add r9,r9,r12
2639 str r5,[r2],#4 2635 str r5,[r2],#4
2640 add r10,r10,r3 2636 add r10,r10,r3
2641 str r6,[r2],#4 2637 str r6,[r2],#4
2642 add r11,r11,r1 2638 add r11,r11,r1
2643 str r7,[r2],#4 2639 str r7,[r2],#4
2644 » stmia» r2,{r8,r9,r10,r11} 2640 » stmia» r2,{r8-r11}
2645 2641
2646 ittte ne 2642 ittte ne
2647 movne r1,sp 2643 movne r1,sp
2648 ldrne r2,[sp,#0] 2644 ldrne r2,[sp,#0]
2649 eorne r12,r12,r12 2645 eorne r12,r12,r12
2650 ldreq sp,[sp,#76] @ restore original sp 2646 ldreq sp,[sp,#76] @ restore original sp
2651 itt ne 2647 itt ne
2652 eorne r3,r5,r6 2648 eorne r3,r5,r6
2653 bne .L_00_48 2649 bne .L_00_48
2654 2650
2655 » ldmia» sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc} 2651 » ldmia» sp!,{r4-r12,pc}
2656 .size sha256_block_data_order_neon,.-sha256_block_data_order_neon 2652 .size sha256_block_data_order_neon,.-sha256_block_data_order_neon
2657 #endif 2653 #endif
2658 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 2654 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
2659 2655
2660 # if defined(__thumb2__) && !defined(__APPLE__) 2656 # ifdef __thumb2__
2661 # define INST(a,b,c,d) .byte c,d|0xc,a,b 2657 # define INST(a,b,c,d) .byte c,d|0xc,a,b
2662 # else 2658 # else
2663 # define INST(a,b,c,d) .byte a,b,c,d 2659 # define INST(a,b,c,d) .byte a,b,c,d
2664 # endif 2660 # endif
2665 2661
2666 .type sha256_block_data_order_armv8,%function 2662 .type sha256_block_data_order_armv8,%function
2667 .align 5 2663 .align 5
2668 sha256_block_data_order_armv8: 2664 sha256_block_data_order_armv8:
2669 .LARMv8: 2665 .LARMv8:
2670 vld1.32 {q0,q1},[r0] 2666 vld1.32 {q0,q1},[r0]
2671 # ifdef»__APPLE__ 2667 # ifdef __thumb2__
2672 » sub» r3,r3,#256+32
2673 # elif» defined(__thumb2__)
2674 adr r3,.LARMv8 2668 adr r3,.LARMv8
2675 sub r3,r3,#.LARMv8-K256 2669 sub r3,r3,#.LARMv8-K256
2676 # else 2670 # else
2677 adrl r3,K256 2671 adrl r3,K256
2678 # endif 2672 # endif
2679 add r2,r1,r2,lsl#6 @ len to point at the end of inp 2673 add r2,r1,r2,lsl#6 @ len to point at the end of inp
2680 2674
2681 .Loop_v8: 2675 .Loop_v8:
2682 » vld1.8» {q8,q9},[r1]! 2676 » vld1.8» » {q8-q9},[r1]!
2683 » vld1.8» {q10,q11},[r1]! 2677 » vld1.8» » {q10-q11},[r1]!
2684 » vld1.32»{q12},[r3]! 2678 » vld1.32»» {q12},[r3]!
2685 vrev32.8 q8,q8 2679 vrev32.8 q8,q8
2686 vrev32.8 q9,q9 2680 vrev32.8 q9,q9
2687 vrev32.8 q10,q10 2681 vrev32.8 q10,q10
2688 vrev32.8 q11,q11 2682 vrev32.8 q11,q11
2689 » vmov» q14,q0» @ offload 2683 » vmov» » q14,q0» @ offload
2690 » vmov» q15,q1 2684 » vmov» » q15,q1
2691 » teq» r1,r2 2685 » teq» » r1,r2
2692 » vld1.32»{q13},[r3]! 2686 » vld1.32»» {q13},[r3]!
2693 vadd.i32 q12,q12,q8 2687 vadd.i32 q12,q12,q8
2694 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 2688 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
2695 » vmov» q2,q0 2689 » vmov» » q2,q0
2696 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2690 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2697 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2691 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2698 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 2692 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
2699 » vld1.32»{q12},[r3]! 2693 » vld1.32»» {q12},[r3]!
2700 vadd.i32 q13,q13,q9 2694 vadd.i32 q13,q13,q9
2701 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 2695 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
2702 » vmov» q2,q0 2696 » vmov» » q2,q0
2703 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2697 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2704 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2698 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2705 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 2699 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
2706 » vld1.32»{q13},[r3]! 2700 » vld1.32»» {q13},[r3]!
2707 vadd.i32 q12,q12,q10 2701 vadd.i32 q12,q12,q10
2708 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 2702 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
2709 » vmov» q2,q0 2703 » vmov» » q2,q0
2710 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2704 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2711 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2705 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2712 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 2706 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
2713 » vld1.32»{q12},[r3]! 2707 » vld1.32»» {q12},[r3]!
2714 vadd.i32 q13,q13,q11 2708 vadd.i32 q13,q13,q11
2715 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 2709 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
2716 » vmov» q2,q0 2710 » vmov» » q2,q0
2717 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2711 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2718 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2712 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2719 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 2713 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
2720 » vld1.32»{q13},[r3]! 2714 » vld1.32»» {q13},[r3]!
2721 vadd.i32 q12,q12,q8 2715 vadd.i32 q12,q12,q8
2722 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 2716 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
2723 » vmov» q2,q0 2717 » vmov» » q2,q0
2724 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2718 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2725 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2719 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2726 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 2720 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
2727 » vld1.32»{q12},[r3]! 2721 » vld1.32»» {q12},[r3]!
2728 vadd.i32 q13,q13,q9 2722 vadd.i32 q13,q13,q9
2729 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 2723 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
2730 » vmov» q2,q0 2724 » vmov» » q2,q0
2731 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2725 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2732 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2726 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2733 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 2727 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
2734 » vld1.32»{q13},[r3]! 2728 » vld1.32»» {q13},[r3]!
2735 vadd.i32 q12,q12,q10 2729 vadd.i32 q12,q12,q10
2736 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 2730 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
2737 » vmov» q2,q0 2731 » vmov» » q2,q0
2738 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2732 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2739 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2733 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2740 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 2734 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
2741 » vld1.32»{q12},[r3]! 2735 » vld1.32»» {q12},[r3]!
2742 vadd.i32 q13,q13,q11 2736 vadd.i32 q13,q13,q11
2743 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 2737 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
2744 » vmov» q2,q0 2738 » vmov» » q2,q0
2745 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2739 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2746 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2740 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2747 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 2741 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
2748 » vld1.32»{q13},[r3]! 2742 » vld1.32»» {q13},[r3]!
2749 vadd.i32 q12,q12,q8 2743 vadd.i32 q12,q12,q8
2750 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 2744 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
2751 » vmov» q2,q0 2745 » vmov» » q2,q0
2752 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2746 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2753 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2747 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2754 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 2748 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
2755 » vld1.32»{q12},[r3]! 2749 » vld1.32»» {q12},[r3]!
2756 vadd.i32 q13,q13,q9 2750 vadd.i32 q13,q13,q9
2757 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 2751 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
2758 » vmov» q2,q0 2752 » vmov» » q2,q0
2759 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2753 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2760 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2754 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2761 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 2755 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
2762 » vld1.32»{q13},[r3]! 2756 » vld1.32»» {q13},[r3]!
2763 vadd.i32 q12,q12,q10 2757 vadd.i32 q12,q12,q10
2764 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 2758 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
2765 » vmov» q2,q0 2759 » vmov» » q2,q0
2766 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2760 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2767 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2761 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2768 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 2762 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
2769 » vld1.32»{q12},[r3]! 2763 » vld1.32»» {q12},[r3]!
2770 vadd.i32 q13,q13,q11 2764 vadd.i32 q13,q13,q11
2771 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 2765 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
2772 » vmov» q2,q0 2766 » vmov» » q2,q0
2773 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2767 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2774 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2768 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2775 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 2769 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
2776 » vld1.32»{q13},[r3]! 2770 » vld1.32»» {q13},[r3]!
2777 vadd.i32 q12,q12,q8 2771 vadd.i32 q12,q12,q8
2778 » vmov» q2,q0 2772 » vmov» » q2,q0
2779 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2773 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2780 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2774 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2781 2775
2782 » vld1.32»{q12},[r3]! 2776 » vld1.32»» {q12},[r3]!
2783 vadd.i32 q13,q13,q9 2777 vadd.i32 q13,q13,q9
2784 » vmov» q2,q0 2778 » vmov» » q2,q0
2785 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2779 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2786 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2780 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2787 2781
2788 » vld1.32»{q13},[r3] 2782 » vld1.32»» {q13},[r3]
2789 vadd.i32 q12,q12,q10 2783 vadd.i32 q12,q12,q10
2790 » sub» r3,r3,#256-16» @ rewind 2784 » sub» » r3,r3,#256-16» @ rewind
2791 » vmov» q2,q0 2785 » vmov» » q2,q0
2792 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2786 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2793 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2787 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2794 2788
2795 vadd.i32 q13,q13,q11 2789 vadd.i32 q13,q13,q11
2796 » vmov» q2,q0 2790 » vmov» » q2,q0
2797 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2791 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2798 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2792 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2799 2793
2800 vadd.i32 q0,q0,q14 2794 vadd.i32 q0,q0,q14
2801 vadd.i32 q1,q1,q15 2795 vadd.i32 q1,q1,q15
2802 » it» ne 2796 » it» » ne
2803 » bne» .Loop_v8 2797 » bne» » .Loop_v8
2804 2798
2805 » vst1.32»{q0,q1},[r0] 2799 » vst1.32»» {q0,q1},[r0]
2806 2800
2807 bx lr @ bx lr 2801 bx lr @ bx lr
2808 .size sha256_block_data_order_armv8,.-sha256_block_data_order_armv8 2802 .size sha256_block_data_order_armv8,.-sha256_block_data_order_armv8
2809 #endif 2803 #endif
2810 .byte» 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,47,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 2804 .asciz "SHA256 block transform for ARMv4/NEON/ARMv8, CRYPTOGAMS by <appro@openssl.org>"
2811 .align» 2
2812 .align 2 2805 .align 2
2813 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 2806 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
2814 .comm» OPENSSL_armcap_P,4,4 2807 .comm OPENSSL_armcap_P,4,4
2815 .hidden»OPENSSL_armcap_P 2808 .hidden OPENSSL_armcap_P
2816 #endif 2809 #endif
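
Note on the dispatch logic touched by the hunks above: sha256_block_data_order loads the OPENSSL_armcap_P capability word PC-relatively (the reverted side computes the offset from the global symbol instead of a local label and drops the Apple-only extra load), then branches to the ARMv8 or NEON body. A minimal C sketch of that branch chain follows; the capability bit values are assumed to match arm_arch.h, the signatures are simplified, and the separate _c fallback is hypothetical (in the file itself the generic path is the body of the same routine).

/*
 * Minimal sketch, not part of this CL: the capability dispatch that the
 * prologue of sha256_block_data_order performs in assembly (tst/bne on
 * ARMV8_SHA256 and ARMV7_NEON), written out in C.
 */
#include <stddef.h>
#include <stdint.h>

#define ARMV7_NEON   (1 << 0)   /* assumed to match arm_arch.h */
#define ARMV8_SHA256 (1 << 4)   /* assumed to match arm_arch.h */

extern uint32_t OPENSSL_armcap_P;   /* CPU feature bits, probed at startup */

/* In the assembly: r0 = 8-word hash state, r1 = input, r2 = block count. */
void sha256_block_data_order_armv8(uint32_t *state, const uint8_t *in, size_t num);
void sha256_block_data_order_neon(uint32_t *state, const uint8_t *in, size_t num);
void sha256_block_data_order_c(uint32_t *state, const uint8_t *in, size_t num);  /* hypothetical name */

void sha256_block_data_order(uint32_t *state, const uint8_t *in, size_t num) {
    if (OPENSSL_armcap_P & ARMV8_SHA256) {
        sha256_block_data_order_armv8(state, in, num);  /* SHA-256 instructions (.LARMv8) */
    } else if (OPENSSL_armcap_P & ARMV7_NEON) {
        sha256_block_data_order_neon(state, in, num);   /* NEON vector path (.LNEON) */
    } else {
        sha256_block_data_order_c(state, in, num);      /* generic ARMv4 integer path */
    }
}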