Chromium Code Reviews

Side by Side Diff: third_party/boringssl/linux-arm/crypto/sha/sha256-armv4.S

Issue 1136743004: Roll src/third_party/boringssl/src 68de407:771a138 (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 7 months ago
1 1
2 @ ==================================================================== 2 @ ====================================================================
3 @ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL 3 @ Written by Andy Polyakov <appro@openssl.org> for the OpenSSL
4 @ project. The module is, however, dual licensed under OpenSSL and 4 @ project. The module is, however, dual licensed under OpenSSL and
5 @ CRYPTOGAMS licenses depending on where you obtain it. For further 5 @ CRYPTOGAMS licenses depending on where you obtain it. For further
6 @ details see http://www.openssl.org/~appro/cryptogams/. 6 @ details see http://www.openssl.org/~appro/cryptogams/.
7 @ 7 @
8 @ Permission to use under GPL terms is granted. 8 @ Permission to use under GPL terms is granted.
9 @ ==================================================================== 9 @ ====================================================================
10 10
(...skipping 29 matching lines...)
40 # include "arm_arch.h" 40 # include "arm_arch.h"
41 #else 41 #else
42 # define __ARM_ARCH__ __LINUX_ARM_ARCH__ 42 # define __ARM_ARCH__ __LINUX_ARM_ARCH__
43 # define __ARM_MAX_ARCH__ 7 43 # define __ARM_MAX_ARCH__ 7
44 #endif 44 #endif
45 45
46 .text 46 .text
47 #if __ARM_ARCH__<7 47 #if __ARM_ARCH__<7
48 .code 32 48 .code 32
49 #else 49 #else
50 .syntax unified 50 .syntax unified
51 # ifdef __thumb2__ 51 # if defined(__thumb2__) && !defined(__APPLE__)
52 # define adrl adr 52 # define adrl adr
53 .thumb 53 .thumb
54 # else 54 # else
55 .code 32 55 .code 32
56 # endif 56 # endif
57 #endif 57 #endif
58 58
59 .type K256,%object 59 .type K256,%object
60 .align 5 60 .align 5
61 K256: 61 K256:
62 .word 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 62 .word 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
63 .word 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 63 .word 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
64 .word 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 64 .word 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
65 .word 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 65 .word 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
66 .word 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc 66 .word 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
67 .word 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da 67 .word 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
68 .word 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 68 .word 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
69 .word 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 69 .word 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
70 .word 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 70 .word 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
71 .word 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 71 .word 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
72 .word 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 72 .word 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
73 .word 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 73 .word 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
74 .word 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 74 .word 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
75 .word 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 75 .word 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
76 .word 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 76 .word 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
77 .word 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 77 .word 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
78 .size K256,.-K256 78 .size K256,.-K256
79 .word 0 @ terminator 79 .word 0 @ terminator
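
The K256 table above is the standard FIPS 180-4 constant schedule: the first 32 fractional bits of the cube roots of the first 64 primes. A minimal Python sketch that rederives it (the helper names are illustrative, not part of this file):

    def iroot3(n):
        # integer cube root by Newton's method
        x = 1 << ((n.bit_length() + 2) // 3)
        while True:
            y = (2 * x + n // (x * x)) // 3
            if y >= x:
                return x
            x = y

    def k256():
        primes, n = [], 2
        while len(primes) < 64:
            if all(n % p for p in primes):
                primes.append(n)
            n += 1
        # cbrt(p << 96) == cbrt(p) * 2**32; the low 32 bits are the fraction
        return [iroot3(p << 96) & 0xffffffff for p in primes]

    assert k256()[0] == 0x428a2f98 and k256()[-1] == 0xc67178f2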
80 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 80 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
81 .LOPENSSL_armcap: 81 .LOPENSSL_armcap:
82 .word OPENSSL_armcap_P-sha256_block_data_order 82 .word OPENSSL_armcap_P-.Lsha256_block_data_order
83 #endif 83 #endif
84 .align 5 84 .align 5
85 85
86 .global sha256_block_data_order 86 .globl sha256_block_data_order
87 .type sha256_block_data_order,%function 87 .type sha256_block_data_order,%function
88 sha256_block_data_order: 88 sha256_block_data_order:
89 .Lsha256_block_data_order:
89 #if __ARM_ARCH__<7 90 #if __ARM_ARCH__<7
90 sub r3,pc,#8 @ sha256_block_data_order 91 sub r3,pc,#8 @ sha256_block_data_order
91 #else 92 #else
92 adr r3,sha256_block_data_order 93 adr r3,sha256_block_data_order
93 #endif 94 #endif
94 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 95 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
95 ldr r12,.LOPENSSL_armcap 96 ldr r12,.LOPENSSL_armcap
96 ldr r12,[r3,r12] @ OPENSSL_armcap_P 97 ldr r12,[r3,r12] @ OPENSSL_armcap_P
98 #ifdef __APPLE__
99 ldr r12,[r12]
100 #endif
97 tst r12,#ARMV8_SHA256 101 tst r12,#ARMV8_SHA256
98 bne .LARMv8 102 bne .LARMv8
99 tst r12,#ARMV7_NEON 103 tst r12,#ARMV7_NEON
100 bne .LNEON 104 bne .LNEON
101 #endif 105 #endif
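
The block above is the runtime dispatch: it loads OPENSSL_armcap_P through a PC-relative offset, then branches to the ARMv8 or NEON body when the corresponding capability bit is set, falling through to the portable ARMv4 code otherwise (the extra load under __APPLE__ resolves one more level of indirection, since Mach-O reaches the symbol via a pointer). A sketch of the control flow, assuming the bit values from OpenSSL's arm_arch.h; the Python function names are hypothetical stand-ins for the labels in this file:

    ARMV7_NEON   = 1 << 0   # assumed value from arm_arch.h
    ARMV8_SHA256 = 1 << 4   # assumed value from arm_arch.h

    def sha256_block_data_order(state, data, armcap):
        if armcap & ARMV8_SHA256:
            return sha256_block_armv8(state, data)   # .LARMv8
        if armcap & ARMV7_NEON:
            return sha256_block_neon(state, data)    # .LNEON
        return sha256_block_scalar(state, data)      # ARMv4 fallback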
102 add r2,r1,r2,lsl#6 @ len to point at the end of inp 106 add r2,r1,r2,lsl#6 @ len to point at the end of inp
103 stmdb sp!,{r0,r1,r2,r4-r11,lr} 107 stmdb sp!,{r0,r1,r2,r4-r11,lr}
104 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11} 108 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11}
105 sub r14,r3,#256+32 @ K256 109 sub r14,r3,#256+32 @ K256
106 sub sp,sp,#16*4 @ alloca(X[16]) 110 sub sp,sp,#16*4 @ alloca(X[16])
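
The elided lines that follow unroll the 64 rounds of the SHA-256 compression function over the 16-word window just allocated. For reference, a compact Python model of the standard round logic (FIPS 180-4) that the ror/eor sequences implement, with K256 as the table above and W already expanded to 64 words:

    M = 0xffffffff
    def rotr(x, n): return ((x >> n) | (x << (32 - n))) & M

    def compress(H, W, K256):
        a, b, c, d, e, f, g, h = H
        for t in range(64):
            S1  = rotr(e, 6) ^ rotr(e, 11) ^ rotr(e, 25)
            ch  = (e & f) ^ (~e & g)
            T1  = (h + S1 + ch + K256[t] + W[t]) & M
            S0  = rotr(a, 2) ^ rotr(a, 13) ^ rotr(a, 22)
            maj = (a & b) ^ (a & c) ^ (b & c)
            a, b, c, d, e, f, g, h = (T1 + S0 + maj) & M, a, b, c, (d + T1) & M, e, f, g
        return [(x + y) & M for x, y in zip(H, (a, b, c, d, e, f, g, h))]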
(...skipping 1743 matching lines...)
1850 ldr r12,[sp,#18*4] @ pull inp+len 1854 ldr r12,[sp,#18*4] @ pull inp+len
1851 add r10,r10,r0 1855 add r10,r10,r0
1852 add r11,r11,r2 1856 add r11,r11,r2
1853 stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11} 1857 stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11}
1854 cmp r1,r12 1858 cmp r1,r12
1855 sub r14,r14,#256 @ rewind Ktbl 1859 sub r14,r14,#256 @ rewind Ktbl
1856 bne .Loop 1860 bne .Loop
1857 1861
1858 add sp,sp,#19*4 @ destroy frame 1862 add sp,sp,#19*4 @ destroy frame
1859 #if __ARM_ARCH__>=5 1863 #if __ARM_ARCH__>=5
1860 ldmia sp!,{r4-r11,pc} 1864 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,pc}
1861 #else 1865 #else
1862 ldmia sp!,{r4-r11,lr} 1866 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,lr}
1863 tst lr,#1 1867 tst lr,#1
1864 moveq pc,lr @ be binary compatible with V4, yet 1868 moveq pc,lr @ be binary compatible with V4, yet
1865 .word 0xe12fff1e @ interoperable with Thumb ISA:- ) 1869 .word 0xe12fff1e @ interoperable with Thumb ISA:-)
1866 #endif 1870 #endif
1867 .size sha256_block_data_order,.-sha256_block_data_order 1871 .size sha256_block_data_order,.-sha256_block_data_order
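
The pre-ARMv5 epilogue above cannot assume bx exists, so it returns with moveq pc,lr for ARM-state callers and falls through to a hand-assembled bx lr, emitted as data, when lr has its Thumb bit set. The constant is easy to check:

    # 0xe12fff1e is the ARM encoding of "bx lr":
    # cond = AL (0xe), BX opcode pattern 0x012fff10, Rm = r14 (lr)
    assert (0xe << 28) | 0x012fff10 | 14 == 0xe12fff1e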
1868 #if __ARM_MAX_ARCH__>=7 1872 #if __ARM_MAX_ARCH__>=7
1869 .arch armv7-a 1873 .arch armv7-a
1870 .fpu neon 1874 .fpu neon
1871 1875
1872 .global sha256_block_data_order_neon 1876 .globl sha256_block_data_order_neon
1873 .type sha256_block_data_order_neon,%function 1877 .type sha256_block_data_order_neon,%function
1874 .align 4 1878 .align 4
1875 sha256_block_data_order_neon: 1879 sha256_block_data_order_neon:
1876 .LNEON: 1880 .LNEON:
1877 stmdb sp!,{r4-r12,lr} 1881 stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,lr}
1878 1882
1879 sub r11,sp,#16*4+16 1883 sub r11,sp,#16*4+16
1880 adrl r14,K256 1884 adr r14,K256
1881 bic r11,r11,#15 @ align for 128-bit stores 1885 bic r11,r11,#15 @ align for 128-bit stores
1882 mov r12,sp 1886 mov r12,sp
1883 mov sp,r11 @ alloca 1887 mov sp,r11 @ alloca
1884 add r2,r1,r2,lsl#6 @ len to point at the end of inp 1888 add r2,r1,r2,lsl#6 @ len to point at the end of inp
1885 1889
1886 vld1.8 {q0},[r1]! 1890 vld1.8 {q0},[r1]!
1887 vld1.8 {q1},[r1]! 1891 vld1.8 {q1},[r1]!
1888 vld1.8 {q2},[r1]! 1892 vld1.8 {q2},[r1]!
1889 vld1.8 {q3},[r1]! 1893 vld1.8 {q3},[r1]!
1890 vld1.32 {q8},[r14,:128]! 1894 vld1.32 {q8},[r14,:128]!
1891 vld1.32 {q9},[r14,:128]! 1895 vld1.32 {q9},[r14,:128]!
1892 vld1.32 {q10},[r14,:128]! 1896 vld1.32 {q10},[r14,:128]!
1893 vld1.32 {q11},[r14,:128]! 1897 vld1.32 {q11},[r14,:128]!
1894 vrev32.8 q0,q0 @ yes, even on 1898 vrev32.8 q0,q0 @ yes, even on
1895 str r0,[sp,#64] 1899 str r0,[sp,#64]
1896 vrev32.8 q1,q1 @ big-endian 1900 vrev32.8 q1,q1 @ big-endian
1897 str r1,[sp,#68] 1901 str r1,[sp,#68]
1898 mov r1,sp 1902 mov r1,sp
1899 vrev32.8 q2,q2 1903 vrev32.8 q2,q2
1900 str r2,[sp,#72] 1904 str r2,[sp,#72]
1901 vrev32.8 q3,q3 1905 vrev32.8 q3,q3
1902 str r12,[sp,#76] @ save original sp 1906 str r12,[sp,#76] @ save original sp
1903 vadd.i32 q8,q8,q0 1907 vadd.i32 q8,q8,q0
1904 vadd.i32 q9,q9,q1 1908 vadd.i32 q9,q9,q1
1905 vst1.32 {q8},[r1,:128]! 1909 vst1.32 {q8},[r1,:128]!
1906 vadd.i32 q10,q10,q2 1910 vadd.i32 q10,q10,q2
1907 vst1.32 {q9},[r1,:128]! 1911 vst1.32 {q9},[r1,:128]!
1908 vadd.i32 q11,q11,q3 1912 vadd.i32 q11,q11,q3
1909 vst1.32 {q10},[r1,:128]! 1913 vst1.32 {q10},[r1,:128]!
1910 vst1.32 {q11},[r1,:128]! 1914 vst1.32 {q11},[r1,:128]!
1911 1915
1912 ldmia r0,{r4-r11} 1916 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11}
1913 sub r1,r1,#64 1917 sub r1,r1,#64
1914 ldr r2,[sp,#0] 1918 ldr r2,[sp,#0]
1915 eor r12,r12,r12 1919 eor r12,r12,r12
1916 eor r3,r5,r6 1920 eor r3,r5,r6
1917 b .L_00_48 1921 b .L_00_48
1918 1922
1919 .align 4 1923 .align 4
1920 .L_00_48: 1924 .L_00_48:
1921 vext.8 q8,q0,q1,#4 1925 vext.8 q8,q0,q1,#4
1922 add r11,r11,r2 1926 add r11,r11,r2
1923 eor r2,r9,r10 1927 eor r2,r9,r10
1924 eor r0,r8,r8,ror#5 1928 eor r0,r8,r8,ror#5
1925 vext.8 q9,q2,q3,#4 1929 vext.8 q9,q2,q3,#4
1926 add r4,r4,r12 1930 add r4,r4,r12
1927 and r2,r2,r8 1931 and r2,r2,r8
(...skipping 380 matching lines...)
2308 and r12,r12,r3 2312 and r12,r12,r3
2309 add r8,r8,r4 2313 add r8,r8,r4
2310 vst1.32 {q8},[r1,:128]! 2314 vst1.32 {q8},[r1,:128]!
2311 add r4,r4,r0,ror#2 2315 add r4,r4,r0,ror#2
2312 eor r12,r12,r6 2316 eor r12,r12,r6
2313 teq r2,#0 @ check for K256 terminator 2317 teq r2,#0 @ check for K256 terminator
2314 ldr r2,[sp,#0] 2318 ldr r2,[sp,#0]
2315 sub r1,r1,#64 2319 sub r1,r1,#64
2316 bne .L_00_48 2320 bne .L_00_48
2317 2321
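
In the .L_00_48 loop above, each iteration interleaves four scalar rounds with one vectorized message-schedule step: vext.8 builds the shifted word windows and the elided shift/xor sequences compute the standard σ0/σ1 expansion four words at a time. A scalar Python model of that expansion:

    M = 0xffffffff
    def rotr(x, n): return ((x >> n) | (x << (32 - n))) & M
    def s0(x): return rotr(x, 7) ^ rotr(x, 18) ^ (x >> 3)
    def s1(x): return rotr(x, 17) ^ rotr(x, 19) ^ (x >> 10)

    def expand(w):                     # w: the 16 words of one input block
        w = list(w)
        for t in range(16, 64):
            w.append((s1(w[t-2]) + w[t-7] + s0(w[t-15]) + w[t-16]) & M)
        return w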
2318 ldr r1,[sp,#68] 2322 ldr r1,[sp,#68]
2319 ldr r0,[sp,#72] 2323 ldr r0,[sp,#72]
2320 sub r14,r14,#256 @ rewind r14 2324 sub r14,r14,#256 @ rewind r14
2321 teq r1,r0 2325 teq r1,r0
2322 it eq 2326 it eq
2323 subeq r1,r1,#64 @ avoid SEGV 2327 subeq r1,r1,#64 @ avoid SEGV
2324 vld1.8 {q0},[r1]! @ load next input block 2328 vld1.8 {q0},[r1]! @ load next input block
2325 vld1.8 {q1},[r1]! 2329 vld1.8 {q1},[r1]!
2326 vld1.8 {q2},[r1]! 2330 vld1.8 {q2},[r1]!
2327 vld1.8 {q3},[r1]! 2331 vld1.8 {q3},[r1]!
2328 it ne 2332 it ne
2329 strne r1,[sp,#68] 2333 strne r1,[sp,#68]
2330 mov r1,sp 2334 mov r1,sp
2331 add r11,r11,r2 2335 add r11,r11,r2
2332 eor r2,r9,r10 2336 eor r2,r9,r10
2333 eor r0,r8,r8,ror#5 2337 eor r0,r8,r8,ror#5
2334 add r4,r4,r12 2338 add r4,r4,r12
2335 vld1.32 {q8},[r14,:128]! 2339 vld1.32 {q8},[r14,:128]!
2336 and r2,r2,r8 2340 and r2,r2,r8
2337 eor r12,r0,r8,ror#19 2341 eor r12,r0,r8,ror#19
2338 eor r0,r4,r4,ror#11 2342 eor r0,r4,r4,ror#11
2339 eor r2,r2,r10 2343 eor r2,r2,r10
2340 vrev32.8 q0,q0 2344 vrev32.8 q0,q0
(...skipping 289 matching lines...)
2630 add r7,r7,r1 2634 add r7,r7,r1
2631 ldr r1,[r2,#28] 2635 ldr r1,[r2,#28]
2632 add r8,r8,r0 2636 add r8,r8,r0
2633 str r4,[r2],#4 2637 str r4,[r2],#4
2634 add r9,r9,r12 2638 add r9,r9,r12
2635 str r5,[r2],#4 2639 str r5,[r2],#4
2636 add r10,r10,r3 2640 add r10,r10,r3
2637 str r6,[r2],#4 2641 str r6,[r2],#4
2638 add r11,r11,r1 2642 add r11,r11,r1
2639 str r7,[r2],#4 2643 str r7,[r2],#4
2640 stmia r2,{r8-r11} 2644 stmia r2,{r8,r9,r10,r11}
2641 2645
2642 ittte ne 2646 ittte ne
2643 movne r1,sp 2647 movne r1,sp
2644 ldrne r2,[sp,#0] 2648 ldrne r2,[sp,#0]
2645 eorne r12,r12,r12 2649 eorne r12,r12,r12
2646 ldreq sp,[sp,#76] @ restore original sp 2650 ldreq sp,[sp,#76] @ restore original sp
2647 itt ne 2651 itt ne
2648 eorne r3,r5,r6 2652 eorne r3,r5,r6
2649 bne .L_00_48 2653 bne .L_00_48
2650 2654
2651 ldmia sp!,{r4-r12,pc} 2655 ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,r11,r12,pc}
2652 .size sha256_block_data_order_neon,.-sha256_block_data_order_neon 2656 .size sha256_block_data_order_neon,.-sha256_block_data_order_neon
2653 #endif 2657 #endif
2654 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 2658 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
2655 2659
2656 # ifdef __thumb2__ 2660 # if defined(__thumb2__) && !defined(__APPLE__)
2657 # define INST(a,b,c,d) .byte c,d|0xc,a,b 2661 # define INST(a,b,c,d) .byte c,d|0xc,a,b
2658 # else 2662 # else
2659 # define INST(a,b,c,d) .byte a,b,c,d 2663 # define INST(a,b,c,d) .byte a,b,c,d
2660 # endif 2664 # endif
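
The INST macro above emits the ARMv8 crypto instructions as raw bytes so that assemblers without ARMv8 support still build this file. In ARM state a 32-bit instruction is stored as one little-endian word (bytes a,b,c,d); in Thumb-2 it is stored as two little-endian halfwords, so the bytes reorder to c,d,a,b, and OR-ing 0xc into d turns the ARM-state NEON prefix 0xf3, used by all the sha256* encodings below, into its Thumb-2 counterpart 0xff. A sketch of the shuffle (the Python form is illustrative only):

    def inst(a, b, c, d, thumb2=False):
        # Mirrors the byte order chosen by the INST(a,b,c,d) macro
        if thumb2:
            return bytes([c, d | 0x0c, a, b])   # two LE halfwords, 0xf3 -> 0xff
        return bytes([a, b, c, d])              # one LE word

    # sha256h q0,q1,q12 (ARM word 0xf3020c68, cf. the first INST line below):
    assert inst(0x68, 0x0c, 0x02, 0xf3) == bytes([0x68, 0x0c, 0x02, 0xf3])
    assert inst(0x68, 0x0c, 0x02, 0xf3, thumb2=True) == bytes([0x02, 0xff, 0x68, 0x0c])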
2661 2665
2662 .type sha256_block_data_order_armv8,%function 2666 .type sha256_block_data_order_armv8,%function
2663 .align 5 2667 .align 5
2664 sha256_block_data_order_armv8: 2668 sha256_block_data_order_armv8:
2665 .LARMv8: 2669 .LARMv8:
2666 vld1.32 {q0,q1},[r0] 2670 vld1.32 {q0,q1},[r0]
2667 # ifdef __thumb2__ 2671 # ifdef __APPLE__
2672 sub r3,r3,#256+32
2673 # elif defined(__thumb2__)
2668 adr r3,.LARMv8 2674 adr r3,.LARMv8
2669 sub r3,r3,#.LARMv8-K256 2675 sub r3,r3,#.LARMv8-K256
2670 # else 2676 # else
2671 adrl r3,K256 2677 adrl r3,K256
2672 # endif 2678 # endif
2673 add r2,r1,r2,lsl#6 @ len to point at the end of inp 2679 add r2,r1,r2,lsl#6 @ len to point at the end of inp
2674 2680
2675 .Loop_v8: 2681 .Loop_v8:
2676 vld1.8 {q8-q9},[r1]! 2682 vld1.8 {q8,q9},[r1]!
2677 vld1.8 {q10-q11},[r1]! 2683 vld1.8 {q10,q11},[r1]!
2678 vld1.32 {q12},[r3]! 2684 vld1.32 {q12},[r3]!
2679 vrev32.8 q8,q8 2685 vrev32.8 q8,q8
2680 vrev32.8 q9,q9 2686 vrev32.8 q9,q9
2681 vrev32.8 q10,q10 2687 vrev32.8 q10,q10
2682 vrev32.8 q11,q11 2688 vrev32.8 q11,q11
2683 vmov q14,q0 @ offload 2689 vmov q14,q0 @ offload
2684 vmov q15,q1 2690 vmov q15,q1
2685 teq r1,r2 2691 teq r1,r2
2686 vld1.32 {q13},[r3]! 2692 vld1.32 {q13},[r3]!
2687 vadd.i32 q12,q12,q8 2693 vadd.i32 q12,q12,q8
2688 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 2694 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
2689 vmov q2,q0 2695 vmov q2,q0
2690 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2696 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2691 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2697 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2692 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 2698 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
2693 vld1.32 {q12},[r3]! 2699 vld1.32 {q12},[r3]!
2694 vadd.i32 q13,q13,q9 2700 vadd.i32 q13,q13,q9
2695 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 2701 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
2696 vmov q2,q0 2702 vmov q2,q0
2697 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2703 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2698 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2704 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2699 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 2705 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
2700 vld1.32 {q13},[r3]! 2706 vld1.32 {q13},[r3]!
2701 vadd.i32 q12,q12,q10 2707 vadd.i32 q12,q12,q10
2702 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 2708 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
2703 vmov q2,q0 2709 vmov q2,q0
2704 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2710 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2705 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2711 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2706 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 2712 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
2707 vld1.32 {q12},[r3]! 2713 vld1.32 {q12},[r3]!
2708 vadd.i32 q13,q13,q11 2714 vadd.i32 q13,q13,q11
2709 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 2715 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
2710 vmov q2,q0 2716 vmov q2,q0
2711 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2717 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2712 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2718 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2713 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 2719 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
2714 vld1.32 {q13},[r3]! 2720 vld1.32 {q13},[r3]!
2715 vadd.i32 q12,q12,q8 2721 vadd.i32 q12,q12,q8
2716 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 2722 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
2717 vmov q2,q0 2723 vmov q2,q0
2718 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2724 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2719 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2725 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2720 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 2726 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
2721 vld1.32 {q12},[r3]! 2727 vld1.32 {q12},[r3]!
2722 vadd.i32 q13,q13,q9 2728 vadd.i32 q13,q13,q9
2723 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 2729 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
2724 vmov q2,q0 2730 vmov q2,q0
2725 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2731 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2726 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2732 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2727 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 2733 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
2728 vld1.32 {q13},[r3]! 2734 vld1.32 {q13},[r3]!
2729 vadd.i32 q12,q12,q10 2735 vadd.i32 q12,q12,q10
2730 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 2736 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
2731 vmov q2,q0 2737 vmov q2,q0
2732 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2738 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2733 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2739 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2734 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 2740 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
2735 vld1.32 {q12},[r3]! 2741 vld1.32 {q12},[r3]!
2736 vadd.i32 q13,q13,q11 2742 vadd.i32 q13,q13,q11
2737 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 2743 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
2738 vmov q2,q0 2744 vmov q2,q0
2739 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2745 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2740 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2746 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2741 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 2747 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
2742 vld1.32 {q13},[r3]! 2748 vld1.32 {q13},[r3]!
2743 vadd.i32 q12,q12,q8 2749 vadd.i32 q12,q12,q8
2744 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9 2750 INST(0xe2,0x03,0xfa,0xf3) @ sha256su0 q8,q9
2745 vmov q2,q0 2751 vmov q2,q0
2746 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2752 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2747 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2753 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2748 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11 2754 INST(0xe6,0x0c,0x64,0xf3) @ sha256su1 q8,q10,q11
2749 vld1.32 {q12},[r3]! 2755 vld1.32 {q12},[r3]!
2750 vadd.i32 q13,q13,q9 2756 vadd.i32 q13,q13,q9
2751 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10 2757 INST(0xe4,0x23,0xfa,0xf3) @ sha256su0 q9,q10
2752 vmov q2,q0 2758 vmov q2,q0
2753 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2759 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2754 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2760 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2755 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8 2761 INST(0xe0,0x2c,0x66,0xf3) @ sha256su1 q9,q11,q8
2756 vld1.32 {q13},[r3]! 2762 vld1.32 {q13},[r3]!
2757 vadd.i32 q12,q12,q10 2763 vadd.i32 q12,q12,q10
2758 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11 2764 INST(0xe6,0x43,0xfa,0xf3) @ sha256su0 q10,q11
2759 vmov q2,q0 2765 vmov q2,q0
2760 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2766 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2761 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2767 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2762 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9 2768 INST(0xe2,0x4c,0x60,0xf3) @ sha256su1 q10,q8,q9
2763 vld1.32 {q12},[r3]! 2769 vld1.32 {q12},[r3]!
2764 vadd.i32 q13,q13,q11 2770 vadd.i32 q13,q13,q11
2765 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8 2771 INST(0xe0,0x63,0xfa,0xf3) @ sha256su0 q11,q8
2766 vmov q2,q0 2772 vmov q2,q0
2767 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2773 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2768 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2774 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2769 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10 2775 INST(0xe4,0x6c,0x62,0xf3) @ sha256su1 q11,q9,q10
2770 vld1.32 {q13},[r3]! 2776 vld1.32 {q13},[r3]!
2771 vadd.i32 q12,q12,q8 2777 vadd.i32 q12,q12,q8
2772 vmov q2,q0 2778 vmov q2,q0
2773 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2779 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2774 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2780 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2775 2781
2776 vld1.32 {q12},[r3]! 2782 vld1.32 {q12},[r3]!
2777 vadd.i32 q13,q13,q9 2783 vadd.i32 q13,q13,q9
2778 vmov q2,q0 2784 vmov q2,q0
2779 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2785 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2780 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2786 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2781 2787
2782 vld1.32 {q13},[r3] 2788 vld1.32 {q13},[r3]
2783 vadd.i32 q12,q12,q10 2789 vadd.i32 q12,q12,q10
2784 sub r3,r3,#256-16 @ rewind 2790 sub r3,r3,#256-16 @ rewind
2785 vmov q2,q0 2791 vmov q2,q0
2786 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12 2792 INST(0x68,0x0c,0x02,0xf3) @ sha256h q0,q1,q12
2787 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12 2793 INST(0x68,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q12
2788 2794
2789 vadd.i32 q13,q13,q11 2795 vadd.i32 q13,q13,q11
2790 vmov q2,q0 2796 vmov q2,q0
2791 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13 2797 INST(0x6a,0x0c,0x02,0xf3) @ sha256h q0,q1,q13
2792 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13 2798 INST(0x6a,0x2c,0x14,0xf3) @ sha256h2 q1,q2,q13
2793 2799
2794 vadd.i32 q0,q0,q14 2800 vadd.i32 q0,q0,q14
2795 vadd.i32 q1,q1,q15 2801 vadd.i32 q1,q1,q15
2796 it ne 2802 it ne
2797 bne .Loop_v8 2803 bne .Loop_v8
2798 2804
2799 vst1.32 {q0,q1},[r0] 2805 vst1.32 {q0,q1},[r0]
2800 2806
2801 bx lr @ bx lr 2807 bx lr @ bx lr
2802 .size sha256_block_data_order_armv8,.-sha256_block_data_order_armv8 2808 .size sha256_block_data_order_armv8,.-sha256_block_data_order_armv8
2803 #endif 2809 #endif
2804 .asciz "SHA256 block transform for ARMv4/NEON/ARMv8, CRYPTOGAMS by <appro@openssl.org>" 2810 .byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,52,47,78,69,79,78,47,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
2811 .align 2
2805 .align 2 2812 .align 2
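
The change above swaps the .asciz identification string for an equivalent .byte list (the trailing 0 is the NUL terminator); decoding the bytes reproduces the original text:

    # First twelve values of the .byte list above:
    assert bytes([83, 72, 65, 50, 53, 54, 32, 98, 108, 111, 99, 107]).decode() == "SHA256 block"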
2806 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__) 2813 #if __ARM_MAX_ARCH__>=7 && !defined(__KERNEL__)
2807 .comm OPENSSL_armcap_P,4,4 2814 .comm OPENSSL_armcap_P,4,4
2808 .hidden OPENSSL_armcap_P 2815 .hidden OPENSSL_armcap_P
2809 #endif 2816 #endif