Index: third_party/boringssl/linux-x86_64/crypto/aes/bsaes-x86_64.S
diff --git a/third_party/boringssl/linux-x86_64/crypto/aes/bsaes-x86_64.S b/third_party/boringssl/linux-x86_64/crypto/aes/bsaes-x86_64.S
index 8cfa4df5ba327ec5605f3d296c1faa087a638834..c5491ce4d00b76a92fe8a353f22a0739fd002c41 100644
--- a/third_party/boringssl/linux-x86_64/crypto/aes/bsaes-x86_64.S
+++ b/third_party/boringssl/linux-x86_64/crypto/aes/bsaes-x86_64.S
@@ -327,45 +327,45 @@ _bsaes_encrypt8_bitslice:
pxor %xmm2,%xmm5
decl %r10d
jl .Lenc_done
- pshufd $147,%xmm15,%xmm7
- pshufd $147,%xmm0,%xmm8
+ pshufd $0x93,%xmm15,%xmm7
+ pshufd $0x93,%xmm0,%xmm8
pxor %xmm7,%xmm15
- pshufd $147,%xmm3,%xmm9
+ pshufd $0x93,%xmm3,%xmm9
pxor %xmm8,%xmm0
- pshufd $147,%xmm5,%xmm10
+ pshufd $0x93,%xmm5,%xmm10
pxor %xmm9,%xmm3
- pshufd $147,%xmm2,%xmm11
+ pshufd $0x93,%xmm2,%xmm11
pxor %xmm10,%xmm5
- pshufd $147,%xmm6,%xmm12
+ pshufd $0x93,%xmm6,%xmm12
pxor %xmm11,%xmm2
- pshufd $147,%xmm1,%xmm13
+ pshufd $0x93,%xmm1,%xmm13
pxor %xmm12,%xmm6
- pshufd $147,%xmm4,%xmm14
+ pshufd $0x93,%xmm4,%xmm14
pxor %xmm13,%xmm1
pxor %xmm14,%xmm4
pxor %xmm15,%xmm8
pxor %xmm4,%xmm7
pxor %xmm4,%xmm8
- pshufd $78,%xmm15,%xmm15
+ pshufd $0x4E,%xmm15,%xmm15
pxor %xmm0,%xmm9
- pshufd $78,%xmm0,%xmm0
+ pshufd $0x4E,%xmm0,%xmm0
pxor %xmm2,%xmm12
pxor %xmm7,%xmm15
pxor %xmm6,%xmm13
pxor %xmm8,%xmm0
pxor %xmm5,%xmm11
- pshufd $78,%xmm2,%xmm7
+ pshufd $0x4E,%xmm2,%xmm7
pxor %xmm1,%xmm14
- pshufd $78,%xmm6,%xmm8
+ pshufd $0x4E,%xmm6,%xmm8
pxor %xmm3,%xmm10
- pshufd $78,%xmm5,%xmm2
+ pshufd $0x4E,%xmm5,%xmm2
pxor %xmm4,%xmm10
- pshufd $78,%xmm4,%xmm6
+ pshufd $0x4E,%xmm4,%xmm6
pxor %xmm4,%xmm11
- pshufd $78,%xmm1,%xmm5
+ pshufd $0x4E,%xmm1,%xmm5
pxor %xmm11,%xmm7
- pshufd $78,%xmm3,%xmm1
+ pshufd $0x4E,%xmm3,%xmm1
pxor %xmm12,%xmm8
pxor %xmm10,%xmm2
pxor %xmm14,%xmm6
@@ -799,24 +799,24 @@ _bsaes_decrypt8:
decl %r10d
jl .Ldec_done
- pshufd $78,%xmm15,%xmm7
- pshufd $78,%xmm2,%xmm13
+ pshufd $0x4E,%xmm15,%xmm7
+ pshufd $0x4E,%xmm2,%xmm13
pxor %xmm15,%xmm7
- pshufd $78,%xmm4,%xmm14
+ pshufd $0x4E,%xmm4,%xmm14
pxor %xmm2,%xmm13
- pshufd $78,%xmm0,%xmm8
+ pshufd $0x4E,%xmm0,%xmm8
pxor %xmm4,%xmm14
- pshufd $78,%xmm5,%xmm9
+ pshufd $0x4E,%xmm5,%xmm9
pxor %xmm0,%xmm8
- pshufd $78,%xmm3,%xmm10
+ pshufd $0x4E,%xmm3,%xmm10
pxor %xmm5,%xmm9
pxor %xmm13,%xmm15
pxor %xmm13,%xmm0
- pshufd $78,%xmm1,%xmm11
+ pshufd $0x4E,%xmm1,%xmm11
pxor %xmm3,%xmm10
pxor %xmm7,%xmm5
pxor %xmm8,%xmm3
- pshufd $78,%xmm6,%xmm12
+ pshufd $0x4E,%xmm6,%xmm12
pxor %xmm1,%xmm11
pxor %xmm14,%xmm0
pxor %xmm9,%xmm1
@@ -830,45 +830,45 @@ _bsaes_decrypt8:
pxor %xmm14,%xmm1
pxor %xmm14,%xmm6
pxor %xmm12,%xmm4
- pshufd $147,%xmm15,%xmm7
- pshufd $147,%xmm0,%xmm8
+ pshufd $0x93,%xmm15,%xmm7
+ pshufd $0x93,%xmm0,%xmm8
pxor %xmm7,%xmm15
- pshufd $147,%xmm5,%xmm9
+ pshufd $0x93,%xmm5,%xmm9
pxor %xmm8,%xmm0
- pshufd $147,%xmm3,%xmm10
+ pshufd $0x93,%xmm3,%xmm10
pxor %xmm9,%xmm5
- pshufd $147,%xmm1,%xmm11
+ pshufd $0x93,%xmm1,%xmm11
pxor %xmm10,%xmm3
- pshufd $147,%xmm6,%xmm12
+ pshufd $0x93,%xmm6,%xmm12
pxor %xmm11,%xmm1
- pshufd $147,%xmm2,%xmm13
+ pshufd $0x93,%xmm2,%xmm13
pxor %xmm12,%xmm6
- pshufd $147,%xmm4,%xmm14
+ pshufd $0x93,%xmm4,%xmm14
pxor %xmm13,%xmm2
pxor %xmm14,%xmm4
pxor %xmm15,%xmm8
pxor %xmm4,%xmm7
pxor %xmm4,%xmm8
- pshufd $78,%xmm15,%xmm15
+ pshufd $0x4E,%xmm15,%xmm15
pxor %xmm0,%xmm9
- pshufd $78,%xmm0,%xmm0
+ pshufd $0x4E,%xmm0,%xmm0
pxor %xmm1,%xmm12
pxor %xmm7,%xmm15
pxor %xmm6,%xmm13
pxor %xmm8,%xmm0
pxor %xmm3,%xmm11
- pshufd $78,%xmm1,%xmm7
+ pshufd $0x4E,%xmm1,%xmm7
pxor %xmm2,%xmm14
- pshufd $78,%xmm6,%xmm8
+ pshufd $0x4E,%xmm6,%xmm8
pxor %xmm5,%xmm10
- pshufd $78,%xmm3,%xmm1
+ pshufd $0x4E,%xmm3,%xmm1
pxor %xmm4,%xmm10
- pshufd $78,%xmm4,%xmm6
+ pshufd $0x4E,%xmm4,%xmm6
pxor %xmm4,%xmm11
- pshufd $78,%xmm2,%xmm3
+ pshufd $0x4E,%xmm2,%xmm3
pxor %xmm11,%xmm7
- pshufd $78,%xmm5,%xmm2
+ pshufd $0x4E,%xmm5,%xmm2
pxor %xmm12,%xmm8
pxor %xmm1,%xmm10
pxor %xmm14,%xmm6
@@ -1559,20 +1559,20 @@ bsaes_xts_encrypt:
movdqa %xmm7,(%rax)
andq $-16,%r14
- subq $128,%rsp
+ subq $0x80,%rsp
movdqa 32(%rbp),%xmm6
pxor %xmm14,%xmm14
movdqa .Lxts_magic(%rip),%xmm12
pcmpgtd %xmm6,%xmm14
- subq $128,%r14
+ subq $0x80,%r14
jc .Lxts_enc_short
jmp .Lxts_enc_loop
.align 16
.Lxts_enc_loop:
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm15
movdqa %xmm6,0(%rsp)
@@ -1580,7 +1580,7 @@ bsaes_xts_encrypt:
pand %xmm12,%xmm13
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm0
movdqa %xmm6,16(%rsp)
@@ -1589,7 +1589,7 @@ bsaes_xts_encrypt:
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
movdqu 0(%r12),%xmm7
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm1
movdqa %xmm6,32(%rsp)
@@ -1599,7 +1599,7 @@ bsaes_xts_encrypt:
pxor %xmm13,%xmm6
movdqu 16(%r12),%xmm8
pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm2
movdqa %xmm6,48(%rsp)
@@ -1609,7 +1609,7 @@ bsaes_xts_encrypt:
pxor %xmm13,%xmm6
movdqu 32(%r12),%xmm9
pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm3
movdqa %xmm6,64(%rsp)
@@ -1619,7 +1619,7 @@ bsaes_xts_encrypt:
pxor %xmm13,%xmm6
movdqu 48(%r12),%xmm10
pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm4
movdqa %xmm6,80(%rsp)
@@ -1629,7 +1629,7 @@ bsaes_xts_encrypt:
pxor %xmm13,%xmm6
movdqu 64(%r12),%xmm11
pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm5
movdqa %xmm6,96(%rsp)
@@ -1673,20 +1673,20 @@ bsaes_xts_encrypt:
pxor %xmm14,%xmm14
movdqa .Lxts_magic(%rip),%xmm12
pcmpgtd %xmm6,%xmm14
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
paddq %xmm6,%xmm6
pand %xmm12,%xmm13
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
- subq $128,%r14
+ subq $0x80,%r14
jnc .Lxts_enc_loop
.Lxts_enc_short:
- addq $128,%r14
+ addq $0x80,%r14
jz .Lxts_enc_done
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm15
movdqa %xmm6,0(%rsp)
@@ -1694,7 +1694,7 @@ bsaes_xts_encrypt:
pand %xmm12,%xmm13
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm0
movdqa %xmm6,16(%rsp)
@@ -1705,7 +1705,7 @@ bsaes_xts_encrypt:
movdqu 0(%r12),%xmm7
cmpq $16,%r14
je .Lxts_enc_1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm1
movdqa %xmm6,32(%rsp)
@@ -1717,7 +1717,7 @@ bsaes_xts_encrypt:
cmpq $32,%r14
je .Lxts_enc_2
pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm2
movdqa %xmm6,48(%rsp)
@@ -1729,7 +1729,7 @@ bsaes_xts_encrypt:
cmpq $48,%r14
je .Lxts_enc_3
pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm3
movdqa %xmm6,64(%rsp)
@@ -1741,7 +1741,7 @@ bsaes_xts_encrypt:
cmpq $64,%r14
je .Lxts_enc_4
pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm4
movdqa %xmm6,80(%rsp)
@@ -1753,7 +1753,7 @@ bsaes_xts_encrypt:
cmpq $80,%r14
je .Lxts_enc_5
pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm5
movdqa %xmm6,96(%rsp)
@@ -2019,20 +2019,20 @@ bsaes_xts_decrypt:
shlq $4,%rax
subq %rax,%r14
- subq $128,%rsp
+ subq $0x80,%rsp
movdqa 32(%rbp),%xmm6
pxor %xmm14,%xmm14
movdqa .Lxts_magic(%rip),%xmm12
pcmpgtd %xmm6,%xmm14
- subq $128,%r14
+ subq $0x80,%r14
jc .Lxts_dec_short
jmp .Lxts_dec_loop
.align 16
.Lxts_dec_loop:
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm15
movdqa %xmm6,0(%rsp)
@@ -2040,7 +2040,7 @@ bsaes_xts_decrypt:
pand %xmm12,%xmm13
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm0
movdqa %xmm6,16(%rsp)
@@ -2049,7 +2049,7 @@ bsaes_xts_decrypt:
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
movdqu 0(%r12),%xmm7
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm1
movdqa %xmm6,32(%rsp)
@@ -2059,7 +2059,7 @@ bsaes_xts_decrypt:
pxor %xmm13,%xmm6
movdqu 16(%r12),%xmm8
pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm2
movdqa %xmm6,48(%rsp)
@@ -2069,7 +2069,7 @@ bsaes_xts_decrypt:
pxor %xmm13,%xmm6
movdqu 32(%r12),%xmm9
pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm3
movdqa %xmm6,64(%rsp)
@@ -2079,7 +2079,7 @@ bsaes_xts_decrypt:
pxor %xmm13,%xmm6
movdqu 48(%r12),%xmm10
pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm4
movdqa %xmm6,80(%rsp)
@@ -2089,7 +2089,7 @@ bsaes_xts_decrypt:
pxor %xmm13,%xmm6
movdqu 64(%r12),%xmm11
pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm5
movdqa %xmm6,96(%rsp)
@@ -2133,20 +2133,20 @@ bsaes_xts_decrypt:
pxor %xmm14,%xmm14
movdqa .Lxts_magic(%rip),%xmm12
pcmpgtd %xmm6,%xmm14
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
paddq %xmm6,%xmm6
pand %xmm12,%xmm13
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
- subq $128,%r14
+ subq $0x80,%r14
jnc .Lxts_dec_loop
.Lxts_dec_short:
- addq $128,%r14
+ addq $0x80,%r14
jz .Lxts_dec_done
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm15
movdqa %xmm6,0(%rsp)
@@ -2154,7 +2154,7 @@ bsaes_xts_decrypt:
pand %xmm12,%xmm13
pcmpgtd %xmm6,%xmm14
pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm0
movdqa %xmm6,16(%rsp)
@@ -2165,7 +2165,7 @@ bsaes_xts_decrypt:
movdqu 0(%r12),%xmm7
cmpq $16,%r14
je .Lxts_dec_1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm1
movdqa %xmm6,32(%rsp)
@@ -2177,7 +2177,7 @@ bsaes_xts_decrypt:
cmpq $32,%r14
je .Lxts_dec_2
pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm2
movdqa %xmm6,48(%rsp)
@@ -2189,7 +2189,7 @@ bsaes_xts_decrypt:
cmpq $48,%r14
je .Lxts_dec_3
pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm3
movdqa %xmm6,64(%rsp)
@@ -2201,7 +2201,7 @@ bsaes_xts_decrypt:
cmpq $64,%r14
je .Lxts_dec_4
pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm4
movdqa %xmm6,80(%rsp)
@@ -2213,7 +2213,7 @@ bsaes_xts_decrypt:
cmpq $80,%r14
je .Lxts_dec_5
pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
pxor %xmm14,%xmm14
movdqa %xmm6,%xmm5
movdqa %xmm6,96(%rsp)
@@ -2390,7 +2390,7 @@ bsaes_xts_decrypt:
pxor %xmm14,%xmm14
movdqa .Lxts_magic(%rip),%xmm12
pcmpgtd %xmm6,%xmm14
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
movdqa %xmm6,%xmm5
paddq %xmm6,%xmm6
pand %xmm12,%xmm13