Index: third_party/boringssl/mac-x86_64/crypto/aes/bsaes-x86_64.S
diff --git a/third_party/boringssl/mac-x86_64/crypto/aes/bsaes-x86_64.S b/third_party/boringssl/mac-x86_64/crypto/aes/bsaes-x86_64.S
index c2d04776a9bd2550b06690a917802553b051cc25..ad802e3d5da427fcb000125437f6692fbb5bbf14 100644
--- a/third_party/boringssl/mac-x86_64/crypto/aes/bsaes-x86_64.S
+++ b/third_party/boringssl/mac-x86_64/crypto/aes/bsaes-x86_64.S
@@ -325,45 +325,45 @@ L$enc_sbox:
 pxor %xmm2,%xmm5
 decl %r10d
 jl L$enc_done
- pshufd $147,%xmm15,%xmm7
- pshufd $147,%xmm0,%xmm8
+ pshufd $0x93,%xmm15,%xmm7
+ pshufd $0x93,%xmm0,%xmm8
 pxor %xmm7,%xmm15
- pshufd $147,%xmm3,%xmm9
+ pshufd $0x93,%xmm3,%xmm9
 pxor %xmm8,%xmm0
- pshufd $147,%xmm5,%xmm10
+ pshufd $0x93,%xmm5,%xmm10
 pxor %xmm9,%xmm3
- pshufd $147,%xmm2,%xmm11
+ pshufd $0x93,%xmm2,%xmm11
 pxor %xmm10,%xmm5
- pshufd $147,%xmm6,%xmm12
+ pshufd $0x93,%xmm6,%xmm12
 pxor %xmm11,%xmm2
- pshufd $147,%xmm1,%xmm13
+ pshufd $0x93,%xmm1,%xmm13
 pxor %xmm12,%xmm6
- pshufd $147,%xmm4,%xmm14
+ pshufd $0x93,%xmm4,%xmm14
 pxor %xmm13,%xmm1
 pxor %xmm14,%xmm4
 pxor %xmm15,%xmm8
 pxor %xmm4,%xmm7
 pxor %xmm4,%xmm8
- pshufd $78,%xmm15,%xmm15
+ pshufd $0x4E,%xmm15,%xmm15
 pxor %xmm0,%xmm9
- pshufd $78,%xmm0,%xmm0
+ pshufd $0x4E,%xmm0,%xmm0
 pxor %xmm2,%xmm12
 pxor %xmm7,%xmm15
 pxor %xmm6,%xmm13
 pxor %xmm8,%xmm0
 pxor %xmm5,%xmm11
- pshufd $78,%xmm2,%xmm7
+ pshufd $0x4E,%xmm2,%xmm7
 pxor %xmm1,%xmm14
- pshufd $78,%xmm6,%xmm8
+ pshufd $0x4E,%xmm6,%xmm8
 pxor %xmm3,%xmm10
- pshufd $78,%xmm5,%xmm2
+ pshufd $0x4E,%xmm5,%xmm2
 pxor %xmm4,%xmm10
- pshufd $78,%xmm4,%xmm6
+ pshufd $0x4E,%xmm4,%xmm6
 pxor %xmm4,%xmm11
- pshufd $78,%xmm1,%xmm5
+ pshufd $0x4E,%xmm1,%xmm5
 pxor %xmm11,%xmm7
- pshufd $78,%xmm3,%xmm1
+ pshufd $0x4E,%xmm3,%xmm1
 pxor %xmm12,%xmm8
 pxor %xmm10,%xmm2
 pxor %xmm14,%xmm6
@@ -797,24 +797,24 @@ L$dec_sbox:
 decl %r10d
 jl L$dec_done
- pshufd $78,%xmm15,%xmm7
- pshufd $78,%xmm2,%xmm13
+ pshufd $0x4E,%xmm15,%xmm7
+ pshufd $0x4E,%xmm2,%xmm13
 pxor %xmm15,%xmm7
- pshufd $78,%xmm4,%xmm14
+ pshufd $0x4E,%xmm4,%xmm14
 pxor %xmm2,%xmm13
- pshufd $78,%xmm0,%xmm8
+ pshufd $0x4E,%xmm0,%xmm8
 pxor %xmm4,%xmm14
- pshufd $78,%xmm5,%xmm9
+ pshufd $0x4E,%xmm5,%xmm9
 pxor %xmm0,%xmm8
- pshufd $78,%xmm3,%xmm10
+ pshufd $0x4E,%xmm3,%xmm10
 pxor %xmm5,%xmm9
 pxor %xmm13,%xmm15
 pxor %xmm13,%xmm0
- pshufd $78,%xmm1,%xmm11
+ pshufd $0x4E,%xmm1,%xmm11
 pxor %xmm3,%xmm10
 pxor %xmm7,%xmm5
 pxor %xmm8,%xmm3
- pshufd $78,%xmm6,%xmm12
+ pshufd $0x4E,%xmm6,%xmm12
 pxor %xmm1,%xmm11
 pxor %xmm14,%xmm0
 pxor %xmm9,%xmm1
@@ -828,45 +828,45 @@ L$dec_sbox:
 pxor %xmm14,%xmm1
 pxor %xmm14,%xmm6
 pxor %xmm12,%xmm4
- pshufd $147,%xmm15,%xmm7
- pshufd $147,%xmm0,%xmm8
+ pshufd $0x93,%xmm15,%xmm7
+ pshufd $0x93,%xmm0,%xmm8
 pxor %xmm7,%xmm15
- pshufd $147,%xmm5,%xmm9
+ pshufd $0x93,%xmm5,%xmm9
 pxor %xmm8,%xmm0
- pshufd $147,%xmm3,%xmm10
+ pshufd $0x93,%xmm3,%xmm10
 pxor %xmm9,%xmm5
- pshufd $147,%xmm1,%xmm11
+ pshufd $0x93,%xmm1,%xmm11
 pxor %xmm10,%xmm3
- pshufd $147,%xmm6,%xmm12
+ pshufd $0x93,%xmm6,%xmm12
 pxor %xmm11,%xmm1
- pshufd $147,%xmm2,%xmm13
+ pshufd $0x93,%xmm2,%xmm13
 pxor %xmm12,%xmm6
- pshufd $147,%xmm4,%xmm14
+ pshufd $0x93,%xmm4,%xmm14
 pxor %xmm13,%xmm2
 pxor %xmm14,%xmm4
 pxor %xmm15,%xmm8
 pxor %xmm4,%xmm7
 pxor %xmm4,%xmm8
- pshufd $78,%xmm15,%xmm15
+ pshufd $0x4E,%xmm15,%xmm15
 pxor %xmm0,%xmm9
- pshufd $78,%xmm0,%xmm0
+ pshufd $0x4E,%xmm0,%xmm0
 pxor %xmm1,%xmm12
 pxor %xmm7,%xmm15
 pxor %xmm6,%xmm13
 pxor %xmm8,%xmm0
 pxor %xmm3,%xmm11
- pshufd $78,%xmm1,%xmm7
+ pshufd $0x4E,%xmm1,%xmm7
 pxor %xmm2,%xmm14
- pshufd $78,%xmm6,%xmm8
+ pshufd $0x4E,%xmm6,%xmm8
 pxor %xmm5,%xmm10
- pshufd $78,%xmm3,%xmm1
+ pshufd $0x4E,%xmm3,%xmm1
 pxor %xmm4,%xmm10
- pshufd $78,%xmm4,%xmm6
+ pshufd $0x4E,%xmm4,%xmm6
 pxor %xmm4,%xmm11
- pshufd $78,%xmm2,%xmm3
+ pshufd $0x4E,%xmm2,%xmm3
 pxor %xmm11,%xmm7
- pshufd $78,%xmm5,%xmm2
+ pshufd $0x4E,%xmm5,%xmm2
 pxor %xmm12,%xmm8
 pxor %xmm1,%xmm10
 pxor %xmm14,%xmm6
@@ -1556,20 +1556,20 @@ L$xts_enc_prologue:
 movdqa %xmm7,(%rax)
 andq $-16,%r14
- subq $128,%rsp
+ subq $0x80,%rsp
 movdqa 32(%rbp),%xmm6
 pxor %xmm14,%xmm14
 movdqa L$xts_magic(%rip),%xmm12
 pcmpgtd %xmm6,%xmm14
- subq $128,%r14
+ subq $0x80,%r14
 jc L$xts_enc_short
 jmp L$xts_enc_loop
 .p2align 4
 L$xts_enc_loop:
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm15
 movdqa %xmm6,0(%rsp)
@@ -1577,7 +1577,7 @@ L$xts_enc_loop:
 pand %xmm12,%xmm13
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm0
 movdqa %xmm6,16(%rsp)
@@ -1586,7 +1586,7 @@ L$xts_enc_loop:
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
 movdqu 0(%r12),%xmm7
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm1
 movdqa %xmm6,32(%rsp)
@@ -1596,7 +1596,7 @@ L$xts_enc_loop:
 pxor %xmm13,%xmm6
 movdqu 16(%r12),%xmm8
 pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm2
 movdqa %xmm6,48(%rsp)
@@ -1606,7 +1606,7 @@ L$xts_enc_loop:
 pxor %xmm13,%xmm6
 movdqu 32(%r12),%xmm9
 pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm3
 movdqa %xmm6,64(%rsp)
@@ -1616,7 +1616,7 @@ L$xts_enc_loop:
 pxor %xmm13,%xmm6
 movdqu 48(%r12),%xmm10
 pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm4
 movdqa %xmm6,80(%rsp)
@@ -1626,7 +1626,7 @@ L$xts_enc_loop:
 pxor %xmm13,%xmm6
 movdqu 64(%r12),%xmm11
 pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm5
 movdqa %xmm6,96(%rsp)
@@ -1670,20 +1670,20 @@ L$xts_enc_loop:
 pxor %xmm14,%xmm14
 movdqa L$xts_magic(%rip),%xmm12
 pcmpgtd %xmm6,%xmm14
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 paddq %xmm6,%xmm6
 pand %xmm12,%xmm13
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
- subq $128,%r14
+ subq $0x80,%r14
 jnc L$xts_enc_loop
 L$xts_enc_short:
- addq $128,%r14
+ addq $0x80,%r14
 jz L$xts_enc_done
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm15
 movdqa %xmm6,0(%rsp)
@@ -1691,7 +1691,7 @@ L$xts_enc_short:
 pand %xmm12,%xmm13
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm0
 movdqa %xmm6,16(%rsp)
@@ -1702,7 +1702,7 @@ L$xts_enc_short:
 movdqu 0(%r12),%xmm7
 cmpq $16,%r14
 je L$xts_enc_1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm1
 movdqa %xmm6,32(%rsp)
@@ -1714,7 +1714,7 @@ L$xts_enc_short:
 cmpq $32,%r14
 je L$xts_enc_2
 pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm2
 movdqa %xmm6,48(%rsp)
@@ -1726,7 +1726,7 @@ L$xts_enc_short:
 cmpq $48,%r14
 je L$xts_enc_3
 pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm3
 movdqa %xmm6,64(%rsp)
@@ -1738,7 +1738,7 @@ L$xts_enc_short:
 cmpq $64,%r14
 je L$xts_enc_4
 pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm4
 movdqa %xmm6,80(%rsp)
@@ -1750,7 +1750,7 @@ L$xts_enc_short:
 cmpq $80,%r14
 je L$xts_enc_5
 pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm5
 movdqa %xmm6,96(%rsp)
@@ -2016,20 +2016,20 @@ L$xts_dec_prologue:
 shlq $4,%rax
 subq %rax,%r14
- subq $128,%rsp
+ subq $0x80,%rsp
 movdqa 32(%rbp),%xmm6
 pxor %xmm14,%xmm14
 movdqa L$xts_magic(%rip),%xmm12
 pcmpgtd %xmm6,%xmm14
- subq $128,%r14
+ subq $0x80,%r14
 jc L$xts_dec_short
 jmp L$xts_dec_loop
 .p2align 4
 L$xts_dec_loop:
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm15
 movdqa %xmm6,0(%rsp)
@@ -2037,7 +2037,7 @@ L$xts_dec_loop:
 pand %xmm12,%xmm13
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm0
 movdqa %xmm6,16(%rsp)
@@ -2046,7 +2046,7 @@ L$xts_dec_loop:
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
 movdqu 0(%r12),%xmm7
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm1
 movdqa %xmm6,32(%rsp)
@@ -2056,7 +2056,7 @@ L$xts_dec_loop:
 pxor %xmm13,%xmm6
 movdqu 16(%r12),%xmm8
 pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm2
 movdqa %xmm6,48(%rsp)
@@ -2066,7 +2066,7 @@ L$xts_dec_loop:
 pxor %xmm13,%xmm6
 movdqu 32(%r12),%xmm9
 pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm3
 movdqa %xmm6,64(%rsp)
@@ -2076,7 +2076,7 @@ L$xts_dec_loop:
 pxor %xmm13,%xmm6
 movdqu 48(%r12),%xmm10
 pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm4
 movdqa %xmm6,80(%rsp)
@@ -2086,7 +2086,7 @@ L$xts_dec_loop:
 pxor %xmm13,%xmm6
 movdqu 64(%r12),%xmm11
 pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm5
 movdqa %xmm6,96(%rsp)
@@ -2130,20 +2130,20 @@ L$xts_dec_loop:
 pxor %xmm14,%xmm14
 movdqa L$xts_magic(%rip),%xmm12
 pcmpgtd %xmm6,%xmm14
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 paddq %xmm6,%xmm6
 pand %xmm12,%xmm13
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
- subq $128,%r14
+ subq $0x80,%r14
 jnc L$xts_dec_loop
 L$xts_dec_short:
- addq $128,%r14
+ addq $0x80,%r14
 jz L$xts_dec_done
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm15
 movdqa %xmm6,0(%rsp)
@@ -2151,7 +2151,7 @@ L$xts_dec_short:
 pand %xmm12,%xmm13
 pcmpgtd %xmm6,%xmm14
 pxor %xmm13,%xmm6
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm0
 movdqa %xmm6,16(%rsp)
@@ -2162,7 +2162,7 @@ L$xts_dec_short:
 movdqu 0(%r12),%xmm7
 cmpq $16,%r14
 je L$xts_dec_1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm1
 movdqa %xmm6,32(%rsp)
@@ -2174,7 +2174,7 @@ L$xts_dec_short:
 cmpq $32,%r14
 je L$xts_dec_2
 pxor %xmm7,%xmm15
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm2
 movdqa %xmm6,48(%rsp)
@@ -2186,7 +2186,7 @@ L$xts_dec_short:
 cmpq $48,%r14
 je L$xts_dec_3
 pxor %xmm8,%xmm0
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm3
 movdqa %xmm6,64(%rsp)
@@ -2198,7 +2198,7 @@ L$xts_dec_short:
 cmpq $64,%r14
 je L$xts_dec_4
 pxor %xmm9,%xmm1
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm4
 movdqa %xmm6,80(%rsp)
@@ -2210,7 +2210,7 @@ L$xts_dec_short:
 cmpq $80,%r14
 je L$xts_dec_5
 pxor %xmm10,%xmm2
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 pxor %xmm14,%xmm14
 movdqa %xmm6,%xmm5
 movdqa %xmm6,96(%rsp)
@@ -2387,7 +2387,7 @@ L$xts_dec_done:
 pxor %xmm14,%xmm14
 movdqa L$xts_magic(%rip),%xmm12
 pcmpgtd %xmm6,%xmm14
- pshufd $19,%xmm14,%xmm13
+ pshufd $0x13,%xmm14,%xmm13
 movdqa %xmm6,%xmm5
 paddq %xmm6,%xmm6
 pand %xmm12,%xmm13