Index: src/x64/assembler-x64.cc
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index 096bcbd69ea6f6564fb2169d79027110c5e95fdc..44e0e2babcffa6bbc915c0f2d368b353730425ed 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -2541,6 +2541,7 @@ void Assembler::divps(XMMRegister dst, const Operand& src) {
 // SSE 2 operations.
 void Assembler::movd(XMMRegister dst, Register src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0x66);
   emit_optional_rex_32(dst, src);
@@ -2551,6 +2552,7 @@ void Assembler::movd(XMMRegister dst, Register src) {
 void Assembler::movd(XMMRegister dst, const Operand& src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0x66);
   emit_optional_rex_32(dst, src);
@@ -2561,6 +2563,7 @@ void Assembler::movd(XMMRegister dst, const Operand& src) {
 void Assembler::movd(Register dst, XMMRegister src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0x66);
   emit_optional_rex_32(src, dst);
@@ -2571,6 +2574,7 @@ void Assembler::movd(Register dst, XMMRegister src) {
 void Assembler::movq(XMMRegister dst, Register src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0x66);
   emit_rex_64(dst, src);
@@ -2581,6 +2585,7 @@ void Assembler::movq(XMMRegister dst, Register src) {
 void Assembler::movq(Register dst, XMMRegister src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   emit(0x66);
   emit_rex_64(src, dst);
@@ -2591,6 +2596,7 @@ void Assembler::movq(Register dst, XMMRegister src) {
 void Assembler::movq(XMMRegister dst, XMMRegister src) {
+  DCHECK(!IsEnabled(AVX));
   EnsureSpace ensure_space(this);
   if (dst.low_bits() == 4) {
     // Avoid unnecessary SIB byte.
@@ -3481,6 +3487,64 @@ void Assembler::vfmass(byte op, XMMRegister dst, XMMRegister src1,
 }
+void Assembler::vmovd(XMMRegister dst, Register src) {
+  DCHECK(IsEnabled(AVX));
+  EnsureSpace ensure_space(this);
+  XMMRegister isrc = {src.code()};
+  emit_vex_prefix(dst, xmm0, isrc, kL128, k66, k0F, kW0);
+  emit(0x6e);
+  emit_sse_operand(dst, src);
+}
+
+
+void Assembler::vmovd(XMMRegister dst, const Operand& src) {
+  DCHECK(IsEnabled(AVX));
+  EnsureSpace ensure_space(this);
+  emit_vex_prefix(dst, xmm0, src, kL128, k66, k0F, kW0);
+  emit(0x6e);
+  emit_sse_operand(dst, src);
+}
+
+
+void Assembler::vmovd(Register dst, XMMRegister src) {
+  DCHECK(IsEnabled(AVX));
+  EnsureSpace ensure_space(this);
+  XMMRegister idst = {dst.code()};
+  emit_vex_prefix(src, xmm0, idst, kL128, k66, k0F, kW0);
+  emit(0x7e);
+  emit_sse_operand(src, dst);
+}
+
+
+void Assembler::vmovq(XMMRegister dst, Register src) {
+  DCHECK(IsEnabled(AVX));
+  EnsureSpace ensure_space(this);
+  XMMRegister isrc = {src.code()};
+  emit_vex_prefix(dst, xmm0, isrc, kL128, k66, k0F, kW1);
+  emit(0x6e);
+  emit_sse_operand(dst, src);
+}
+
+
+void Assembler::vmovq(XMMRegister dst, const Operand& src) {
+  DCHECK(IsEnabled(AVX));
+  EnsureSpace ensure_space(this);
+  emit_vex_prefix(dst, xmm0, src, kL128, k66, k0F, kW1);
+  emit(0x6e);
+  emit_sse_operand(dst, src);
+}
+
+
+void Assembler::vmovq(Register dst, XMMRegister src) {
+  DCHECK(IsEnabled(AVX));
+  EnsureSpace ensure_space(this);
+  XMMRegister idst = {dst.code()};
+  emit_vex_prefix(src, xmm0, idst, kL128, k66, k0F, kW1);
+  emit(0x7e);
+  emit_sse_operand(src, dst);
+}
+
+
 void Assembler::vmovapd(XMMRegister dst, XMMRegister src) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
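
For reference, outside this patch: the new DCHECKs enforce that the legacy SSE encodings are only emitted while AVX is not enabled, and the VEX-encoded vmovd/vmovq require it. A minimal sketch of how a caller could dispatch between the two forms, assuming AVX support is probed via CpuFeatures::IsSupported; the EmitMovd helper and its masm parameter are illustrative and not part of this change:

  // Illustrative only: pick the VEX encoding when AVX is available,
  // otherwise fall back to the legacy SSE2 encoding.
  void EmitMovd(Assembler* masm, XMMRegister dst, Register src) {
    if (CpuFeatures::IsSupported(AVX)) {
      CpuFeatureScope avx_scope(masm, AVX);  // satisfies DCHECK(IsEnabled(AVX))
      masm->vmovd(dst, src);                 // VEX.128.66.0F.W0 6E /r
    } else {
      masm->movd(dst, src);                  // 66 0F 6E /r
    }
  }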