Index: src/x64/assembler-x64.cc
===================================================================
--- src/x64/assembler-x64.cc (revision 15486)
+++ src/x64/assembler-x64.cc (working copy)
@@ -166,7 +166,11 @@
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
// Load register with immediate 64 and call through a register instructions
// takes up 13 bytes and int3 takes up one byte.
+#ifndef V8_TARGET_ARCH_X32
static const int kCallCodeSize = 13;
+#else
+  static const int kCallCodeSize = 9;
+#endif
int code_size = kCallCodeSize + guard_bytes;
// Create a code patcher.
@@ -179,7 +183,11 @@
#endif
// Patch the code.
+#ifndef V8_TARGET_ARCH_X32
patcher.masm()->movq(r10, target, RelocInfo::NONE64);
+#else
+  patcher.masm()->movl(r10, target, RelocInfo::NONE32);
+#endif
patcher.masm()->call(r10);
// Check that the size of the code generated is as expected.
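For reference, the two kCallCodeSize values follow directly from the encodings in the patch sequence: on x64, movq r10, imm64 is a REX.W prefix, the B8+r opcode byte, and an 8-byte immediate (10 bytes), and call r10 is REX.B + FF /2 (3 bytes); on x32 the load shrinks to REX.B, B8+r, and a 4-byte immediate (6 bytes). A minimal compile-time sketch of that arithmetic (illustrative only, not part of the patch; the constant names are made up here):

#include <cstddef>

// x64 patch sequence: movq r10, imm64 (10 bytes) followed by call r10 (3 bytes).
constexpr std::size_t kMovqR10Imm64 = 1 /*REX.W+B*/ + 1 /*B8+r*/ + 8 /*imm64*/;
constexpr std::size_t kCallR10      = 1 /*REX.B*/   + 1 /*FF*/   + 1 /*ModRM, /2*/;
static_assert(kMovqR10Imm64 + kCallR10 == 13, "matches kCallCodeSize on x64");

// x32 patch sequence: movl r10, imm32 (6 bytes) followed by the same 3-byte call.
constexpr std::size_t kMovlR10Imm32 = 1 /*REX.B*/ + 1 /*B8+r*/ + 4 /*imm32*/;
static_assert(kMovlR10Imm32 + kCallR10 == 9, "matches kCallCodeSize on x32");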
@@ -1373,9 +1381,19 @@
void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
EnsureSpace ensure_space(this);
+#ifndef V8_TARGET_ARCH_X32
emit(0x48); // REX.W
+#endif
emit(0xA1);
+#ifndef V8_TARGET_ARCH_X32
emitq(reinterpret_cast<uintptr_t>(value), mode);
+#else
+  // In 64-bit mode the direct memory-offset (moffs) operand of this opcode is
+  // always 8 bytes, so zero-extend the 4-byte address. See section 2.2.1.4 of
+  // the Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 2.
+  emitl(reinterpret_cast<uintptr_t>(value), mode);
+  emitl(0);
+#endif
}
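The emitl(value) / emitl(0) pair assembles the 8-byte moffs operand that opcode A1 expects in 64-bit mode from the 4-byte x32 address; store_rax further down uses the same trick. A small host-side sketch of the layout (assumes a little-endian host; the variable names are placeholders, nothing here comes from the CL):

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  uint32_t addr32 = 0x12345678u;     // the 4-byte address passed to emitl(value)
  unsigned char moffs[8];
  std::memcpy(moffs, &addr32, 4);    // emitl(value): low 4 bytes of the moffs
  std::memset(moffs + 4, 0, 4);      // emitl(0): zero-extend to the full 8 bytes
  uint64_t moffs64 = 0;
  std::memcpy(&moffs64, moffs, 8);
  assert(moffs64 == 0x0000000012345678ull);  // the CPU decodes the zero-extended address
  return 0;
}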
@@ -1481,6 +1499,21 @@
}
+#ifdef V8_TARGET_ARCH_X32
+void Assembler::movl(Register dst, int32_t value, RelocInfo::Mode rmode) {
+  // Non-relocatable values can be emitted as an ordinary immediate.
+  if (RelocInfo::IsNone(rmode)) {
+    movl(dst, Immediate(value));
+    return;
+  }
+  EnsureSpace ensure_space(this);
+  emit_optional_rex_32(dst);
+  emit(0xB8 | dst.low_bits());
+  emitl(value, rmode);
+}
+#endif
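Non-relocatable values take the plain Immediate overload above, while relocatable ones are emitted with the B8+rd mov r32, imm32 form, whose trailing 4-byte immediate the recorded relocation can later rewrite. A worked opcode check (illustration only, not from the CL):

static_assert((0xB8 | 3) == 0xBB, "mov ebx, imm32 opcode byte (rbx has low bits 3)");
static_assert((0xB8 | 2) == 0xBA, "mov r10d, imm32 opcode byte, after the 0x41 REX.B prefix");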
+
+
void Assembler::movq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit_rex_64(dst, src);
@@ -1520,6 +1553,7 @@
}
+#ifndef V8_TARGET_ARCH_X32
void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
// This method must not be used with heap object references. The stored
// address is not GC safe. Use the handle version instead.
@@ -1529,9 +1563,21 @@
emit(0xB8 | dst.low_bits());
emitq(reinterpret_cast<uintptr_t>(value), rmode);
}
+#else
+void Assembler::movl(Register dst, void* value, RelocInfo::Mode rmode) {
+  // This method must not be used with heap object references. The stored
+  // address is not GC safe. Use the handle version instead.
+  ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
+  EnsureSpace ensure_space(this);
+  emit_optional_rex_32(dst);
+  emit(0xB8 | dst.low_bits());
+  emitl(reinterpret_cast<uintptr_t>(value), rmode);
+}
+#endif
void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
+#ifndef V8_TARGET_ARCH_X32
// Non-relocatable values might not need a 64-bit representation.
if (RelocInfo::IsNone(rmode)) {
if (is_uint32(value)) {
@@ -1544,6 +1590,18 @@
// Value cannot be represented by 32 bits, so do a full 64 bit immediate
// value.
}
+#else
+  ASSERT(RelocInfo::IsNone(rmode));
+  if (is_uint32(value)) {
+    movl(dst, Immediate(static_cast<int32_t>(value)));
+    return;
+  } else if (is_int32(value)) {
+    movq(dst, Immediate(static_cast<int32_t>(value)));
+    return;
+  }
+  // Value cannot be represented by 32 bits, so do a full 64 bit immediate
+  // value.
+#endif
EnsureSpace ensure_space(this);
emit_rex_64(dst);
emit(0xB8 | dst.low_bits());
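On x32 this overload refuses relocatable modes up front (ASSERT(RelocInfo::IsNone(rmode))) and then picks the narrowest usable encoding: a zero-extending 32-bit move when the value fits in an unsigned 32-bit immediate, the sign-extending REX.W imm32 form when it only fits as a signed 32-bit immediate, and the full imm64 form otherwise. A standalone sketch of that dispatch, using stand-in fits_uint32/fits_int32 helpers that are assumed to mirror V8's is_uint32/is_int32 (illustration only):

#include <cstdint>
#include <cstdio>

// Stand-ins for V8's is_uint32/is_int32 predicates (assumed semantics).
static bool fits_uint32(int64_t v) { return v >= 0 && v <= 0xFFFFFFFFLL; }
static bool fits_int32(int64_t v)  { return v >= -2147483648LL && v <= 2147483647LL; }

int main() {
  const int64_t samples[] = { 42, -1, 0x100000000LL };
  for (int64_t v : samples) {
    if (fits_uint32(v)) {
      std::printf("%lld -> movl reg, imm32 (the 32-bit write zero-extends)\n", (long long)v);
    } else if (fits_int32(v)) {
      std::printf("%lld -> movq reg, imm32 (REX.W form, sign-extended)\n", (long long)v);
    } else {
      std::printf("%lld -> movq reg, imm64 (full-width immediate)\n", (long long)v);
    }
  }
  return 0;
}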
@@ -1551,10 +1609,17 @@
}
+#ifndef V8_TARGET_ARCH_X32
void Assembler::movq(Register dst, ExternalReference ref) {
int64_t value = reinterpret_cast<int64_t>(ref.address());
movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
}
+#else
+void Assembler::movl(Register dst, ExternalReference ref) {
+  int32_t value = reinterpret_cast<int32_t>(ref.address());
+  movl(dst, value, RelocInfo::EXTERNAL_REFERENCE);
+}
+#endif
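Call sites that load an ExternalReference then differ only in the mnemonic chosen under the same macro. A hypothetical caller (masm and ref are placeholders for illustration; this snippet is not taken from the CL):

#ifndef V8_TARGET_ARCH_X32
  masm->movq(kScratchRegister, ref);   // 64-bit pointer immediate plus relocation
#else
  masm->movl(kScratchRegister, ref);   // 32-bit pointer immediate plus relocation
#endif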
void Assembler::movq(const Operand& dst, Immediate value) {
@@ -1589,6 +1654,7 @@
}
+#ifndef V8_TARGET_ARCH_X32
void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
AllowDeferredHandleDereference using_raw_address;
// If there is no relocation info, emit the value of the handle efficiently
@@ -1607,8 +1673,29 @@
emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
}
}
+#else
+void Assembler::movl(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
+  AllowDeferredHandleDereference using_raw_address;
+  // If there is no relocation info, emit the value of the handle efficiently
+  // (possibly using less than 8 bytes for the value).
+  if (RelocInfo::IsNone(mode)) {
+    // There is no possible reason to store a heap pointer without relocation
+    // info, so it must be a smi.
+    ASSERT(value->IsSmi());
+    movl(dst, reinterpret_cast<int32_t>(*value), RelocInfo::NONE32);
+  } else {
+    EnsureSpace ensure_space(this);
+    ASSERT(value->IsHeapObject());
+    ASSERT(!HEAP->InNewSpace(*value));
+    emit_optional_rex_32(dst);
+    emit(0xB8 | dst.low_bits());
+    emitl(reinterpret_cast<uintptr_t>(value.location()), mode);
+  }
+}
+#endif
+
void Assembler::movsxbq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit_rex_64(dst, src);
@@ -1992,11 +2079,38 @@
}
+#ifdef V8_TARGET_ARCH_X32
+void Assembler::xchgl(Register dst, Register src) {
+  EnsureSpace ensure_space(this);
+  if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding
+    Register other = src.is(rax) ? dst : src;
+    emit_optional_rex_32(other);
+    emit(0x90 | other.low_bits());
+  } else if (dst.low_bits() == 4) {
+    emit_optional_rex_32(dst, src);
+    emit(0x87);
+    emit_modrm(dst, src);
+  } else {
+    emit_optional_rex_32(src, dst);
+    emit(0x87);
+    emit_modrm(src, dst);
+  }
+}
+#endif
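xchgl follows the usual x86 special case: when either operand is rax it emits the one-byte 0x90+rd short form (the reason xchg eax, eax doubles as NOP), and otherwise it falls back to the two-byte 0x87 /r form. A worked opcode check (illustration only):

static_assert((0x90 | 1) == 0x91, "xchg eax, ecx short form (rcx has low bits 1)");
static_assert((0x90 | 0) == 0x90, "xchg eax, eax collapses to the NOP encoding");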
+
+
void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
EnsureSpace ensure_space(this);
+#ifndef V8_TARGET_ARCH_X32
emit(0x48); // REX.W
+#endif
emit(0xA3);
+#ifndef V8_TARGET_ARCH_X32
emitq(reinterpret_cast<uintptr_t>(dst), mode);
+#else
+  emitl(reinterpret_cast<uintptr_t>(dst), mode);
+  emitl(0);
+#endif
}
@@ -2110,6 +2224,16 @@
}
+#ifdef V8_TARGET_ARCH_X32
+void Assembler::testl(const Operand& op, Register reg) {
+  EnsureSpace ensure_space(this);
+  emit_optional_rex_32(reg, op);
+  emit(0x85);
+  emit_operand(reg, op);
+}
+#endif
+
+
void Assembler::testq(const Operand& op, Register reg) {
EnsureSpace ensure_space(this);
emit_rex_64(reg, op);
@@ -3025,6 +3149,17 @@
}
+#ifdef V8_TARGET_ARCH_X32
+void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
+  EnsureSpace ensure_space(this);
+  emit(0x66);
+  emit(0x0f);
+  emit(0x76);
+  emit_sse_operand(dst, src);
+}
+#endif
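PCMPEQD xmm1, xmm2/m128 is encoded 66 0F 76 /r, which is exactly the byte sequence emitted above; comparing a register with itself is a common way to materialize an all-ones mask. A host-side illustration with the SSE2 intrinsic (not part of the patch):

#include <cassert>
#include <cstdint>
#include <emmintrin.h>

int main() {
  __m128i x = _mm_set1_epi32(0x12345678);
  __m128i ones = _mm_cmpeq_epi32(x, x);   // pcmpeqd x, x: every lane compares equal
  alignas(16) uint32_t out[4];
  _mm_store_si128(reinterpret_cast<__m128i*>(out), ones);
  for (int i = 0; i < 4; ++i) assert(out[i] == 0xFFFFFFFFu);
  return 0;
}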
+
+
void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
Register ireg = { reg.code() };
emit_operand(ireg, adr);