| Index: src/x64/assembler-x64.cc
|
| ===================================================================
|
| --- src/x64/assembler-x64.cc (revision 4699)
|
| +++ src/x64/assembler-x64.cc (working copy)
|
| @@ -27,6 +27,8 @@
|
|
|
| #include "v8.h"
|
|
|
| +#if defined(V8_TARGET_ARCH_X64)
|
| +
|
| #include "macro-assembler.h"
|
| #include "serialize.h"
|
|
|
| @@ -458,19 +460,36 @@
|
| void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit_rex_64(reg, rm_reg);
|
| - emit(opcode);
|
| - emit_modrm(reg, rm_reg);
|
| + ASSERT((opcode & 0xC6) == 2);
|
| +  if (rm_reg.low_bits() == 4) { // rm bits 100 are the SIB-escape value; use the swapped encoding.
|
| + // Swap reg and rm_reg and change opcode operand order.
|
| + emit_rex_64(rm_reg, reg);
|
| + emit(opcode ^ 0x02);
|
| + emit_modrm(rm_reg, reg);
|
| + } else {
|
| + emit_rex_64(reg, rm_reg);
|
| + emit(opcode);
|
| + emit_modrm(reg, rm_reg);
|
| + }
|
| }
|
|
|
|
|
| void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit(0x66);
|
| - emit_optional_rex_32(reg, rm_reg);
|
| - emit(opcode);
|
| - emit_modrm(reg, rm_reg);
|
| + ASSERT((opcode & 0xC6) == 2);
|
| +  if (rm_reg.low_bits() == 4) { // rm bits 100 are the SIB-escape value; use the swapped encoding.
|
| + // Swap reg and rm_reg and change opcode operand order.
|
| + emit(0x66);
|
| + emit_optional_rex_32(rm_reg, reg);
|
| + emit(opcode ^ 0x02);
|
| + emit_modrm(rm_reg, reg);
|
| + } else {
|
| + emit(0x66);
|
| + emit_optional_rex_32(reg, rm_reg);
|
| + emit(opcode);
|
| + emit_modrm(reg, rm_reg);
|
| + }
|
| }
|
|
|
|
|
| @@ -489,9 +508,17 @@
|
| void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit_optional_rex_32(reg, rm_reg);
|
| - emit(opcode);
|
| - emit_modrm(reg, rm_reg);
|
| + ASSERT((opcode & 0xC6) == 2);
|
| +  if (rm_reg.low_bits() == 4) { // rm bits 100 are the SIB-escape value; use the swapped encoding.
|
| + // Swap reg and rm_reg and change opcode operand order.
|
| + emit_optional_rex_32(rm_reg, reg);
|
| + emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
|
| + emit_modrm(rm_reg, reg);
|
| + } else {
|
| + emit_optional_rex_32(reg, rm_reg);
|
| + emit(opcode);
|
| + emit_modrm(reg, rm_reg);
|
| + }
|
| }
|
|
|
|
|
| @@ -1290,9 +1317,15 @@
|
| void Assembler::movl(Register dst, Register src) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit_optional_rex_32(dst, src);
|
| - emit(0x8B);
|
| - emit_modrm(dst, src);
|
| + if (src.low_bits() == 4) {
|
| + emit_optional_rex_32(src, dst);
|
| + emit(0x89);
|
| + emit_modrm(src, dst);
|
| + } else {
|
| + emit_optional_rex_32(dst, src);
|
| + emit(0x8B);
|
| + emit_modrm(dst, src);
|
| + }
|
| }
|
|
|
|
|
| @@ -1337,9 +1370,15 @@
|
| void Assembler::movq(Register dst, Register src) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit_rex_64(dst, src);
|
| - emit(0x8B);
|
| - emit_modrm(dst, src);
|
| + if (src.low_bits() == 4) {
|
| + emit_rex_64(src, dst);
|
| + emit(0x89);
|
| + emit_modrm(src, dst);
|
| + } else {
|
| + emit_rex_64(dst, src);
|
| + emit(0x8B);
|
| + emit_modrm(dst, src);
|
| + }
|
| }
|
|
|
|
|
| @@ -1860,6 +1899,10 @@
|
| Register other = src.is(rax) ? dst : src;
|
| emit_rex_64(other);
|
| emit(0x90 | other.low_bits());
|
| + } else if (dst.low_bits() == 4) {
|
| + emit_rex_64(dst, src);
|
| + emit(0x87);
|
| + emit_modrm(dst, src);
|
| } else {
|
| emit_rex_64(src, dst);
|
| emit(0x87);
|
| @@ -1885,12 +1928,18 @@
|
| void Assembler::testb(Register dst, Register src) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - if (dst.code() > 3 || src.code() > 3) {
|
| - // Register is not one of al, bl, cl, dl. Its encoding needs REX.
|
| - emit_rex_32(dst, src);
|
| + if (src.low_bits() == 4) {
|
| + emit_rex_32(src, dst);
|
| + emit(0x84);
|
| + emit_modrm(src, dst);
|
| + } else {
|
| + if (dst.code() > 3 || src.code() > 3) {
|
| + // Register is not one of al, bl, cl, dl. Its encoding needs REX.
|
| + emit_rex_32(dst, src);
|
| + }
|
| + emit(0x84);
|
| + emit_modrm(dst, src);
|
| }
|
| - emit(0x84);
|
| - emit_modrm(dst, src);
|
| }
|
|
|
|
|
| @@ -1941,9 +1990,15 @@
|
| void Assembler::testl(Register dst, Register src) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit_optional_rex_32(dst, src);
|
| - emit(0x85);
|
| - emit_modrm(dst, src);
|
| + if (src.low_bits() == 4) {
|
| + emit_optional_rex_32(src, dst);
|
| + emit(0x85);
|
| + emit_modrm(src, dst);
|
| + } else {
|
| + emit_optional_rex_32(dst, src);
|
| + emit(0x85);
|
| + emit_modrm(dst, src);
|
| + }
|
| }
|
|
|
|
|
| @@ -1994,9 +2049,15 @@
|
| void Assembler::testq(Register dst, Register src) {
|
| EnsureSpace ensure_space(this);
|
| last_pc_ = pc_;
|
| - emit_rex_64(dst, src);
|
| - emit(0x85);
|
| - emit_modrm(dst, src);
|
| + if (src.low_bits() == 4) {
|
| + emit_rex_64(src, dst);
|
| + emit(0x85);
|
| + emit_modrm(src, dst);
|
| + } else {
|
| + emit_rex_64(dst, src);
|
| + emit(0x85);
|
| + emit_modrm(dst, src);
|
| + }
|
| }
|
|
|
|
|
| @@ -2739,4 +2800,16 @@
|
| 1 << RelocInfo::INTERNAL_REFERENCE |
|
| 1 << RelocInfo::JS_RETURN;
|
|
|
| +
|
| +bool RelocInfo::IsCodedSpecially() {
|
| + // The deserializer needs to know whether a pointer is specially coded. Being
|
| + // specially coded on x64 means that it is a relative 32-bit address, as used
|
| + // by branch instructions.
|
| + return (1 << rmode_) & kApplyMask;
|
| +}
|
| +
|
| +
|
| +
|
| } } // namespace v8::internal
|
| +
|
| +#endif // V8_TARGET_ARCH_X64
|
|
|