| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 11 matching lines...) |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #ifndef V8_X64_ASSEMBLER_X64_INL_H_ | 28 #ifndef V8_X64_ASSEMBLER_X64_INL_H_ |
| 29 #define V8_X64_ASSEMBLER_X64_INL_H_ | 29 #define V8_X64_ASSEMBLER_X64_INL_H_ |
| 30 | 30 |
| 31 #include "cpu.h" | 31 #include "cpu.h" |
| 32 #include "memory.h" |
| 32 | 33 |
| 33 namespace v8 { | 34 namespace v8 { |
| 34 namespace internal { | 35 namespace internal { |
| 35 | 36 |
| 36 Condition NegateCondition(Condition cc) { | 37 Condition NegateCondition(Condition cc) { |
| 37 return static_cast<Condition>(cc ^ 1); | 38 return static_cast<Condition>(cc ^ 1); |
| 38 } | 39 } |
| 39 | 40 |
| 40 // ----------------------------------------------------------------------------- | 41 // ----------------------------------------------------------------------------- |
| 41 | 42 |
| (...skipping 25 matching lines...) |
| 67 Memory::uint16_at(pc_) = x; | 68 Memory::uint16_at(pc_) = x; |
| 68 pc_ += sizeof(uint16_t); | 69 pc_ += sizeof(uint16_t); |
| 69 } | 70 } |
| 70 | 71 |
| 71 | 72 |
| 72 void Assembler::emit_rex_64(Register reg, Register rm_reg) { | 73 void Assembler::emit_rex_64(Register reg, Register rm_reg) { |
| 73 emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit()); | 74 emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit()); |
| 74 } | 75 } |
| 75 | 76 |
| 76 | 77 |
| 78 void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) { |
| 79 emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3); |
| 80 } |
| 81 |
| 82 |
| 77 void Assembler::emit_rex_64(Register reg, const Operand& op) { | 83 void Assembler::emit_rex_64(Register reg, const Operand& op) { |
| 78 emit(0x48 | reg.high_bit() << 2 | op.rex_); | 84 emit(0x48 | reg.high_bit() << 2 | op.rex_); |
| 79 } | 85 } |
| 80 | 86 |
| 81 | 87 |
| 88 void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) { |
| 89 emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_); |
| 90 } |
| 91 |
| 92 |
| 82 void Assembler::emit_rex_64(Register rm_reg) { | 93 void Assembler::emit_rex_64(Register rm_reg) { |
| 83 ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code()); | 94 ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code()); |
| 84 emit(0x48 | rm_reg.high_bit()); | 95 emit(0x48 | rm_reg.high_bit()); |
| 85 } | 96 } |
| 86 | 97 |
| 87 | 98 |
| 88 void Assembler::emit_rex_64(const Operand& op) { | 99 void Assembler::emit_rex_64(const Operand& op) { |
| 89 emit(0x48 | op.rex_); | 100 emit(0x48 | op.rex_); |
| 90 } | 101 } |
| 91 | 102 |
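A note on the new `emit_rex_64(XMMRegister, ...)` overloads above: they pack the register extension bits into a REX.W prefix byte (bit pattern 0100WRXB), where W=1 selects 64-bit operand size, R extends the ModRM reg field, and B extends the ModRM rm/base field. Register and XMM codes 0-15 carry their extension in bit 3 of the code, which is why the patch shifts `reg.code() & 0x8` right by 1 (into the R position) and the rm code right by 3 (into the B position). The following standalone sketch is not V8 code; `rex_w` is a hypothetical helper that mirrors the arithmetic in the patch, shown only to make the bit layout concrete.

```cpp
#include <cassert>
#include <cstdint>
#include <cstdio>

// Hypothetical helper mirroring the arithmetic in emit_rex_64 above.
// 0x48 = 0100 1000b: the fixed 0100 REX pattern with the W bit set.
uint8_t rex_w(int reg_code, int rm_code) {
  // reg bit 3 -> REX.R (bit 2), rm bit 3 -> REX.B (bit 0).
  return 0x48 | ((reg_code & 0x8) >> 1) | (rm_code >> 3);
}

int main() {
  // Operands like (xmm8, r10): xmm8 has code 8, r10 has code 10.
  assert(rex_w(8, 10) == 0x4D);  // W=1, R=1, X=0, B=1.
  // Low registers (codes 0-7) still need REX.W for 64-bit width.
  assert(rex_w(0, 3) == 0x48);
  std::printf("REX.W for (xmm8, r10) = 0x%02X\n", rex_w(8, 10));
  return 0;
}
```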
| (...skipping 23 matching lines...) |
| 115 if (rex_bits != 0) emit(0x40 | rex_bits); | 126 if (rex_bits != 0) emit(0x40 | rex_bits); |
| 116 } | 127 } |
| 117 | 128 |
| 118 | 129 |
| 119 void Assembler::emit_optional_rex_32(Register reg, const Operand& op) { | 130 void Assembler::emit_optional_rex_32(Register reg, const Operand& op) { |
| 120 byte rex_bits = reg.high_bit() << 2 | op.rex_; | 131 byte rex_bits = reg.high_bit() << 2 | op.rex_; |
| 121 if (rex_bits != 0) emit(0x40 | rex_bits); | 132 if (rex_bits != 0) emit(0x40 | rex_bits); |
| 122 } | 133 } |
| 123 | 134 |
| 124 | 135 |
| 136 void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) { |
| 137 byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_; |
| 138 if (rex_bits != 0) emit(0x40 | rex_bits); |
| 139 } |
| 140 |
| 141 |
| 142 void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) { |
| 143 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3; |
| 144 if (rex_bits != 0) emit(0x40 | rex_bits); |
| 145 } |
| 146 |
| 147 |
| 148 void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) { |
| 149 byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3; |
| 150 if (rex_bits != 0) emit(0x40 | rex_bits); |
| 151 } |
| 152 |
| 153 |
| 125 void Assembler::emit_optional_rex_32(Register rm_reg) { | 154 void Assembler::emit_optional_rex_32(Register rm_reg) { |
| 126 if (rm_reg.high_bit()) emit(0x41); | 155 if (rm_reg.high_bit()) emit(0x41); |
| 127 } | 156 } |
| 128 | 157 |
| 129 | 158 |
| 130 void Assembler::emit_optional_rex_32(const Operand& op) { | 159 void Assembler::emit_optional_rex_32(const Operand& op) { |
| 131 if (op.rex_ != 0) emit(0x40 | op.rex_); | 160 if (op.rex_ != 0) emit(0x40 | op.rex_); |
| 132 } | 161 } |
| 133 | 162 |
| 134 | 163 |
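The `emit_optional_rex_32(XMMRegister, ...)` overloads added above compute the same R/B extension bits but with W=0, and emit nothing when no extended register (codes 8-15, i.e. r8-r15 or xmm8-xmm15) is involved, since a bare 0x40 prefix would be redundant. Another standalone sketch, again not V8 code: `optional_rex_32` is a hypothetical helper that returns 0 when no prefix byte would be emitted.

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical helper mirroring emit_optional_rex_32(XMMRegister, Register).
// Returns the prefix byte, or 0 when no prefix needs to be emitted.
uint8_t optional_rex_32(int reg_code, int base_code) {
  uint8_t rex_bits = ((reg_code & 0x8) >> 1) | ((base_code & 0x8) >> 3);
  return rex_bits != 0 ? (0x40 | rex_bits) : 0;
}

int main() {
  assert(optional_rex_32(1, 2) == 0);      // xmm1, rdx: no prefix needed.
  assert(optional_rex_32(9, 2) == 0x44);   // xmm9, rdx: REX.R only.
  assert(optional_rex_32(1, 12) == 0x41);  // xmm1, r12: REX.B only.
  return 0;
}
```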
| (...skipping 137 matching lines...) |
| 272 ASSERT(len_ == 1 || len_ == 2); | 301 ASSERT(len_ == 1 || len_ == 2); |
| 273 int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]); | 302 int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]); |
| 274 *p = disp; | 303 *p = disp; |
| 275 len_ += sizeof(int32_t); | 304 len_ += sizeof(int32_t); |
| 276 } | 305 } |
| 277 | 306 |
| 278 | 307 |
| 279 } } // namespace v8::internal | 308 } } // namespace v8::internal |
| 280 | 309 |
| 281 #endif // V8_X64_ASSEMBLER_X64_INL_H_ | 310 #endif // V8_X64_ASSEMBLER_X64_INL_H_ |