OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1348 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1359 | 1359 |
// Stores the low byte of |imm| at the memory location |dst|.
// Encoding: [optional REX] C6 /0 ib  (MOV r/m8, imm8).
void Assembler::movb(const Operand& dst, Immediate imm) {
  EnsureSpace ensure_space(this);  // Make sure the buffer can take the bytes.
  emit_optional_rex_32(dst);       // REX prefix only if dst needs extended regs.
  emit(0xC6);                      // Opcode: MOV r/m8, imm8.
  emit_operand(0x0, dst);          // ModRM (+SIB/disp) with /0 opcode extension.
  emit(static_cast<byte>(imm.value_));  // imm8: value truncated to one byte.
}
1367 | 1367 |
1368 | 1368 |
// Loads a 16-bit value from the memory location |src| into |dst|.
// Encoding: 66 [optional REX] 8B /r  (MOV r16, r/m16); the 0x66
// operand-size override must precede the REX prefix.
void Assembler::movw(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0x66);                      // Operand-size override: 16-bit operand.
  emit_optional_rex_32(dst, src);  // REX prefix only if extended regs are used.
  emit(0x8B);                      // Opcode: MOV r, r/m.
  emit_operand(dst, src);          // ModRM (+SIB/disp); dst in the reg field.
}
| 1376 |
| 1377 |
// Stores the low 16 bits of |src| at the memory location |dst|.
// Encoding: 66 [optional REX] 89 /r  (MOV r/m16, r16); the 0x66
// operand-size override must precede the REX prefix.
void Assembler::movw(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);                      // Operand-size override: 16-bit operand.
  emit_optional_rex_32(src, dst);  // REX prefix only if extended regs are used.
  emit(0x89);                      // Opcode: MOV r/m, r.
  emit_operand(src, dst);          // ModRM (+SIB/disp); src in the reg field.
}
1376 | 1385 |
1377 | 1386 |
1378 void Assembler::movw(const Operand& dst, Immediate imm) { | 1387 void Assembler::movw(const Operand& dst, Immediate imm) { |
(...skipping 1719 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3098 bool RelocInfo::IsCodedSpecially() { | 3107 bool RelocInfo::IsCodedSpecially() { |
3099 // The deserializer needs to know whether a pointer is specially coded. Being | 3108 // The deserializer needs to know whether a pointer is specially coded. Being |
3100 // specially coded on x64 means that it is a relative 32 bit address, as used | 3109 // specially coded on x64 means that it is a relative 32 bit address, as used |
3101 // by branch instructions. | 3110 // by branch instructions. |
3102 return (1 << rmode_) & kApplyMask; | 3111 return (1 << rmode_) & kApplyMask; |
3103 } | 3112 } |
3104 | 3113 |
3105 } } // namespace v8::internal | 3114 } } // namespace v8::internal |
3106 | 3115 |
3107 #endif // V8_TARGET_ARCH_X64 | 3116 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |