OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2583 matching lines...)
2594 void Assembler::movdqa(XMMRegister dst, const Operand& src) { | 2594 void Assembler::movdqa(XMMRegister dst, const Operand& src) { |
2595 EnsureSpace ensure_space(this); | 2595 EnsureSpace ensure_space(this); |
2596 emit(0x66); | 2596 emit(0x66); |
2597 emit_rex_64(dst, src); | 2597 emit_rex_64(dst, src); |
2598 emit(0x0F); | 2598 emit(0x0F); |
2599 emit(0x6F); | 2599 emit(0x6F); |
2600 emit_sse_operand(dst, src); | 2600 emit_sse_operand(dst, src); |
2601 } | 2601 } |
2602 | 2602 |
2603 | 2603 |
| 2604 void Assembler::movdqu(const Operand& dst, XMMRegister src) { |
| 2605 EnsureSpace ensure_space(this); |
| 2606 emit(0xF3); |
| 2607 emit_rex_64(src, dst); |
| 2608 emit(0x0F); |
| 2609 emit(0x7F); |
| 2610 emit_sse_operand(src, dst); |
| 2611 } |
| 2612 |
| 2613 |
| 2614 void Assembler::movdqu(XMMRegister dst, const Operand& src) { |
| 2615 EnsureSpace ensure_space(this); |
| 2616 emit(0xF3); |
| 2617 emit_rex_64(dst, src); |
| 2618 emit(0x0F); |
| 2619 emit(0x6F); |
| 2620 emit_sse_operand(dst, src); |
| 2621 } |
| 2622 |
| 2623 |
2604 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) { | 2624 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) { |
2605 ASSERT(CpuFeatures::IsSupported(SSE4_1)); | 2625 ASSERT(CpuFeatures::IsSupported(SSE4_1)); |
2606 ASSERT(is_uint8(imm8)); | 2626 ASSERT(is_uint8(imm8)); |
2607 EnsureSpace ensure_space(this); | 2627 EnsureSpace ensure_space(this); |
2608 emit(0x66); | 2628 emit(0x66); |
2609 emit_optional_rex_32(dst, src); | 2629 emit_optional_rex_32(dst, src); |
2610 emit(0x0F); | 2630 emit(0x0F); |
2611 emit(0x3A); | 2631 emit(0x3A); |
2612 emit(0x17); | 2632 emit(0x17); |
2613 emit_sse_operand(dst, src); | 2633 emit_sse_operand(dst, src); |
(...skipping 471 matching lines...)
3085 bool RelocInfo::IsCodedSpecially() { | 3105 bool RelocInfo::IsCodedSpecially() { |
3086 // The deserializer needs to know whether a pointer is specially coded. Being | 3106 // The deserializer needs to know whether a pointer is specially coded. Being |
3087 // specially coded on x64 means that it is a relative 32 bit address, as used | 3107 // specially coded on x64 means that it is a relative 32 bit address, as used |
3088 // by branch instructions. | 3108 // by branch instructions. |
3089 return (1 << rmode_) & kApplyMask; | 3109 return (1 << rmode_) & kApplyMask; |
3090 } | 3110 } |
3091 | 3111 |
3092 } } // namespace v8::internal | 3112 } } // namespace v8::internal |
3093 | 3113 |
3094 #endif // V8_TARGET_ARCH_X64 | 3114 #endif // V8_TARGET_ARCH_X64 |
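
Note on the new movdqu emitters (a sketch, not part of the CL): MOVDQU uses the mandatory 0xF3 prefix where MOVDQA uses 0x66, with opcode 0F 6F for the load form and 0F 7F for the store form. Because both new emitters go through emit_rex_64, a REX prefix with W set is always emitted; REX.W is ignored by MOVDQU, so the result is still a 128-bit unaligned move. The standalone C++ snippet below is hypothetical illustration only (not V8 code): it just prints the byte sequence the new Assembler::movdqu(XMMRegister, const Operand&) overload should produce for movdqu xmm0, [rax].

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Bytes assumed to match the emitter in the diff above for xmm0 and a plain
  // [rax] operand (no extended registers, so REX.R = REX.X = REX.B = 0).
  std::vector<uint8_t> code;
  code.push_back(0xF3);  // mandatory prefix selecting MOVDQU
  code.push_back(0x48);  // REX.W, as emitted by emit_rex_64
  code.push_back(0x0F);  // two-byte opcode escape
  code.push_back(0x6F);  // MOVDQU xmm, xmm/m128 (0x7F would be the store form)
  code.push_back(0x00);  // ModRM: mod=00, reg=xmm0, rm=[rax]
  for (uint8_t b : code) std::printf("%02x ", b);  // prints: f3 48 0f 6f 00
  std::printf("\n");
  return 0;
}

In generated code these overloads would be reached through the usual Operand forms, e.g. something like movdqu(xmm0, Operand(rsp, 0)) for an unaligned 128-bit load from the stack.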