| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2639 matching lines...) |
| 2650 } else { | 2650 } else { |
| 2651 emit(0x66); | 2651 emit(0x66); |
| 2652 emit_optional_rex_32(dst, src); | 2652 emit_optional_rex_32(dst, src); |
| 2653 emit(0x0F); | 2653 emit(0x0F); |
| 2654 emit(0x28); | 2654 emit(0x28); |
| 2655 emit_sse_operand(dst, src); | 2655 emit_sse_operand(dst, src); |
| 2656 } | 2656 } |
| 2657 } | 2657 } |
| 2658 | 2658 |
| 2659 | 2659 |
| 2660 void Assembler::movddup(XMMRegister dst, const Operand& src) { |
| 2661 ASSERT(CpuFeatures::IsEnabled(SSE3)); |
| 2662 EnsureSpace ensure_space(this); |
| 2663 emit(0xF2); |
| 2664 emit_optional_rex_32(dst, src); |
| 2665 emit(0x0F); |
| 2666 emit(0x12); |
| 2667 emit_sse_operand(dst, src); |
| 2668 } |
| 2669 |
| 2670 |
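[Reviewer note: the new emitter above encodes MOVDDUP (F2 0F 12 /r), an SSE3 instruction that duplicates the low 64-bit double of the source into both halves of dst, hence the SSE3 ASSERT. For illustration only, a register-to-register overload would reuse the same byte sequence; this sketch is not part of the change, and it assumes the usual XMMRegister/XMMRegister emit_optional_rex_32 and emit_sse_operand helpers found elsewhere in this file.]

    void Assembler::movddup(XMMRegister dst, XMMRegister src) {
      ASSERT(CpuFeatures::IsEnabled(SSE3));
      EnsureSpace ensure_space(this);
      emit(0xF2);                      // mandatory prefix selecting MOVDDUP
      emit_optional_rex_32(dst, src);  // REX only needed for xmm8..xmm15
      emit(0x0F);
      emit(0x12);                      // MOVDDUP opcode
      emit_sse_operand(dst, src);      // ModRM: dst in reg field, src in rm
    }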
| 2660 void Assembler::movss(XMMRegister dst, const Operand& src) { | 2671 void Assembler::movss(XMMRegister dst, const Operand& src) { |
| 2661 EnsureSpace ensure_space(this); | 2672 EnsureSpace ensure_space(this); |
| 2662 emit(0xF3); // single | 2673 emit(0xF3); // single |
| 2663 emit_optional_rex_32(dst, src); | 2674 emit_optional_rex_32(dst, src); |
| 2664 emit(0x0F); | 2675 emit(0x0F); |
| 2665 emit(0x10); // load | 2676 emit(0x10); // load |
| 2666 emit_sse_operand(dst, src); | 2677 emit_sse_operand(dst, src); |
| 2667 } | 2678 } |
| 2668 | 2679 |
| 2669 | 2680 |
| (...skipping 362 matching lines...) |
| 3032 // specially coded on x64 means that it is a relative 32 bit address, as used | 3043 // specially coded on x64 means that it is a relative 32 bit address, as used |
| 3033 // by branch instructions. | 3044 // by branch instructions. |
| 3034 return (1 << rmode_) & kApplyMask; | 3045 return (1 << rmode_) & kApplyMask; |
| 3035 } | 3046 } |
| 3036 | 3047 |
| 3037 | 3048 |
| 3038 | 3049 |
| 3039 } } // namespace v8::internal | 3050 } } // namespace v8::internal |
| 3040 | 3051 |
| 3041 #endif // V8_TARGET_ARCH_X64 | 3052 #endif // V8_TARGET_ARCH_X64 |