OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/x64/assembler-x64.h" | 5 #include "src/x64/assembler-x64.h" |
6 | 6 |
7 #include <cstring> | 7 #include <cstring> |
8 | 8 |
9 #if V8_TARGET_ARCH_X64 | 9 #if V8_TARGET_ARCH_X64 |
10 | 10 |
(...skipping 3492 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Emits a VEX-encoded scalar-double ("sd") SSE instruction of the form
// `op dst, src1, src2`, where src2 is a memory operand.
//   op   - opcode byte in the 0F map.
//   dst  - destination XMM register.
//   src1 - first source XMM register (encoded in VEX.vvvv).
//   src2 - second source, a memory operand.
void Assembler::vsd(byte op, XMMRegister dst, XMMRegister src1,
                    const Operand& src2) {
  DCHECK(IsEnabled(AVX));  // Caller must guarantee AVX is available.
  EnsureSpace ensure_space(this);  // RAII: grow the code buffer if needed.
  // VEX prefix: scalar ops ignore vector length (kLIG); the F2 mandatory
  // prefix selects the "sd" variant; opcode lives in the 0F map; REX.W
  // is ignored (kWIG).
  emit_vex_prefix(dst, src1, src2, kLIG, kF2, k0F, kWIG);
  emit(op);
  emit_sse_operand(dst, src2);  // ModRM (+ SIB/disp) bytes for dst, src2.
}
3511 | 3511 |
3512 | 3512 |
// Emits a VEX-encoded packed-single ("ps") SSE instruction of the form
// `op dst, src1, src2`, with a register second source.
//   op   - opcode byte in the 0F map.
//   dst  - destination XMM register.
//   src1 - first source XMM register (encoded in VEX.vvvv).
//   src2 - second source XMM register.
void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1,
                    XMMRegister src2) {
  DCHECK(IsEnabled(AVX));  // Caller must guarantee AVX is available.
  EnsureSpace ensure_space(this);  // RAII: grow the code buffer if needed.
  // 128-bit vector length (kL128); no mandatory SIMD prefix (kNone)
  // selects the "ps" form; opcode in the 0F map; REX.W ignored.
  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
  emit(op);
  emit_sse_operand(dst, src2);  // ModRM byte for dst, src2.
}
| 3521 |
| 3522 |
// Emits a VEX-encoded packed-single ("ps") SSE instruction of the form
// `op dst, src1, src2`, where src2 is a memory operand.
//   op   - opcode byte in the 0F map.
//   dst  - destination XMM register.
//   src1 - first source XMM register (encoded in VEX.vvvv).
//   src2 - second source, a memory operand.
void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1,
                    const Operand& src2) {
  DCHECK(IsEnabled(AVX));  // Caller must guarantee AVX is available.
  EnsureSpace ensure_space(this);  // RAII: grow the code buffer if needed.
  // 128-bit vector length (kL128); no mandatory SIMD prefix (kNone)
  // selects the "ps" form; opcode in the 0F map; REX.W ignored.
  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
  emit(op);
  emit_sse_operand(dst, src2);  // ModRM (+ SIB/disp) bytes for dst, src2.
}
| 3531 |
| 3532 |
// Emits a VEX-encoded packed-double ("pd") SSE instruction of the form
// `op dst, src1, src2`, with a register second source.
//   op   - opcode byte in the 0F map.
//   dst  - destination XMM register.
//   src1 - first source XMM register (encoded in VEX.vvvv).
//   src2 - second source XMM register.
void Assembler::vpd(byte op, XMMRegister dst, XMMRegister src1,
                    XMMRegister src2) {
  DCHECK(IsEnabled(AVX));  // Caller must guarantee AVX is available.
  EnsureSpace ensure_space(this);  // RAII: grow the code buffer if needed.
  // 128-bit vector length (kL128); the 66 mandatory prefix selects the
  // "pd" form; opcode in the 0F map; REX.W ignored.
  emit_vex_prefix(dst, src1, src2, kL128, k66, k0F, kWIG);
  emit(op);
  emit_sse_operand(dst, src2);  // ModRM byte for dst, src2.
}
| 3541 |
| 3542 |
// Emits a VEX-encoded packed-double ("pd") SSE instruction of the form
// `op dst, src1, src2`, where src2 is a memory operand.
//   op   - opcode byte in the 0F map.
//   dst  - destination XMM register.
//   src1 - first source XMM register (encoded in VEX.vvvv).
//   src2 - second source, a memory operand.
void Assembler::vpd(byte op, XMMRegister dst, XMMRegister src1,
                    const Operand& src2) {
  DCHECK(IsEnabled(AVX));  // Caller must guarantee AVX is available.
  EnsureSpace ensure_space(this);  // RAII: grow the code buffer if needed.
  // 128-bit vector length (kL128); the 66 mandatory prefix selects the
  // "pd" form; opcode in the 0F map; REX.W ignored.
  emit_vex_prefix(dst, src1, src2, kL128, k66, k0F, kWIG);
  emit(op);
  emit_sse_operand(dst, src2);  // ModRM (+ SIB/disp) bytes for dst, src2.
}
| 3551 |
| 3552 |
// Emits VUCOMISS dst, src: the VEX-encoded unordered compare of scalar
// single-precision floats (result goes to EFLAGS per the Intel SDM).
// VUCOMISS takes only two operands, so VEX.vvvv is unused and must encode
// 1111b — passing xmm0 as the "src1" slot produces that encoding.
void Assembler::vucomiss(XMMRegister dst, XMMRegister src) {
  DCHECK(IsEnabled(AVX));  // Caller must guarantee AVX is available.
  EnsureSpace ensure_space(this);  // RAII: grow the code buffer if needed.
  // Scalar op: vector length ignored (kLIG); no mandatory prefix (kNone)
  // selects the "ss" family; opcode in the 0F map; REX.W ignored.
  emit_vex_prefix(dst, xmm0, src, kLIG, kNone, k0F, kWIG);
  emit(0x2e);  // 0F 2E = UCOMISS opcode.
  emit_sse_operand(dst, src);  // ModRM byte for dst, src.
}
3520 | 3560 |
3521 | 3561 |
3522 void Assembler::vucomiss(XMMRegister dst, const Operand& src) { | 3562 void Assembler::vucomiss(XMMRegister dst, const Operand& src) { |
(...skipping 387 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3910 | 3950 |
3911 | 3951 |
// x64 embeds relocated values directly in the instruction stream rather
// than in a constant pool, so this is always false on this architecture.
bool RelocInfo::IsInConstantPool() {
  return false;
}
3915 | 3955 |
3916 | 3956 |
3917 } } // namespace v8::internal | 3957 } } // namespace v8::internal |
3918 | 3958 |
3919 #endif // V8_TARGET_ARCH_X64 | 3959 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |