| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/x64/assembler-x64.h" | 5 #include "src/x64/assembler-x64.h" |
| 6 | 6 |
| 7 #include <cstring> | 7 #include <cstring> |
| 8 | 8 |
| 9 #if V8_TARGET_ARCH_X64 | 9 #if V8_TARGET_ARCH_X64 |
| 10 | 10 |
| (...skipping 2523 matching lines...) |
| 2534 emit_optional_rex_32(dst, src); | 2534 emit_optional_rex_32(dst, src); |
| 2535 emit(0x0F); | 2535 emit(0x0F); |
| 2536 emit(0x5E); | 2536 emit(0x5E); |
| 2537 emit_sse_operand(dst, src); | 2537 emit_sse_operand(dst, src); |
| 2538 } | 2538 } |
| 2539 | 2539 |
| 2540 | 2540 |
| 2541 // SSE 2 operations. | 2541 // SSE 2 operations. |
| 2542 | 2542 |
| 2543 void Assembler::movd(XMMRegister dst, Register src) { | 2543 void Assembler::movd(XMMRegister dst, Register src) { |
| 2544 DCHECK(!IsEnabled(AVX)); |
| 2544 EnsureSpace ensure_space(this); | 2545 EnsureSpace ensure_space(this); |
| 2545 emit(0x66); | 2546 emit(0x66); |
| 2546 emit_optional_rex_32(dst, src); | 2547 emit_optional_rex_32(dst, src); |
| 2547 emit(0x0F); | 2548 emit(0x0F); |
| 2548 emit(0x6E); | 2549 emit(0x6E); |
| 2549 emit_sse_operand(dst, src); | 2550 emit_sse_operand(dst, src); |
| 2550 } | 2551 } |
| 2551 | 2552 |
| 2552 | 2553 |
| 2553 void Assembler::movd(XMMRegister dst, const Operand& src) { | 2554 void Assembler::movd(XMMRegister dst, const Operand& src) { |
| 2555 DCHECK(!IsEnabled(AVX)); |
| 2554 EnsureSpace ensure_space(this); | 2556 EnsureSpace ensure_space(this); |
| 2555 emit(0x66); | 2557 emit(0x66); |
| 2556 emit_optional_rex_32(dst, src); | 2558 emit_optional_rex_32(dst, src); |
| 2557 emit(0x0F); | 2559 emit(0x0F); |
| 2558 emit(0x6E); | 2560 emit(0x6E); |
| 2559 emit_sse_operand(dst, src); | 2561 emit_sse_operand(dst, src); |
| 2560 } | 2562 } |
| 2561 | 2563 |
| 2562 | 2564 |
| 2563 void Assembler::movd(Register dst, XMMRegister src) { | 2565 void Assembler::movd(Register dst, XMMRegister src) { |
| 2566 DCHECK(!IsEnabled(AVX)); |
| 2564 EnsureSpace ensure_space(this); | 2567 EnsureSpace ensure_space(this); |
| 2565 emit(0x66); | 2568 emit(0x66); |
| 2566 emit_optional_rex_32(src, dst); | 2569 emit_optional_rex_32(src, dst); |
| 2567 emit(0x0F); | 2570 emit(0x0F); |
| 2568 emit(0x7E); | 2571 emit(0x7E); |
| 2569 emit_sse_operand(src, dst); | 2572 emit_sse_operand(src, dst); |
| 2570 } | 2573 } |
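The new `DCHECK(!IsEnabled(AVX))` guards above assert that the legacy SSE2 encodings of `movd` are only emitted while no AVX feature scope is active; AVX-enabled code paths are expected to use the VEX-encoded `vmovd` added later in this patch. A minimal caller-side sketch of that dispatch, assuming the usual `CpuFeatures::IsSupported` / `CpuFeatureScope` pattern (the helper name is hypothetical, not part of this patch):

```cpp
// Sketch only: emit the VEX form under an AVX scope so the DCHECKs in the
// legacy movd() and the new vmovd() match the encoding actually chosen.
void EmitMoveInt32ToXMM(Assembler* assm, XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope avx_scope(assm, AVX);  // IsEnabled(AVX) is now true
    assm->vmovd(dst, src);                 // VEX.128.66.0F.W0 6E /r
  } else {
    assm->movd(dst, src);                  // legacy 66 0F 6E /r
  }
}
```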
| 2571 | 2574 |
| 2572 | 2575 |
| 2573 void Assembler::movq(XMMRegister dst, Register src) { | 2576 void Assembler::movq(XMMRegister dst, Register src) { |
| 2577 DCHECK(!IsEnabled(AVX)); |
| 2574 EnsureSpace ensure_space(this); | 2578 EnsureSpace ensure_space(this); |
| 2575 emit(0x66); | 2579 emit(0x66); |
| 2576 emit_rex_64(dst, src); | 2580 emit_rex_64(dst, src); |
| 2577 emit(0x0F); | 2581 emit(0x0F); |
| 2578 emit(0x6E); | 2582 emit(0x6E); |
| 2579 emit_sse_operand(dst, src); | 2583 emit_sse_operand(dst, src); |
| 2580 } | 2584 } |
| 2581 | 2585 |
| 2582 | 2586 |
| 2583 void Assembler::movq(Register dst, XMMRegister src) { | 2587 void Assembler::movq(Register dst, XMMRegister src) { |
| 2588 DCHECK(!IsEnabled(AVX)); |
| 2584 EnsureSpace ensure_space(this); | 2589 EnsureSpace ensure_space(this); |
| 2585 emit(0x66); | 2590 emit(0x66); |
| 2586 emit_rex_64(src, dst); | 2591 emit_rex_64(src, dst); |
| 2587 emit(0x0F); | 2592 emit(0x0F); |
| 2588 emit(0x7E); | 2593 emit(0x7E); |
| 2589 emit_sse_operand(src, dst); | 2594 emit_sse_operand(src, dst); |
| 2590 } | 2595 } |
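Unlike the 32-bit `movd` overloads, the GPR<->XMM `movq` forms go through `emit_rex_64`, so a mandatory REX.W prefix widens the move to 64 bits (66 REX.W 0F 6E /r and 66 REX.W 0F 7E /r). A hand-worked illustration of the difference (computed from the opcode tables, not taken from this patch):

```cpp
// movq(xmm1, rax): 0x66, REX.W (0x48), 0F 6E, ModRM 0xC8 (reg=xmm1, rm=rax).
const uint8_t kMovqXmm1Rax[] = {0x66, 0x48, 0x0F, 0x6E, 0xC8};
// movd(xmm1, eax): same opcode, but the optional REX byte is dropped.
const uint8_t kMovdXmm1Eax[] = {0x66, 0x0F, 0x6E, 0xC8};
```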
| 2591 | 2596 |
| 2592 | 2597 |
| 2593 void Assembler::movq(XMMRegister dst, XMMRegister src) { | 2598 void Assembler::movq(XMMRegister dst, XMMRegister src) { |
| 2599 DCHECK(!IsEnabled(AVX)); |
| 2594 EnsureSpace ensure_space(this); | 2600 EnsureSpace ensure_space(this); |
| 2595 if (dst.low_bits() == 4) { | 2601 if (dst.low_bits() == 4) { |
| 2596 // Avoid unnecessary SIB byte. | 2602 // Avoid unnecessary SIB byte. |
| 2597 emit(0xf3); | 2603 emit(0xf3); |
| 2598 emit_optional_rex_32(dst, src); | 2604 emit_optional_rex_32(dst, src); |
| 2599 emit(0x0F); | 2605 emit(0x0F); |
| 2600 emit(0x7e); | 2606 emit(0x7e); |
| 2601 emit_sse_operand(dst, src); | 2607 emit_sse_operand(dst, src); |
| 2602 } else { | 2608 } else { |
| 2603 emit(0x66); | 2609 emit(0x66); |
| (...skipping 870 matching lines...) |
| 3474 void Assembler::vfmass(byte op, XMMRegister dst, XMMRegister src1, | 3480 void Assembler::vfmass(byte op, XMMRegister dst, XMMRegister src1, |
| 3475 const Operand& src2) { | 3481 const Operand& src2) { |
| 3476 DCHECK(IsEnabled(FMA3)); | 3482 DCHECK(IsEnabled(FMA3)); |
| 3477 EnsureSpace ensure_space(this); | 3483 EnsureSpace ensure_space(this); |
| 3478 emit_vex_prefix(dst, src1, src2, kLIG, k66, k0F38, kW0); | 3484 emit_vex_prefix(dst, src1, src2, kLIG, k66, k0F38, kW0); |
| 3479 emit(op); | 3485 emit(op); |
| 3480 emit_sse_operand(dst, src2); | 3486 emit_sse_operand(dst, src2); |
| 3481 } | 3487 } |
| 3482 | 3488 |
| 3483 | 3489 |
| 3490 void Assembler::vmovd(XMMRegister dst, Register src) { |
| 3491 DCHECK(IsEnabled(AVX)); |
| 3492 EnsureSpace ensure_space(this); |
| 3493 XMMRegister isrc = {src.code()}; |
| 3494 emit_vex_prefix(dst, xmm0, isrc, kL128, k66, k0F, kW0); |
| 3495 emit(0x6e); |
| 3496 emit_sse_operand(dst, src); |
| 3497 } |
| 3498 |
| 3499 |
| 3500 void Assembler::vmovd(XMMRegister dst, const Operand& src) { |
| 3501 DCHECK(IsEnabled(AVX)); |
| 3502 EnsureSpace ensure_space(this); |
| 3503 emit_vex_prefix(dst, xmm0, src, kL128, k66, k0F, kW0); |
| 3504 emit(0x6e); |
| 3505 emit_sse_operand(dst, src); |
| 3506 } |
| 3507 |
| 3508 |
| 3509 void Assembler::vmovd(Register dst, XMMRegister src) { |
| 3510 DCHECK(IsEnabled(AVX)); |
| 3511 EnsureSpace ensure_space(this); |
| 3512 XMMRegister idst = {dst.code()}; |
| 3513 emit_vex_prefix(src, xmm0, idst, kL128, k66, k0F, kW0); |
| 3514 emit(0x7e); |
| 3515 emit_sse_operand(src, dst); |
| 3516 } |
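The register overload of `emit_vex_prefix` takes XMM operands, so the new `vmovd` overloads wrap the general-purpose register in a same-coded `XMMRegister` (`isrc`/`idst`) purely for prefix emission, and pass `xmm0` to fill the unused VEX.vvvv field. For low registers with W0 and the 0F opcode map this should collapse to the compact two-byte VEX prefix; a hand-worked example under that assumption (not output quoted from this patch):

```cpp
// vmovd(xmm1, eax), assuming the two-byte VEX form is selected:
// C5 (2-byte VEX), F9 (R clear, vvvv=1111 i.e. unused, L=0, pp=66), 6E, C8.
const uint8_t kVmovdXmm1Eax[] = {0xC5, 0xF9, 0x6E, 0xC8};
```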
| 3517 |
| 3518 |
| 3519 void Assembler::vmovq(XMMRegister dst, Register src) { |
| 3520 DCHECK(IsEnabled(AVX)); |
| 3521 EnsureSpace ensure_space(this); |
| 3522 XMMRegister isrc = {src.code()}; |
| 3523 emit_vex_prefix(dst, xmm0, isrc, kL128, k66, k0F, kW1); |
| 3524 emit(0x6e); |
| 3525 emit_sse_operand(dst, src); |
| 3526 } |
| 3527 |
| 3528 |
| 3529 void Assembler::vmovq(XMMRegister dst, const Operand& src) { |
| 3530 DCHECK(IsEnabled(AVX)); |
| 3531 EnsureSpace ensure_space(this); |
| 3532 emit_vex_prefix(dst, xmm0, src, kL128, k66, k0F, kW1); |
| 3533 emit(0x6e); |
| 3534 emit_sse_operand(dst, src); |
| 3535 } |
| 3536 |
| 3537 |
| 3538 void Assembler::vmovq(Register dst, XMMRegister src) { |
| 3539 DCHECK(IsEnabled(AVX)); |
| 3540 EnsureSpace ensure_space(this); |
| 3541 XMMRegister idst = {dst.code()}; |
| 3542 emit_vex_prefix(src, xmm0, idst, kL128, k66, k0F, kW1); |
| 3543 emit(0x7e); |
| 3544 emit_sse_operand(src, dst); |
| 3545 } |
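The `vmovq` overloads differ from `vmovd` only in passing `kW1`, which sets VEX.W and selects the 64-bit GPR operand; since W has to be encoded, the three-byte VEX prefix is required. Again a hand-worked illustration rather than patch output:

```cpp
// vmovq(xmm1, rax): C4 (3-byte VEX), E1 (R/X/B clear, opcode map 0F),
// F9 (W=1, vvvv unused, L=0, pp=66), 6E, C8.
const uint8_t kVmovqXmm1Rax[] = {0xC4, 0xE1, 0xF9, 0x6E, 0xC8};
```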
| 3546 |
| 3547 |
| 3484 void Assembler::vmovapd(XMMRegister dst, XMMRegister src) { | 3548 void Assembler::vmovapd(XMMRegister dst, XMMRegister src) { |
| 3485 DCHECK(IsEnabled(AVX)); | 3549 DCHECK(IsEnabled(AVX)); |
| 3486 EnsureSpace ensure_space(this); | 3550 EnsureSpace ensure_space(this); |
| 3487 emit_vex_prefix(dst, xmm0, src, kLIG, k66, k0F, kWIG); | 3551 emit_vex_prefix(dst, xmm0, src, kLIG, k66, k0F, kWIG); |
| 3488 emit(0x28); | 3552 emit(0x28); |
| 3489 emit_sse_operand(dst, src); | 3553 emit_sse_operand(dst, src); |
| 3490 } | 3554 } |
| 3491 | 3555 |
| 3492 | 3556 |
| 3493 void Assembler::vucomisd(XMMRegister dst, XMMRegister src) { | 3557 void Assembler::vucomisd(XMMRegister dst, XMMRegister src) { |
| (...skipping 467 matching lines...) |
| 3961 | 4025 |
| 3962 bool RelocInfo::IsInConstantPool() { | 4026 bool RelocInfo::IsInConstantPool() { |
| 3963 return false; | 4027 return false; |
| 3964 } | 4028 } |
| 3965 | 4029 |
| 3966 | 4030 |
| 3967 } // namespace internal | 4031 } // namespace internal |
| 3968 } // namespace v8 | 4032 } // namespace v8 |
| 3969 | 4033 |
| 3970 #endif // V8_TARGET_ARCH_X64 | 4034 #endif // V8_TARGET_ARCH_X64 |