OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" | 5 #include "vm/globals.h" |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_generator.h" | 9 #include "vm/code_generator.h" |
10 #include "vm/heap.h" | 10 #include "vm/heap.h" |
(...skipping 487 matching lines...)
498 | 498 |
499 | 499 |
500 void Assembler::movaps(XmmRegister dst, XmmRegister src) { | 500 void Assembler::movaps(XmmRegister dst, XmmRegister src) { |
501 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 501 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
502 EmitUint8(0x0F); | 502 EmitUint8(0x0F); |
503 EmitUint8(0x28); | 503 EmitUint8(0x28); |
504 EmitXmmRegisterOperand(dst, src); | 504 EmitXmmRegisterOperand(dst, src); |
505 } | 505 } |
506 | 506 |
507 | 507 |
| 508 void Assembler::movups(XmmRegister dst, const Address& src) { |
| 509 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 510 EmitUint8(0x0F); |
| 511 EmitUint8(0x10); |
| 512 EmitOperand(dst, src); |
| 513 } |
| 514 |
| 515 |
| 516 void Assembler::movups(const Address& dst, XmmRegister src) { |
| 517 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 518 EmitUint8(0x0F); |
| 519 EmitUint8(0x11); |
| 520 EmitOperand(src, dst); |
| 521 } |
| 522 |
| 523 |
508 void Assembler::addsd(XmmRegister dst, XmmRegister src) { | 524 void Assembler::addsd(XmmRegister dst, XmmRegister src) { |
509 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 525 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
510 EmitUint8(0xF2); | 526 EmitUint8(0xF2); |
511 EmitUint8(0x0F); | 527 EmitUint8(0x0F); |
512 EmitUint8(0x58); | 528 EmitUint8(0x58); |
513 EmitXmmRegisterOperand(dst, src); | 529 EmitXmmRegisterOperand(dst, src); |
514 } | 530 } |
515 | 531 |
516 | 532 |
517 void Assembler::addsd(XmmRegister dst, const Address& src) { | 533 void Assembler::addsd(XmmRegister dst, const Address& src) { |
518 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 534 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
519 EmitUint8(0xF2); | 535 EmitUint8(0xF2); |
520 EmitUint8(0x0F); | 536 EmitUint8(0x0F); |
521 EmitUint8(0x58); | 537 EmitUint8(0x58); |
522 EmitOperand(dst, src); | 538 EmitOperand(dst, src); |
523 } | 539 } |
524 | 540 |
525 | 541 |
| 542 void Assembler::addps(XmmRegister dst, XmmRegister src) { |
| 543 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 544 EmitUint8(0x0F); |
| 545 EmitUint8(0x58); |
| 546 EmitXmmRegisterOperand(dst, src); |
| 547 } |
| 548 |
| 549 |
| 550 void Assembler::subps(XmmRegister dst, XmmRegister src) { |
| 551 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 552 EmitUint8(0x0F); |
| 553 EmitUint8(0x5C); |
| 554 EmitXmmRegisterOperand(dst, src); |
| 555 } |
| 556 |
| 557 |
| 558 void Assembler::divps(XmmRegister dst, XmmRegister src) { |
| 559 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 560 EmitUint8(0x0F); |
| 561 EmitUint8(0x5E); |
| 562 EmitXmmRegisterOperand(dst, src); |
| 563 } |
| 564 |
| 565 |
| 566 void Assembler::mulps(XmmRegister dst, XmmRegister src) { |
| 567 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 568 EmitUint8(0x0F); |
| 569 EmitUint8(0x59); |
| 570 EmitXmmRegisterOperand(dst, src); |
| 571 } |
| 572 |
| 573 |
| 574 void Assembler::minps(XmmRegister dst, XmmRegister src) { |
| 575 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 576 EmitUint8(0x0F); |
| 577 EmitUint8(0x5D); |
| 578 EmitXmmRegisterOperand(dst, src); |
| 579 } |
| 580 |
| 581 |
| 582 void Assembler::maxps(XmmRegister dst, XmmRegister src) { |
| 583 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 584 EmitUint8(0x0F); |
| 585 EmitUint8(0x5F); |
| 586 EmitXmmRegisterOperand(dst, src); |
| 587 } |
| 588 |
| 589 |
| 590 void Assembler::andps(XmmRegister dst, XmmRegister src) { |
| 591 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 592 EmitUint8(0x0F); |
| 593 EmitUint8(0x54); |
| 594 EmitXmmRegisterOperand(dst, src); |
| 595 } |
| 596 |
| 597 |
| 598 void Assembler::andps(XmmRegister dst, const Address& src) { |
| 599 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 600 EmitUint8(0x0F); |
| 601 EmitUint8(0x54); |
| 602 EmitOperand(dst, src); |
| 603 } |
| 604 |
| 605 |
| 606 void Assembler::orps(XmmRegister dst, XmmRegister src) { |
| 607 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 608 EmitUint8(0x0F); |
| 609 EmitUint8(0x56); |
| 610 EmitXmmRegisterOperand(dst, src); |
| 611 } |
| 612 |
| 613 |
| 614 void Assembler::notps(XmmRegister dst) { |
| 615 static const struct ALIGN16 { |
| 616 uint32_t a; |
| 617 uint32_t b; |
| 618 uint32_t c; |
| 619 uint32_t d; |
| 620 } float_not_constant = |
| 621 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF }; |
| 622 xorps(dst, |
| 623 Address::Absolute(reinterpret_cast<uword>(&float_not_constant))); |
| 624 } |
| 625 |
| 626 |
| 627 void Assembler::negateps(XmmRegister dst) { |
| 628 static const struct ALIGN16 { |
| 629 uint32_t a; |
| 630 uint32_t b; |
| 631 uint32_t c; |
| 632 uint32_t d; |
| 633 } float_negate_constant = |
| 634 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 }; |
| 635 xorps(dst, |
| 636 Address::Absolute(reinterpret_cast<uword>(&float_negate_constant))); |
| 637 } |
| 638 |
| 639 |
| 640 void Assembler::absps(XmmRegister dst) { |
| 641 static const struct ALIGN16 { |
| 642 uint32_t a; |
| 643 uint32_t b; |
| 644 uint32_t c; |
| 645 uint32_t d; |
| 646 } float_absolute_constant = |
| 647 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF }; |
| 648 andps(dst, |
| 649 Address::Absolute(reinterpret_cast<uword>(&float_absolute_constant))); |
| 650 } |
| 651 |
| 652 |
| 653 void Assembler::zerowps(XmmRegister dst) { |
| 654 static const struct ALIGN16 { |
| 655 uint32_t a; |
| 656 uint32_t b; |
| 657 uint32_t c; |
| 658 uint32_t d; |
| 659 } float_zerow_constant = |
| 660 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000000 }; |
| 661 andps(dst, Address::Absolute(reinterpret_cast<uword>(&float_zerow_constant))); |
| 662 } |
| 663 |
| 664 |
| 665 void Assembler::cmppseq(XmmRegister dst, XmmRegister src) { |
| 666 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 667 EmitUint8(0x0F); |
| 668 EmitUint8(0xC2); |
| 669 EmitXmmRegisterOperand(dst, src); |
| 670 EmitUint8(0x0); |
| 671 } |
| 672 |
| 673 |
| 674 void Assembler::cmppsneq(XmmRegister dst, XmmRegister src) { |
| 675 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 676 EmitUint8(0x0F); |
| 677 EmitUint8(0xC2); |
| 678 EmitXmmRegisterOperand(dst, src); |
| 679 EmitUint8(0x4); |
| 680 } |
| 681 |
| 682 |
| 683 void Assembler::cmppslt(XmmRegister dst, XmmRegister src) { |
| 684 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 685 EmitUint8(0x0F); |
| 686 EmitUint8(0xC2); |
| 687 EmitXmmRegisterOperand(dst, src); |
| 688 EmitUint8(0x1); |
| 689 } |
| 690 |
| 691 |
| 692 void Assembler::cmppsle(XmmRegister dst, XmmRegister src) { |
| 693 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 694 EmitUint8(0x0F); |
| 695 EmitUint8(0xC2); |
| 696 EmitXmmRegisterOperand(dst, src); |
| 697 EmitUint8(0x2); |
| 698 } |
| 699 |
| 700 |
| 701 void Assembler::cmppsnlt(XmmRegister dst, XmmRegister src) { |
| 702 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 703 EmitUint8(0x0F); |
| 704 EmitUint8(0xC2); |
| 705 EmitXmmRegisterOperand(dst, src); |
| 706 EmitUint8(0x5); |
| 707 } |
| 708 |
| 709 |
| 710 void Assembler::cmppsnle(XmmRegister dst, XmmRegister src) { |
| 711 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 712 EmitUint8(0x0F); |
| 713 EmitUint8(0xC2); |
| 714 EmitXmmRegisterOperand(dst, src); |
| 715 EmitUint8(0x6); |
| 716 } |
| 717 |
| 718 |
| 719 void Assembler::sqrtps(XmmRegister dst) { |
| 720 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 721 EmitUint8(0x0F); |
| 722 EmitUint8(0x51); |
| 723 EmitXmmRegisterOperand(dst, dst); |
| 724 } |
| 725 |
| 726 |
| 727 void Assembler::rsqrtps(XmmRegister dst) { |
| 728 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 729 EmitUint8(0x0F); |
| 730 EmitUint8(0x52); |
| 731 EmitXmmRegisterOperand(dst, dst); |
| 732 } |
| 733 |
| 734 |
| 735 void Assembler::reciprocalps(XmmRegister dst) { |
| 736 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 737 EmitUint8(0x0F); |
| 738 EmitUint8(0x53); |
| 739 EmitXmmRegisterOperand(dst, dst); |
| 740 } |
| 741 |
| 742 |
| 743 void Assembler::set1ps(XmmRegister dst, Register tmp1, const Immediate& imm) { |
| 744 // Load 32-bit immediate value into tmp1. |
| 745 movl(tmp1, imm); |
| 746 // Move value from tmp1 into dst. |
| 747 movd(dst, tmp1); |
| 748 // Broadcast low lane into other three lanes. |
| 749 shufps(dst, dst, Immediate(0x0)); |
| 750 } |
| 751 |
| 752 |
| 753 void Assembler::shufps(XmmRegister dst, XmmRegister src, const Immediate& imm) { |
| 754 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 755 EmitUint8(0x0F); |
| 756 EmitUint8(0xC6); |
| 757 EmitXmmRegisterOperand(dst, src); |
| 758 ASSERT(imm.is_uint8()); |
| 759 EmitUint8(imm.value()); |
| 760 } |
| 761 |
| 762 |
526 void Assembler::subsd(XmmRegister dst, XmmRegister src) { | 763 void Assembler::subsd(XmmRegister dst, XmmRegister src) { |
527 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 764 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
528 EmitUint8(0xF2); | 765 EmitUint8(0xF2); |
529 EmitUint8(0x0F); | 766 EmitUint8(0x0F); |
530 EmitUint8(0x5C); | 767 EmitUint8(0x5C); |
531 EmitXmmRegisterOperand(dst, src); | 768 EmitXmmRegisterOperand(dst, src); |
532 } | 769 } |
533 | 770 |
534 | 771 |
535 void Assembler::subsd(XmmRegister dst, const Address& src) { | 772 void Assembler::subsd(XmmRegister dst, const Address& src) { |
(...skipping 141 matching lines...)
677 | 914 |
678 void Assembler::movmskpd(Register dst, XmmRegister src) { | 915 void Assembler::movmskpd(Register dst, XmmRegister src) { |
679 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 916 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
680 EmitUint8(0x66); | 917 EmitUint8(0x66); |
681 EmitUint8(0x0F); | 918 EmitUint8(0x0F); |
682 EmitUint8(0x50); | 919 EmitUint8(0x50); |
683 EmitXmmRegisterOperand(dst, src); | 920 EmitXmmRegisterOperand(dst, src); |
684 } | 921 } |
685 | 922 |
686 | 923 |
| 924 void Assembler::movmskps(Register dst, XmmRegister src) { |
| 925 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| 926 EmitUint8(0x0F); |
| 927 EmitUint8(0x50); |
| 928 EmitXmmRegisterOperand(dst, src); |
| 929 } |
| 930 |
| 931 |
687 void Assembler::sqrtsd(XmmRegister dst, XmmRegister src) { | 932 void Assembler::sqrtsd(XmmRegister dst, XmmRegister src) { |
688 AssemblerBuffer::EnsureCapacity ensured(&buffer_); | 933 AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
689 EmitUint8(0xF2); | 934 EmitUint8(0xF2); |
690 EmitUint8(0x0F); | 935 EmitUint8(0x0F); |
691 EmitUint8(0x51); | 936 EmitUint8(0x51); |
692 EmitXmmRegisterOperand(dst, src); | 937 EmitXmmRegisterOperand(dst, src); |
693 } | 938 } |
694 | 939 |
695 | 940 |
696 void Assembler::sqrtss(XmmRegister dst, XmmRegister src) { | 941 void Assembler::sqrtss(XmmRegister dst, XmmRegister src) { |
(...skipping 1112 matching lines...)
1809 | 2054 |
1810 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { | 2055 void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) { |
1811 EnterFrame(0); | 2056 EnterFrame(0); |
1812 | 2057 |
1813 // Preserve volatile CPU registers. | 2058 // Preserve volatile CPU registers. |
1814 for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) { | 2059 for (intptr_t i = 0; i < kNumberOfVolatileCpuRegisters; i++) { |
1815 pushl(volatile_cpu_registers[i]); | 2060 pushl(volatile_cpu_registers[i]); |
1816 } | 2061 } |
1817 | 2062 |
1818 // Preserve all XMM registers except XMM0 | 2063 // Preserve all XMM registers except XMM0 |
1819 subl(ESP, Immediate((kNumberOfXmmRegisters - 1) * kDoubleSize)); | 2064 subl(ESP, Immediate((kNumberOfXmmRegisters - 1) * kFpuRegisterSize)); |
1820 // Store XMM registers with the lowest register number at the lowest | 2065 // Store XMM registers with the lowest register number at the lowest |
1821 // address. | 2066 // address. |
1822 intptr_t offset = 0; | 2067 intptr_t offset = 0; |
1823 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 2068 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
1824 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 2069 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
1825 movsd(Address(ESP, offset), xmm_reg); | 2070 movups(Address(ESP, offset), xmm_reg); |
1826 offset += kDoubleSize; | 2071 offset += kFpuRegisterSize; |
1827 } | 2072 } |
1828 | 2073 |
1829 ReserveAlignedFrameSpace(frame_space); | 2074 ReserveAlignedFrameSpace(frame_space); |
1830 } | 2075 } |
1831 | 2076 |
1832 | 2077 |
1833 void Assembler::LeaveCallRuntimeFrame() { | 2078 void Assembler::LeaveCallRuntimeFrame() { |
1834 // ESP might have been modified to reserve space for arguments | 2079 // ESP might have been modified to reserve space for arguments |
1835 // and ensure proper alignment of the stack frame. | 2080 // and ensure proper alignment of the stack frame. |
1836 // We need to restore it before restoring registers. | 2081 // We need to restore it before restoring registers. |
1837 const intptr_t kPushedRegistersSize = | 2082 const intptr_t kPushedRegistersSize = |
1838 kNumberOfVolatileCpuRegisters * kWordSize + | 2083 kNumberOfVolatileCpuRegisters * kWordSize + |
1839 kNumberOfVolatileXmmRegisters * kDoubleSize; | 2084 kNumberOfVolatileXmmRegisters * kFpuRegisterSize; |
1840 leal(ESP, Address(EBP, -kPushedRegistersSize)); | 2085 leal(ESP, Address(EBP, -kPushedRegistersSize)); |
1841 | 2086 |
1842 // Restore all XMM registers except XMM0 | 2087 // Restore all XMM registers except XMM0 |
1843 // XMM registers have the lowest register number at the lowest address. | 2088 // XMM registers have the lowest register number at the lowest address. |
1844 intptr_t offset = 0; | 2089 intptr_t offset = 0; |
1845 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { | 2090 for (intptr_t reg_idx = 1; reg_idx < kNumberOfXmmRegisters; ++reg_idx) { |
1846 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); | 2091 XmmRegister xmm_reg = static_cast<XmmRegister>(reg_idx); |
1847 movsd(xmm_reg, Address(ESP, offset)); | 2092 movups(xmm_reg, Address(ESP, offset)); |
1848 offset += kDoubleSize; | 2093 offset += kFpuRegisterSize; |
1849 } | 2094 } |
1850 addl(ESP, Immediate(offset)); | 2095 addl(ESP, Immediate(offset)); |
1851 | 2096 |
1852 // Restore volatile CPU registers. | 2097 // Restore volatile CPU registers. |
1853 for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) { | 2098 for (intptr_t i = kNumberOfVolatileCpuRegisters - 1; i >= 0; i--) { |
1854 popl(volatile_cpu_registers[i]); | 2099 popl(volatile_cpu_registers[i]); |
1855 } | 2100 } |
1856 | 2101 |
1857 leave(); | 2102 leave(); |
1858 } | 2103 } |
(...skipping 209 matching lines...)
2068 | 2313 |
2069 const char* Assembler::FpuRegisterName(FpuRegister reg) { | 2314 const char* Assembler::FpuRegisterName(FpuRegister reg) { |
2070 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); | 2315 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); |
2071 return xmm_reg_names[reg]; | 2316 return xmm_reg_names[reg]; |
2072 } | 2317 } |
2073 | 2318 |
2074 | 2319 |
2075 } // namespace dart | 2320 } // namespace dart |
2076 | 2321 |
2077 #endif // defined TARGET_ARCH_IA32 | 2322 #endif // defined TARGET_ARCH_IA32 |
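
Note on the added packed-single encoders: every new "ps" method above follows the same shape as the existing SSE helpers, emitting the two opcode bytes (no mandatory prefix for packed-single) followed by a ModRM byte with dst in the reg field and src in the r/m field, plus an imm8 for the cmpps family. The standalone sketch below is only an illustration of that byte pattern, not part of the Dart VM sources; it assumes XMM0-XMM7 map to ModRM values 0-7 and that EmitXmmRegisterOperand produces the usual mod=11 register-register ModRM byte. XmmModRM is a hypothetical helper introduced for the sketch.

// Standalone sketch (not part of the Dart VM sources): byte patterns the
// new packed-single encoders are expected to produce.
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical helper mirroring what EmitXmmRegisterOperand is assumed to
// emit for register-register forms: mod=11b, reg=dst, rm=src.
static uint8_t XmmModRM(int dst, int src) {
  return static_cast<uint8_t>(0xC0 | (dst << 3) | src);
}

int main() {
  // andps xmm1, xmm2  ->  0F 54 CA
  std::vector<uint8_t> andps = {0x0F, 0x54, XmmModRM(1, 2)};
  // cmpps xmm3, xmm4, 0 (cmppseq)  ->  0F C2 DC 00
  std::vector<uint8_t> cmppseq = {0x0F, 0xC2, XmmModRM(3, 4), 0x00};
  for (uint8_t b : andps) std::printf("%02X ", b);
  std::printf("\n");
  for (uint8_t b : cmppseq) std::printf("%02X ", b);
  std::printf("\n");
  return 0;
}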