OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 801 matching lines...)
812 #endif // def DEBUG | 812 #endif // def DEBUG |
813 if (assembler != NULL && assembler->predictable_code_size()) return true; | 813 if (assembler != NULL && assembler->predictable_code_size()) return true; |
814 return Serializer::enabled(); | 814 return Serializer::enabled(); |
815 } else if (rmode_ == RelocInfo::NONE) { | 815 } else if (rmode_ == RelocInfo::NONE) { |
816 return false; | 816 return false; |
817 } | 817 } |
818 return true; | 818 return true; |
819 } | 819 } |
820 | 820 |
821 | 821 |
| 822 static bool use_movw_movt(const Operand& x, const Assembler* assembler) { |
| 823 if (Assembler::use_immediate_constant_pool_loads(assembler)) { |
| 824 return true; |
| 825 } |
| 826 if (x.must_output_reloc_info(assembler)) { |
| 827 return false; |
| 828 } |
| 829 return CpuFeatures::IsSupported(ARMv7); |
| 830 } |
| 831 |
| 832 |
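The new use_movw_movt() helper above encodes a three-way decision: prefer a movw/movt pair whenever the platform asks for immediate constant loads, fall back to the constant pool whenever the operand must carry relocation info (the serializer and predictable-code-size cases), and otherwise use movw/movt only where ARMv7 guarantees the encodings exist. A stand-alone sketch of that decision, with hypothetical boolean parameters standing in for the real Assembler, Operand and CpuFeatures queries:

    // Sketch only; the parameter names are illustrative stand-ins for
    // Assembler::use_immediate_constant_pool_loads(assembler),
    // x.must_output_reloc_info(assembler) and CpuFeatures::IsSupported(ARMv7).
    static bool UseMovwMovtSketch(bool platform_prefers_immediate_loads,
                                  bool operand_needs_reloc_info,
                                  bool has_armv7) {
      if (platform_prefers_immediate_loads) return true;   // always movw/movt
      if (operand_needs_reloc_info) return false;          // keep a pool entry
      return has_armv7;  // movw/movt encodings exist only from ARMv7 onwards
    }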
822 bool Operand::is_single_instruction(const Assembler* assembler, | 833 bool Operand::is_single_instruction(const Assembler* assembler, |
823 Instr instr) const { | 834 Instr instr) const { |
824 if (rm_.is_valid()) return true; | 835 if (rm_.is_valid()) return true; |
825 uint32_t dummy1, dummy2; | 836 uint32_t dummy1, dummy2; |
826 if (must_output_reloc_info(assembler) || | 837 if (must_output_reloc_info(assembler) || |
827 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { | 838 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { |
828 // The immediate operand cannot be encoded as a shifter operand, or use of | 839 // The immediate operand cannot be encoded as a shifter operand, or use of |
829 // the constant pool is required. For a mov instruction not setting the | 840 // the constant pool is required. For a mov instruction not setting the |
830 // condition code, additional instruction conventions can be used. | 841 // condition code, additional instruction conventions can be used. |
831 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 842 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
832 #ifdef USE_BLX | 843 return !use_movw_movt(*this, assembler); |
833 // When using BLX, there are two things that must be true for the address | |
834 // load to be longer than a single instruction. First, immediate loads | |
835 // using movw/movt must be supported (and fast) on the target ARM | |
836 // architecture. Second, the reloc mode must be something other than NONE, | |
837 // since NONE is used whenever the constant pool cannot be used for |
838 // technical reasons, e.g. back-patching call sites in optimized code with |
839 // a call to a lazy deopt routine. | |
840 return !Assembler::allow_immediate_constant_pool_loads(assembler) && | |
841 rmode_ != RelocInfo::NONE; | |
842 #else | |
843 // It's not possible to use immediate loads to the pc to do a call (the |
844 // pc would be inconsistent half-way through the load), so loading the | |
845 // destination address without USE_BLX is always a single instruction of | |
846 // the form ldr pc, [pc + #xxx]. | |
847 return true; | |
848 #endif | |
849 } else { | 844 } else { |
850 // If this is not a mov or mvn instruction there will always be an | 845 // If this is not a mov or mvn instruction there will always be an |
851 // additional instruction - either a mov or an ldr. The mov might actually | 846 // additional instruction - either a mov or an ldr. The mov might actually |
852 // be two instructions (a movw followed by a movt), so including the actual | 847 // be two instructions (a movw followed by a movt), so including the actual |
853 // instruction, two or three instructions will be generated. | 848 // instruction, two or three instructions will be generated. |
854 return false; | 849 return false; |
855 } | 850 } |
856 } else { | 851 } else { |
857 // No use of constant pool and the immediate operand can be encoded as a | 852 // No use of constant pool and the immediate operand can be encoded as a |
858 // shifter operand. | 853 // shifter operand. |
859 return true; | 854 return true; |
860 } | 855 } |
861 } | 856 } |
862 | 857 |
863 | 858 |
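Whether is_single_instruction() answers true ultimately turns on whether the immediate fits ARM's shifter-operand encoding, an 8-bit value rotated right by an even amount. A self-contained illustration of that rule, assuming this is the core check performed by fits_shifter() (the real helper has extra cases, e.g. rewriting to mvn or movw):

    // Sketch of the ARM data-processing immediate rule: the value must equal
    // an 8-bit constant rotated right by 0, 2, ..., 30 bit positions.
    static bool FitsArmImmediateSketch(uint32_t imm) {
      for (int rot = 0; rot < 32; rot += 2) {
        // Rotating the value left by 'rot' undoes a rotate-right by 'rot'.
        uint32_t v = (rot == 0) ? imm : ((imm << rot) | (imm >> (32 - rot)));
        if (v <= 0xffu) return true;
      }
      return false;
    }
    // FitsArmImmediateSketch(0xFF000000) -> true (0xFF ror 8): one instruction.
    // FitsArmImmediateSketch(0x12345678) -> false: movw/movt or a pool load.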
864 void Assembler::move_32_bit_immediate(Condition cond, | 859 void Assembler::move_32_bit_immediate(Condition cond, |
865 Register rd, | 860 Register rd, |
866 SBit s, | 861 SBit s, |
867 const Operand& x) { | 862 const Operand& x) { |
868 if (rd.code() != pc.code() && s == LeaveCC) { | 863 if (rd.code() != pc.code() && s == LeaveCC) { |
869 // Candidate for immediate load. | 864 if (use_movw_movt(x, this)) { |
870 if (x.must_output_reloc_info(this)) { | 865 if (x.must_output_reloc_info(this)) { |
871 if (!Assembler::allow_immediate_constant_pool_loads(this)) { | 866 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); |
872 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | 867 // Make sure the movw/movt doesn't get separated. |
873 ldr(rd, MemOperand(pc, 0), cond); | 868 BlockConstPoolFor(2); |
874 return; | |
875 } | 869 } |
876 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); | 870 emit(cond | 0x30*B20 | rd.code()*B12 | |
877 // Make sure the movw/movt doesn't get separated. | 871 EncodeMovwImmediate(x.imm32_ & 0xffff)); |
878 BlockConstPoolFor(2); | 872 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); |
| 873 return; |
879 } | 874 } |
| 875 } |
880 | 876 |
881 // Emit a real movw/movt pair. | 877 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); |
882 emit(cond | 0x30*B20 | rd.code()*B12 | | 878 ldr(rd, MemOperand(pc, 0), cond); |
883 EncodeMovwImmediate(x.imm32_ & 0xffff)); | |
884 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); | |
885 } else { | |
886 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | |
887 ldr(rd, MemOperand(pc, 0), cond); | |
888 } | |
889 } | 879 } |
890 | 880 |
891 | 881 |
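When use_movw_movt() holds, move_32_bit_immediate() above splits the constant into two halves: the movw encoding carries the low 16 bits and clears the upper half of rd, and movt then fills in the high 16 bits without touching the low half. A worked example with an arbitrary constant (illustrative values only, not the emit path itself):

    // Splitting a 32-bit constant across a movw/movt pair (sketch).
    uint32_t imm  = 0xDEADBEEF;    // example constant
    uint32_t low  = imm & 0xffff;  // 0xBEEF -> movw rd, #0xBEEF
    uint32_t high = imm >> 16;     // 0xDEAD -> movt rd, #0xDEAD

The BlockConstPoolFor(2) call in the relocation-info branch keeps the constant pool from being emitted between the two halves of the pair, which is what the "Make sure the movw/movt doesn't get separated" comment refers to.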
892 void Assembler::addrmod1(Instr instr, | 882 void Assembler::addrmod1(Instr instr, |
893 Register rn, | 883 Register rn, |
894 Register rd, | 884 Register rd, |
895 const Operand& x) { | 885 const Operand& x) { |
896 CheckBuffer(); | 886 CheckBuffer(); |
897 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); | 887 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); |
898 if (!x.rm_.is_valid()) { | 888 if (!x.rm_.is_valid()) { |
(...skipping 10 matching lines...)
909 Condition cond = Instruction::ConditionField(instr); | 899 Condition cond = Instruction::ConditionField(instr); |
910 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 900 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
911 move_32_bit_immediate(cond, rd, LeaveCC, x); | 901 move_32_bit_immediate(cond, rd, LeaveCC, x); |
912 } else { | 902 } else { |
913 // If this is not a mov or mvn instruction we may still be able to avoid | 903 // If this is not a mov or mvn instruction we may still be able to avoid |
914 // a constant pool entry by using mvn or movw. | 904 // a constant pool entry by using mvn or movw. |
915 if (!x.must_output_reloc_info(this) && | 905 if (!x.must_output_reloc_info(this) && |
916 (instr & kMovMvnMask) != kMovMvnPattern) { | 906 (instr & kMovMvnMask) != kMovMvnPattern) { |
917 mov(ip, x, LeaveCC, cond); | 907 mov(ip, x, LeaveCC, cond); |
918 } else { | 908 } else { |
919 move_32_bit_immediate(cond, ip, | 909 move_32_bit_immediate(cond, ip, LeaveCC, x); |
920 static_cast<SBit>(instr & (1 << 20)), x); | |
921 } | 910 } |
922 addrmod1(instr, rn, rd, Operand(ip)); | 911 addrmod1(instr, rn, rd, Operand(ip)); |
923 } | 912 } |
924 return; | 913 return; |
925 } | 914 } |
926 instr |= I | rotate_imm*B8 | immed_8; | 915 instr |= I | rotate_imm*B8 | immed_8; |
927 } else if (!x.rs_.is_valid()) { | 916 } else if (!x.rs_.is_valid()) { |
928 // Immediate shift. | 917 // Immediate shift. |
929 instr |= x.shift_imm_*B7 | x.shift_op_ | x.rm_.code(); | 918 instr |= x.shift_imm_*B7 | x.shift_op_ | x.rm_.code(); |
930 } else { | 919 } else { |
(...skipping 1848 matching lines...)
2779 | 2768 |
2780 // Since a constant pool was just emitted, move the check offset forward by | 2769 // Since a constant pool was just emitted, move the check offset forward by |
2781 // the standard interval. | 2770 // the standard interval. |
2782 next_buffer_check_ = pc_offset() + kCheckPoolInterval; | 2771 next_buffer_check_ = pc_offset() + kCheckPoolInterval; |
2783 } | 2772 } |
2784 | 2773 |
2785 | 2774 |
2786 } } // namespace v8::internal | 2775 } } // namespace v8::internal |
2787 | 2776 |
2788 #endif // V8_TARGET_ARCH_ARM | 2777 #endif // V8_TARGET_ARCH_ARM |