| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 1058 matching lines...) |
| 1069 } | 1069 } |
| 1070 } | 1070 } |
| 1071 return false; | 1071 return false; |
| 1072 } | 1072 } |
| 1073 | 1073 |
| 1074 | 1074 |
| 1075 // We have to use the temporary register for things that can be relocated even | 1075 // We have to use the temporary register for things that can be relocated even |
| 1076 // if they can be encoded in the ARM's 12 bits of immediate-offset instruction | 1076 // if they can be encoded in the ARM's 12 bits of immediate-offset instruction |
| 1077 // space. There is no guarantee that the relocated location can be similarly | 1077 // space. There is no guarantee that the relocated location can be similarly |
| 1078 // encoded. | 1078 // encoded. |
| 1079 bool Operand::must_output_reloc_info(const Assembler* assembler) const { | 1079 bool Operand::must_output_reloc_info(Isolate* isolate, |
| | 1080 const Assembler* assembler) const { |
| 1080 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { | 1081 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { |
| 1081 if (assembler != NULL && assembler->predictable_code_size()) return true; | 1082 if (assembler != NULL && assembler->predictable_code_size()) return true; |
| 1082 return Serializer::enabled(); | 1083 return Serializer::enabled(isolate); |
| 1083 } else if (RelocInfo::IsNone(rmode_)) { | 1084 } else if (RelocInfo::IsNone(rmode_)) { |
| 1084 return false; | 1085 return false; |
| 1085 } | 1086 } |
| 1086 return true; | 1087 return true; |
| 1087 } | 1088 } |
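A side note on the comment above must_output_reloc_info: whether a 32-bit value fits the ARM data-processing immediate field (an 8-bit constant rotated right by an even amount, packed into the 12-bit field) depends on the value itself, so a reference that happens to be encodable at its current address may not be encodable once relocated. Below is a minimal standalone sketch of that encodability check - this is not V8's fits_shifter, and the example addresses are made up for illustration:

```cpp
// Sketch of the ARM data-processing immediate test: encodable values are an
// 8-bit constant rotated right by an even amount (0, 2, ..., 30).
#include <cstdint>
#include <cstdio>

static bool encodes_as_shifter_operand(uint32_t imm) {
  for (int rot = 0; rot < 32; rot += 2) {
    // Rotating left by rot undoes a rotate-right-by-rot encoding.
    uint32_t rotated = rot == 0 ? imm : ((imm << rot) | (imm >> (32 - rot)));
    if ((rotated & ~0xffu) == 0) return true;
  }
  return false;
}

int main() {
  // Hypothetical addresses: encodable before relocation, not after.
  printf("%d\n", encodes_as_shifter_operand(0xff000000u));  // prints 1
  printf("%d\n", encodes_as_shifter_operand(0xff000004u));  // prints 0
  return 0;
}
```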
| 1088 | 1089 |
| 1089 | 1090 |
| 1090 static bool use_mov_immediate_load(const Operand& x, | 1091 static bool use_mov_immediate_load(Isolate* isolate, |
| | 1092 const Operand& x, |
| 1091 const Assembler* assembler) { | 1093 const Assembler* assembler) { |
| 1092 if (assembler != NULL && !assembler->can_use_constant_pool()) { | 1094 if (assembler != NULL && !assembler->can_use_constant_pool()) { |
| 1093 // If there is no constant pool available, we must use a mov immediate. | 1095 // If there is no constant pool available, we must use a mov immediate. |
| 1094 // TODO(rmcilroy): enable ARMv6 support. | 1096 // TODO(rmcilroy): enable ARMv6 support. |
| 1095 ASSERT(CpuFeatures::IsSupported(ARMv7)); | 1097 ASSERT(CpuFeatures::IsSupported(ARMv7)); |
| 1096 return true; | 1098 return true; |
| 1097 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && | 1099 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && |
| 1098 (assembler == NULL || !assembler->predictable_code_size())) { | 1100 (assembler == NULL || !assembler->predictable_code_size())) { |
| 1099 // Prefer movw / movt to constant pool if it is more efficient on the CPU. | 1101 // Prefer movw / movt to constant pool if it is more efficient on the CPU. |
| 1100 return true; | 1102 return true; |
| 1101 } else if (x.must_output_reloc_info(assembler)) { | 1103 } else if (x.must_output_reloc_info(isolate, assembler)) { |
| 1102 // Prefer constant pool if data is likely to be patched. | 1104 // Prefer constant pool if data is likely to be patched. |
| 1103 return false; | 1105 return false; |
| 1104 } else { | 1106 } else { |
| 1105 // Otherwise, use immediate load if movw / movt is available. | 1107 // Otherwise, use immediate load if movw / movt is available. |
| 1106 return CpuFeatures::IsSupported(ARMv7); | 1108 return CpuFeatures::IsSupported(ARMv7); |
| 1107 } | 1109 } |
| 1108 } | 1110 } |
| 1109 | 1111 |
| 1110 | 1112 |
| 1111 bool Operand::is_single_instruction(const Assembler* assembler, | 1113 bool Operand::is_single_instruction(Isolate* isolate, |
| | 1114 const Assembler* assembler, |
| 1112 Instr instr) const { | 1115 Instr instr) const { |
| 1113 if (rm_.is_valid()) return true; | 1116 if (rm_.is_valid()) return true; |
| 1114 uint32_t dummy1, dummy2; | 1117 uint32_t dummy1, dummy2; |
| 1115 if (must_output_reloc_info(assembler) || | 1118 if (must_output_reloc_info(isolate, assembler) || |
| 1116 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { | 1119 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { |
| 1117 // The immediate operand cannot be encoded as a shifter operand, or use of | 1120 // The immediate operand cannot be encoded as a shifter operand, or use of |
| 1118 // constant pool is required. For a mov instruction not setting the | 1121 // constant pool is required. For a mov instruction not setting the |
| 1119 // condition code, additional instruction conventions can be used. | 1122 // condition code, additional instruction conventions can be used. |
| 1120 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1123 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
| 1121 return !use_mov_immediate_load(*this, assembler); | 1124 return !use_mov_immediate_load(isolate, *this, assembler); |
| 1122 } else { | 1125 } else { |
| 1123 // If this is not a mov or mvn instruction there will always be an | 1126 // If this is not a mov or mvn instruction there will always be an |
| 1124 // additional instruction - either a mov or a ldr. The mov might actually | 1127 // additional instruction - either a mov or a ldr. The mov might actually |
| 1125 // be two instructions (movw followed by movt), so including the actual | 1128 // be two instructions (movw followed by movt), so including the actual |
| 1126 // instruction, two or three instructions will be generated. | 1129 // instruction, two or three instructions will be generated. |
| 1127 return false; | 1130 return false; |
| 1128 } | 1131 } |
| 1129 } else { | 1132 } else { |
| 1130 // No use of constant pool and the immediate operand can be encoded as a | 1133 // No use of constant pool and the immediate operand can be encoded as a |
| 1131 // shifter operand. | 1134 // shifter operand. |
| 1132 return true; | 1135 return true; |
| 1133 } | 1136 } |
| 1134 } | 1137 } |
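A rough sketch of the instruction counting described in the comments above, assuming the ARMv7 movw/movt path for a constant that does not fit the shifter (illustrative only, not V8 code):

```cpp
// Counting instructions for an immediate operand, following the comments in
// is_single_instruction: fits the shifter -> 1; a plain mov of a wide
// constant -> movw + movt (2); any other op -> build the constant first,
// then the op itself (3). A constant-pool ldr would replace the movw/movt.
#include <cstdio>

static int instructions_for_immediate(bool fits_shifter, bool is_plain_mov) {
  if (fits_shifter) return 1;
  const int load = 2;  // movw + movt
  return is_plain_mov ? load : load + 1;
}

int main() {
  printf("mov r0, #0xff           -> %d\n", instructions_for_immediate(true, true));
  printf("mov r0, #0x12345678     -> %d\n", instructions_for_immediate(false, true));
  printf("add r0, r1, #0x12345678 -> %d\n", instructions_for_immediate(false, false));
  return 0;
}
```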
| 1135 | 1138 |
| 1136 | 1139 |
| 1137 void Assembler::move_32_bit_immediate(Register rd, | 1140 void Assembler::move_32_bit_immediate(Register rd, |
| 1138 const Operand& x, | 1141 const Operand& x, |
| 1139 Condition cond) { | 1142 Condition cond) { |
| 1140 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); | 1143 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); |
| 1141 if (x.must_output_reloc_info(this)) { | 1144 if (x.must_output_reloc_info(isolate(), this)) { |
| 1142 RecordRelocInfo(rinfo); | 1145 RecordRelocInfo(rinfo); |
| 1143 } | 1146 } |
| 1144 | 1147 |
| 1145 if (use_mov_immediate_load(x, this)) { | 1148 if (use_mov_immediate_load(isolate(), x, this)) { |
| 1146 Register target = rd.code() == pc.code() ? ip : rd; | 1149 Register target = rd.code() == pc.code() ? ip : rd; |
| 1147 // TODO(rmcilroy): add ARMv6 support for immediate loads. | 1150 // TODO(rmcilroy): add ARMv6 support for immediate loads. |
| 1148 ASSERT(CpuFeatures::IsSupported(ARMv7)); | 1151 ASSERT(CpuFeatures::IsSupported(ARMv7)); |
| 1149 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { | 1152 if (!FLAG_enable_ool_constant_pool && |
| | 1153 x.must_output_reloc_info(isolate(), this)) { |
| 1150 // Make sure the movw/movt doesn't get separated. | 1154 // Make sure the movw/movt doesn't get separated. |
| 1151 BlockConstPoolFor(2); | 1155 BlockConstPoolFor(2); |
| 1152 } | 1156 } |
| 1153 emit(cond | 0x30*B20 | target.code()*B12 | | 1157 emit(cond | 0x30*B20 | target.code()*B12 | |
| 1154 EncodeMovwImmediate(x.imm32_ & 0xffff)); | 1158 EncodeMovwImmediate(x.imm32_ & 0xffff)); |
| 1155 movt(target, static_cast<uint32_t>(x.imm32_) >> 16, cond); | 1159 movt(target, static_cast<uint32_t>(x.imm32_) >> 16, cond); |
| 1156 if (target.code() != rd.code()) { | 1160 if (target.code() != rd.code()) { |
| 1157 mov(rd, target, LeaveCC, cond); | 1161 mov(rd, target, LeaveCC, cond); |
| 1158 } | 1162 } |
| 1159 } else { | 1163 } else { |
| 1160 ASSERT(can_use_constant_pool()); | 1164 ASSERT(can_use_constant_pool()); |
| 1161 ConstantPoolAddEntry(rinfo); | 1165 ConstantPoolAddEntry(rinfo); |
| 1162 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); | 1166 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); |
| 1163 } | 1167 } |
| 1164 } | 1168 } |
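For reference, the emit/movt pair above splits the 32-bit constant into two halfwords; a minimal sketch of that split in plain C++ (not the assembler API):

```cpp
// movw/movt split used by move_32_bit_immediate: movw writes bits 15:0,
// movt writes bits 31:16, and together they rebuild the full constant.
#include <cassert>
#include <cstdint>

int main() {
  uint32_t imm32 = 0xdeadbeefu;           // any 32-bit constant
  uint32_t low   = imm32 & 0xffffu;       // movw operand
  uint32_t high  = imm32 >> 16;           // movt operand
  assert(((high << 16) | low) == imm32);  // pair reassembles the value
  return 0;
}
```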
| 1165 | 1169 |
| 1166 | 1170 |
| 1167 void Assembler::addrmod1(Instr instr, | 1171 void Assembler::addrmod1(Instr instr, |
| 1168 Register rn, | 1172 Register rn, |
| 1169 Register rd, | 1173 Register rd, |
| 1170 const Operand& x) { | 1174 const Operand& x) { |
| 1171 CheckBuffer(); | 1175 CheckBuffer(); |
| 1172 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); | 1176 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); |
| 1173 if (!x.rm_.is_valid()) { | 1177 if (!x.rm_.is_valid()) { |
| 1174 // Immediate. | 1178 // Immediate. |
| 1175 uint32_t rotate_imm; | 1179 uint32_t rotate_imm; |
| 1176 uint32_t immed_8; | 1180 uint32_t immed_8; |
| 1177 if (x.must_output_reloc_info(this) || | 1181 if (x.must_output_reloc_info(isolate(), this) || |
| 1178 !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { | 1182 !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { |
| 1179 // The immediate operand cannot be encoded as a shifter operand, so load | 1183 // The immediate operand cannot be encoded as a shifter operand, so load |
| 1180 // it first to register ip and change the original instruction to use ip. | 1184 // it first to register ip and change the original instruction to use ip. |
| 1181 // However, if the original instruction is a 'mov rd, x' (not setting the | 1185 // However, if the original instruction is a 'mov rd, x' (not setting the |
| 1182 // condition code), then replace it with a 'ldr rd, [pc]'. | 1186 // condition code), then replace it with a 'ldr rd, [pc]'. |
| 1183 CHECK(!rn.is(ip)); // rn should never be ip, or it will be trashed | 1187 CHECK(!rn.is(ip)); // rn should never be ip, or it will be trashed |
| 1184 Condition cond = Instruction::ConditionField(instr); | 1188 Condition cond = Instruction::ConditionField(instr); |
| 1185 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1189 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
| 1186 move_32_bit_immediate(rd, x, cond); | 1190 move_32_bit_immediate(rd, x, cond); |
| 1187 } else { | 1191 } else { |
| (...skipping 661 matching lines...) |
| 1849 | 1853 |
| 1850 | 1854 |
| 1851 void Assembler::msr(SRegisterFieldMask fields, const Operand& src, | 1855 void Assembler::msr(SRegisterFieldMask fields, const Operand& src, |
| 1852 Condition cond) { | 1856 Condition cond) { |
| 1853 ASSERT(fields >= B16 && fields < B20); // at least one field set | 1857 ASSERT(fields >= B16 && fields < B20); // at least one field set |
| 1854 Instr instr; | 1858 Instr instr; |
| 1855 if (!src.rm_.is_valid()) { | 1859 if (!src.rm_.is_valid()) { |
| 1856 // Immediate. | 1860 // Immediate. |
| 1857 uint32_t rotate_imm; | 1861 uint32_t rotate_imm; |
| 1858 uint32_t immed_8; | 1862 uint32_t immed_8; |
| 1859 if (src.must_output_reloc_info(this) || | 1863 if (src.must_output_reloc_info(isolate(), this) || |
| 1860 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { | 1864 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { |
| 1861 // Immediate operand cannot be encoded, load it first to register ip. | 1865 // Immediate operand cannot be encoded, load it first to register ip. |
| 1862 move_32_bit_immediate(ip, src); | 1866 move_32_bit_immediate(ip, src); |
| 1863 msr(fields, Operand(ip), cond); | 1867 msr(fields, Operand(ip), cond); |
| 1864 return; | 1868 return; |
| 1865 } | 1869 } |
| 1866 instr = I | rotate_imm*B8 | immed_8; | 1870 instr = I | rotate_imm*B8 | immed_8; |
| 1867 } else { | 1871 } else { |
| 1868 ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed | 1872 ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed |
| 1869 instr = src.rm_.code(); | 1873 instr = src.rm_.code(); |
| (...skipping 1382 matching lines...) |
| 3252 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 3256 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
| 3253 RelocInfo rinfo(pc_, rmode, data, NULL); | 3257 RelocInfo rinfo(pc_, rmode, data, NULL); |
| 3254 RecordRelocInfo(rinfo); | 3258 RecordRelocInfo(rinfo); |
| 3255 } | 3259 } |
| 3256 | 3260 |
| 3257 | 3261 |
| 3258 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { | 3262 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { |
| 3259 if (!RelocInfo::IsNone(rinfo.rmode())) { | 3263 if (!RelocInfo::IsNone(rinfo.rmode())) { |
| 3260 // Don't record external references unless the heap will be serialized. | 3264 // Don't record external references unless the heap will be serialized. |
| 3261 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { | 3265 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { |
| 3262 if (!Serializer::enabled() && !emit_debug_code()) { | 3266 if (!Serializer::enabled(isolate()) && !emit_debug_code()) { |
| 3263 return; | 3267 return; |
| 3264 } | 3268 } |
| 3265 } | 3269 } |
| 3266 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here | 3270 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here |
| 3267 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { | 3271 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { |
| 3268 RelocInfo reloc_info_with_ast_id(rinfo.pc(), | 3272 RelocInfo reloc_info_with_ast_id(rinfo.pc(), |
| 3269 rinfo.rmode(), | 3273 rinfo.rmode(), |
| 3270 RecordedAstId().ToInt(), | 3274 RecordedAstId().ToInt(), |
| 3271 NULL); | 3275 NULL); |
| 3272 ClearRecordedAstId(); | 3276 ClearRecordedAstId(); |
| (...skipping 210 matching lines...) |
| 3483 if (IsLdrPcImmediateOffset(instr) && | 3487 if (IsLdrPcImmediateOffset(instr) && |
| 3484 GetLdrRegisterImmediateOffset(instr) == 0) { | 3488 GetLdrRegisterImmediateOffset(instr) == 0) { |
| 3485 int delta = pc_ - rinfo.pc() - kPcLoadDelta; | 3489 int delta = pc_ - rinfo.pc() - kPcLoadDelta; |
| 3486 ASSERT(is_uint12(delta)); | 3490 ASSERT(is_uint12(delta)); |
| 3487 // 0 is the smallest delta: | 3491 // 0 is the smallest delta: |
| 3488 // ldr rd, [pc, #0] | 3492 // ldr rd, [pc, #0] |
| 3489 // constant pool marker | 3493 // constant pool marker |
| 3490 // data | 3494 // data |
| 3491 | 3495 |
| 3492 bool found = false; | 3496 bool found = false; |
| 3493 if (!Serializer::enabled() && (rinfo.rmode() >= RelocInfo::CELL)) { | 3497 if (!Serializer::enabled(isolate()) && |
| | 3498 (rinfo.rmode() >= RelocInfo::CELL)) { |
| 3494 for (int j = 0; j < i; j++) { | 3499 for (int j = 0; j < i; j++) { |
| 3495 RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; | 3500 RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; |
| 3496 | 3501 |
| 3497 if ((rinfo2.data() == rinfo.data()) && | 3502 if ((rinfo2.data() == rinfo.data()) && |
| 3498 (rinfo2.rmode() == rinfo.rmode())) { | 3503 (rinfo2.rmode() == rinfo.rmode())) { |
| 3499 Instr instr2 = instr_at(rinfo2.pc()); | 3504 Instr instr2 = instr_at(rinfo2.pc()); |
| 3500 if (IsLdrPcImmediateOffset(instr2)) { | 3505 if (IsLdrPcImmediateOffset(instr2)) { |
| 3501 delta = GetLdrRegisterImmediateOffset(instr2); | 3506 delta = GetLdrRegisterImmediateOffset(instr2); |
| 3502 delta += rinfo2.pc() - rinfo.pc(); | 3507 delta += rinfo2.pc() - rinfo.pc(); |
| 3503 found = true; | 3508 found = true; |
| (...skipping 83 matching lines...) |
| 3587 RelocInfo::Mode rmode = rinfo.rmode(); | 3592 RelocInfo::Mode rmode = rinfo.rmode(); |
| 3588 ASSERT(rmode != RelocInfo::COMMENT && | 3593 ASSERT(rmode != RelocInfo::COMMENT && |
| 3589 rmode != RelocInfo::POSITION && | 3594 rmode != RelocInfo::POSITION && |
| 3590 rmode != RelocInfo::STATEMENT_POSITION && | 3595 rmode != RelocInfo::STATEMENT_POSITION && |
| 3591 rmode != RelocInfo::CONST_POOL); | 3596 rmode != RelocInfo::CONST_POOL); |
| 3592 | 3597 |
| 3593 | 3598 |
| 3594 // Try to merge entries which won't be patched. | 3599 // Try to merge entries which won't be patched. |
| 3595 int merged_index = -1; | 3600 int merged_index = -1; |
| 3596 if (RelocInfo::IsNone(rmode) || | 3601 if (RelocInfo::IsNone(rmode) || |
| 3597 (!Serializer::enabled() && (rmode >= RelocInfo::CELL))) { | 3602 (!Serializer::enabled(assm->isolate()) && (rmode >= RelocInfo::CELL))) { |
| 3598 size_t i; | 3603 size_t i; |
| 3599 std::vector<RelocInfo>::const_iterator it; | 3604 std::vector<RelocInfo>::const_iterator it; |
| 3600 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) { | 3605 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) { |
| 3601 if (RelocInfo::IsEqual(rinfo, *it)) { | 3606 if (RelocInfo::IsEqual(rinfo, *it)) { |
| 3602 merged_index = i; | 3607 merged_index = i; |
| 3603 break; | 3608 break; |
| 3604 } | 3609 } |
| 3605 } | 3610 } |
| 3606 } | 3611 } |
| 3607 | 3612 |
| (...skipping 115 matching lines...) |
| 3723 ASSERT((index_64bit == count_of_64bit_) && | 3728 ASSERT((index_64bit == count_of_64bit_) && |
| 3724 (index_code_ptr == (index_64bit + count_of_code_ptr_)) && | 3729 (index_code_ptr == (index_64bit + count_of_code_ptr_)) && |
| 3725 (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) && | 3730 (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) && |
| 3726 (index_32bit == (index_heap_ptr + count_of_32bit_))); | 3731 (index_32bit == (index_heap_ptr + count_of_32bit_))); |
| 3727 } | 3732 } |
| 3728 | 3733 |
| 3729 | 3734 |
| 3730 } } // namespace v8::internal | 3735 } } // namespace v8::internal |
| 3731 | 3736 |
| 3732 #endif // V8_TARGET_ARCH_ARM | 3737 #endif // V8_TARGET_ARCH_ARM |