| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 282 matching lines...) |
| 293 | 293 |
| 294 | 294 |
| 295 bool RelocInfo::IsCodedSpecially() { | 295 bool RelocInfo::IsCodedSpecially() { |
| 296 // The deserializer needs to know whether a pointer is specially coded. Being | 296 // The deserializer needs to know whether a pointer is specially coded. Being |
| 297 // specially coded on ARM means that it is a movw/movt instruction. We don't | 297 // specially coded on ARM means that it is a movw/movt instruction. We don't |
| 298 // generate those yet. | 298 // generate those yet. |
| 299 return false; | 299 return false; |
| 300 } | 300 } |
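A movw/movt pair embeds a full 32-bit pointer in the instruction stream itself, split across two 16-bit immediate fields, rather than referencing the constant pool; that is why the deserializer would need to treat it specially. A minimal sketch of the pair's semantics, assuming nothing beyond the architectural definition of the two instructions (the function name is illustrative, not a V8 helper):

    // movw rd, #lo16 writes the low half and clears the top;
    // movt rd, #hi16 writes the high half and keeps the low half.
    uint32_t emulate_movw_movt(uint16_t lo16, uint16_t hi16) {
      uint32_t rd = lo16;                                         // movw
      rd = (static_cast<uint32_t>(hi16) << 16) | (rd & 0xffffu);  // movt
      return rd;  // full 32-bit value, no constant pool entry needed
    }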
| 301 | 301 |
| 302 | 302 |
| 303 bool RelocInfo::IsInConstantPool() { |
| 304 return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)); |
| 305 } |
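IsLdrPcImmediateOffset recognizes the instruction a constant pool load produces: an LDR with an immediate offset whose base register is pc. A hedged sketch of such a check, written against the raw architectural LDR-immediate field layout rather than the real Assembler helper (mask and pattern below are assumptions from the ARM encoding, not copied from V8):

    // LDR (immediate, word): bits 27-25 == 010, bit 22 (B) == 0, bit 20 (L) == 1;
    // the base register Rn lives in bits 16-19 and is pc (r15) for pool loads.
    bool looks_like_ldr_pc_immediate(uint32_t instr) {
      bool is_ldr_word_imm = (instr & 0x0E500000u) == 0x04100000u;
      bool base_is_pc = ((instr >> 16) & 0xFu) == 15u;
      return is_ldr_word_imm && base_is_pc;
    }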
| 306 |
| 307 |
| 303 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { | 308 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { |
| 304 // Patch the code at the current address with the supplied instructions. | 309 // Patch the code at the current address with the supplied instructions. |
| 305 Instr* pc = reinterpret_cast<Instr*>(pc_); | 310 Instr* pc = reinterpret_cast<Instr*>(pc_); |
| 306 Instr* instr = reinterpret_cast<Instr*>(instructions); | 311 Instr* instr = reinterpret_cast<Instr*>(instructions); |
| 307 for (int i = 0; i < instruction_count; i++) { | 312 for (int i = 0; i < instruction_count; i++) { |
| 308 *(pc + i) = *(instr + i); | 313 *(pc + i) = *(instr + i); |
| 309 } | 314 } |
| 310 | 315 |
| 311 // Indicate that code has changed. | 316 // Indicate that code has changed. |
| 312 CPU::FlushICache(pc_, instruction_count * Assembler::kInstrSize); | 317 CPU::FlushICache(pc_, instruction_count * Assembler::kInstrSize); |
| (...skipping 205 matching lines...) |
| 518 positions_recorder_(this) { | 523 positions_recorder_(this) { |
| 519 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); | 524 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); |
| 520 num_pending_32_bit_reloc_info_ = 0; | 525 num_pending_32_bit_reloc_info_ = 0; |
| 521 num_pending_64_bit_reloc_info_ = 0; | 526 num_pending_64_bit_reloc_info_ = 0; |
| 522 next_buffer_check_ = 0; | 527 next_buffer_check_ = 0; |
| 523 const_pool_blocked_nesting_ = 0; | 528 const_pool_blocked_nesting_ = 0; |
| 524 no_const_pool_before_ = 0; | 529 no_const_pool_before_ = 0; |
| 525 first_const_pool_32_use_ = -1; | 530 first_const_pool_32_use_ = -1; |
| 526 first_const_pool_64_use_ = -1; | 531 first_const_pool_64_use_ = -1; |
| 527 last_bound_pos_ = 0; | 532 last_bound_pos_ = 0; |
| 533 constant_pool_available_ = !FLAG_enable_ool_constant_pool; |
| 534 constant_pool_full_ = false; |
| 528 ClearRecordedAstId(); | 535 ClearRecordedAstId(); |
| 529 } | 536 } |
| 530 | 537 |
| 531 | 538 |
| 532 Assembler::~Assembler() { | 539 Assembler::~Assembler() { |
| 533 ASSERT(const_pool_blocked_nesting_ == 0); | 540 ASSERT(const_pool_blocked_nesting_ == 0); |
| 534 } | 541 } |
| 535 | 542 |
| 536 | 543 |
| 537 void Assembler::GetCode(CodeDesc* desc) { | 544 void Assembler::GetCode(CodeDesc* desc) { |
| (...skipping 509 matching lines...) |
| 1047 #endif // def DEBUG | 1054 #endif // def DEBUG |
| 1048 if (assembler != NULL && assembler->predictable_code_size()) return true; | 1055 if (assembler != NULL && assembler->predictable_code_size()) return true; |
| 1049 return Serializer::enabled(); | 1056 return Serializer::enabled(); |
| 1050 } else if (RelocInfo::IsNone(rmode_)) { | 1057 } else if (RelocInfo::IsNone(rmode_)) { |
| 1051 return false; | 1058 return false; |
| 1052 } | 1059 } |
| 1053 return true; | 1060 return true; |
| 1054 } | 1061 } |
| 1055 | 1062 |
| 1056 | 1063 |
| 1057 static bool use_movw_movt(const Operand& x, const Assembler* assembler) { | 1064 static bool use_mov_immediate_load(const Operand& x, |
| 1058 if (Assembler::use_immediate_embedded_pointer_loads(assembler)) { | 1065 const Assembler* assembler) { |
| 1066 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && |
| 1067 (assembler == NULL || !assembler->predictable_code_size())) { |
| 1068 // Prefer movw / movt to constant pool if it is more efficient on the CPU. |
| 1059 return true; | 1069 return true; |
| 1070 } else if (x.must_output_reloc_info(assembler)) { |
| 1071 // Prefer constant pool if data is likely to be patched. |
| 1072 return false; |
| 1073 } else { |
| 1074 // Otherwise, use immediate load if movw / movt is available. |
| 1075 return CpuFeatures::IsSupported(ARMv7); |
| 1060 } | 1076 } |
| 1061 if (x.must_output_reloc_info(assembler)) { | |
| 1062 return false; | |
| 1063 } | |
| 1064 return CpuFeatures::IsSupported(ARMv7); | |
| 1065 } | 1077 } |
| 1066 | 1078 |
| 1067 | 1079 |
| 1068 bool Operand::is_single_instruction(const Assembler* assembler, | 1080 bool Operand::is_single_instruction(const Assembler* assembler, |
| 1069 Instr instr) const { | 1081 Instr instr) const { |
| 1070 if (rm_.is_valid()) return true; | 1082 if (rm_.is_valid()) return true; |
| 1071 uint32_t dummy1, dummy2; | 1083 uint32_t dummy1, dummy2; |
| 1072 if (must_output_reloc_info(assembler) || | 1084 if (must_output_reloc_info(assembler) || |
| 1073 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { | 1085 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { |
| 1074 // Either the immediate operand cannot be encoded as a shifter operand, or | 1086 // Either the immediate operand cannot be encoded as a shifter operand, or |
| 1075 // use of the constant pool is required. For a mov instruction not setting | 1087 // use of the constant pool is required. For a mov instruction not setting |
| 1076 // the condition code, alternative instruction sequences can be used. | 1088 // the condition code, alternative instruction sequences can be used. |
| 1077 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1089 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
| 1078 return !use_movw_movt(*this, assembler); | 1090 return !use_mov_immediate_load(*this, assembler); |
| 1079 } else { | 1091 } else { |
| 1080 // If this is not a mov or mvn instruction there will always be an | 1092 // If this is not a mov or mvn instruction there will always be an |
| 1081 // additional instruction - either mov or ldr. The mov might actually | 1093 // additional instruction - either mov or ldr. The mov might actually |
| 1082 // be two instructions - movw followed by movt - so including the | 1094 // be two instructions - movw followed by movt - so including the |
| 1083 // actual instruction, two or three instructions will be generated. | 1095 // actual instruction, two or three instructions will be generated. |
| 1084 return false; | 1096 return false; |
| 1085 } | 1097 } |
| 1086 } else { | 1098 } else { |
| 1087 // No use of constant pool and the immediate operand can be encoded as a | 1099 // No use of constant pool and the immediate operand can be encoded as a |
| 1088 // shifter operand. | 1100 // shifter operand. |
| 1089 return true; | 1101 return true; |
| 1090 } | 1102 } |
| 1091 } | 1103 } |
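Whether an immediate "fits the shifter" follows the ARM data-processing rule: the operand must be an 8-bit value rotated right by an even amount (0..30). A self-contained sketch of that test, assuming nothing about the real fits_shifter signature (which additionally tries instruction rewrites such as mov/mvn swaps):

    // An immediate is encodable if some even right-rotation of an 8-bit
    // value reproduces it; equivalently, rotating imm32 left by that
    // amount must leave a value <= 0xFF.
    bool encodable_as_shifter_operand(uint32_t imm32) {
      for (int rot = 0; rot < 32; rot += 2) {
        uint32_t v = (rot == 0) ? imm32
                                : ((imm32 << rot) | (imm32 >> (32 - rot)));
        if (v <= 0xFFu) return true;
      }
      return false;
    }

For example, 0xF0000000 is encodable (0x0F rotated right by 4) while 0x12345678 is not, which is what forces the ip / constant pool fallbacks below.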
| 1092 | 1104 |
| 1093 | 1105 |
| 1094 void Assembler::move_32_bit_immediate(Condition cond, | 1106 void Assembler::move_32_bit_immediate(Register rd, |
| 1095 Register rd, | 1107 const Operand& x, |
| 1096 SBit s, | 1108 Condition cond) { |
| 1097 const Operand& x) { | 1109 if (rd.code() != pc.code()) { |
| 1098 if (rd.code() != pc.code() && s == LeaveCC) { | 1110 if (use_mov_immediate_load(x, this)) { |
| 1099 if (use_movw_movt(x, this)) { | |
| 1100 if (x.must_output_reloc_info(this)) { | 1111 if (x.must_output_reloc_info(this)) { |
| 1101 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); | 1112 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); |
| 1102 // Make sure the movw/movt doesn't get separated. | 1113 // Make sure the movw/movt doesn't get separated. |
| 1103 BlockConstPoolFor(2); | 1114 BlockConstPoolFor(2); |
| 1104 } | 1115 } |
| 1105 emit(cond | 0x30*B20 | rd.code()*B12 | | 1116 emit(cond | 0x30*B20 | rd.code()*B12 | |
| 1106 EncodeMovwImmediate(x.imm32_ & 0xffff)); | 1117 EncodeMovwImmediate(x.imm32_ & 0xffff)); |
| 1107 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); | 1118 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); |
| 1108 return; | 1119 return; |
| 1109 } | 1120 } |
| (...skipping 16 matching lines...) |
| 1126 uint32_t immed_8; | 1137 uint32_t immed_8; |
| 1127 if (x.must_output_reloc_info(this) || | 1138 if (x.must_output_reloc_info(this) || |
| 1128 !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { | 1139 !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { |
| 1129 // The immediate operand cannot be encoded as a shifter operand, so load | 1140 // The immediate operand cannot be encoded as a shifter operand, so load |
| 1130 // it first to register ip and change the original instruction to use ip. | 1141 // it first to register ip and change the original instruction to use ip. |
| 1131 // However, if the original instruction is a 'mov rd, x' (not setting the | 1142 // However, if the original instruction is a 'mov rd, x' (not setting the |
| 1132 // condition code), then replace it with a 'ldr rd, [pc]'. | 1143 // condition code), then replace it with a 'ldr rd, [pc]'. |
| 1133 CHECK(!rn.is(ip)); // rn should never be ip, or it will be trashed | 1144 CHECK(!rn.is(ip)); // rn should never be ip, or it will be trashed |
| 1134 Condition cond = Instruction::ConditionField(instr); | 1145 Condition cond = Instruction::ConditionField(instr); |
| 1135 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1146 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
| 1136 move_32_bit_immediate(cond, rd, LeaveCC, x); | 1147 move_32_bit_immediate(rd, x, cond); |
| 1137 } else { | 1148 } else { |
| 1138 if ((instr & kMovMvnMask) == kMovMvnPattern) { | 1149 mov(ip, x, LeaveCC, cond); |
| 1139 // Moves need to use a constant pool entry. | |
| 1140 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | |
| 1141 ldr(ip, MemOperand(pc, 0), cond); | |
| 1142 } else if (x.must_output_reloc_info(this)) { | |
| 1143 // Otherwise, use most efficient form of fetching from constant pool. | |
| 1144 move_32_bit_immediate(cond, ip, LeaveCC, x); | |
| 1145 } else { | |
| 1146 // If this is not a mov or mvn instruction we may still be able to | |
| 1147 // avoid a constant pool entry by using mvn or movw. | |
| 1148 mov(ip, x, LeaveCC, cond); | |
| 1149 } | |
| 1150 addrmod1(instr, rn, rd, Operand(ip)); | 1150 addrmod1(instr, rn, rd, Operand(ip)); |
| 1151 } | 1151 } |
| 1152 return; | 1152 return; |
| 1153 } | 1153 } |
| 1154 instr |= I | rotate_imm*B8 | immed_8; | 1154 instr |= I | rotate_imm*B8 | immed_8; |
| 1155 } else if (!x.rs_.is_valid()) { | 1155 } else if (!x.rs_.is_valid()) { |
| 1156 // Immediate shift. | 1156 // Immediate shift. |
| 1157 instr |= x.shift_imm_*B7 | x.shift_op_ | x.rm_.code(); | 1157 instr |= x.shift_imm_*B7 | x.shift_op_ | x.rm_.code(); |
| 1158 } else { | 1158 } else { |
| 1159 // Register shift. | 1159 // Register shift. |
| (...skipping 647 matching lines...) |
| 1807 Condition cond) { | 1807 Condition cond) { |
| 1808 ASSERT(fields >= B16 && fields < B20); // at least one field set | 1808 ASSERT(fields >= B16 && fields < B20); // at least one field set |
| 1809 Instr instr; | 1809 Instr instr; |
| 1810 if (!src.rm_.is_valid()) { | 1810 if (!src.rm_.is_valid()) { |
| 1811 // Immediate. | 1811 // Immediate. |
| 1812 uint32_t rotate_imm; | 1812 uint32_t rotate_imm; |
| 1813 uint32_t immed_8; | 1813 uint32_t immed_8; |
| 1814 if (src.must_output_reloc_info(this) || | 1814 if (src.must_output_reloc_info(this) || |
| 1815 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { | 1815 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { |
| 1816 // Immediate operand cannot be encoded, load it first to register ip. | 1816 // Immediate operand cannot be encoded, load it first to register ip. |
| 1817 RecordRelocInfo(src.rmode_, src.imm32_); | 1817 move_32_bit_immediate(ip, src); |
| 1818 ldr(ip, MemOperand(pc, 0), cond); | |
| 1819 msr(fields, Operand(ip), cond); | 1818 msr(fields, Operand(ip), cond); |
| 1820 return; | 1819 return; |
| 1821 } | 1820 } |
| 1822 instr = I | rotate_imm*B8 | immed_8; | 1821 instr = I | rotate_imm*B8 | immed_8; |
| 1823 } else { | 1822 } else { |
| 1824 ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed | 1823 ASSERT(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed |
| 1825 instr = src.rm_.code(); | 1824 instr = src.rm_.code(); |
| 1826 } | 1825 } |
| 1827 emit(cond | instr | B24 | B21 | fields | 15*B12); | 1826 emit(cond | instr | B24 | B21 | fields | 15*B12); |
| 1828 } | 1827 } |
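The effect of this change: a non-encodable msr immediate is now materialized with move_32_bit_immediate (movw/movt when available) instead of always consuming a constant pool slot. A hypothetical call site, with illustrative immediates (the CPSR_f field mask and default condition are assumed here, not taken from this diff):

    // 0xF0000000 is 0x0F rotated right by 4, so it encodes inline;
    // 0x12345678 cannot be encoded, so it is loaded into ip first.
    assembler.msr(CPSR_f, Operand(0xF0000000u));  // single msr instruction
    assembler.msr(CPSR_f, Operand(0x12345678u));  // movw/movt (or pool) + msr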
| (...skipping 1663 matching lines...) |
| 3492 | 3491 |
| 3493 // Since a constant pool was just emitted, move the check offset forward by | 3492 // Since a constant pool was just emitted, move the check offset forward by |
| 3494 // the standard interval. | 3493 // the standard interval. |
| 3495 next_buffer_check_ = pc_offset() + kCheckPoolInterval; | 3494 next_buffer_check_ = pc_offset() + kCheckPoolInterval; |
| 3496 } | 3495 } |
| 3497 | 3496 |
| 3498 | 3497 |
| 3499 } } // namespace v8::internal | 3498 } } // namespace v8::internal |
| 3500 | 3499 |
| 3501 #endif // V8_TARGET_ARCH_ARM | 3500 #endif // V8_TARGET_ARCH_ARM |