OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 360 matching lines...) |
371 // ----------------------------------------------------------------------------- | 371 // ----------------------------------------------------------------------------- |
372 // Implementation of Operand and MemOperand | 372 // Implementation of Operand and MemOperand |
373 // See assembler-arm-inl.h for inlined constructors | 373 // See assembler-arm-inl.h for inlined constructors |
374 | 374 |
375 Operand::Operand(Handle<Object> handle) { | 375 Operand::Operand(Handle<Object> handle) { |
376 AllowDeferredHandleDereference using_raw_address; | 376 AllowDeferredHandleDereference using_raw_address; |
377 rm_ = no_reg; | 377 rm_ = no_reg; |
378 // Verify all Objects referred by code are NOT in new space. | 378 // Verify all Objects referred by code are NOT in new space. |
379 Object* obj = *handle; | 379 Object* obj = *handle; |
380 if (obj->IsHeapObject()) { | 380 if (obj->IsHeapObject()) { |
381 imm32_ = reinterpret_cast<intptr_t>(handle.location()); | 381 value_.immediate = reinterpret_cast<intptr_t>(handle.location()); |
382 rmode_ = RelocInfo::EMBEDDED_OBJECT; | 382 rmode_ = RelocInfo::EMBEDDED_OBJECT; |
383 } else { | 383 } else { |
384 // no relocation needed | 384 // no relocation needed |
385 imm32_ = reinterpret_cast<intptr_t>(obj); | 385 value_.immediate = reinterpret_cast<intptr_t>(obj); |
386 rmode_ = RelocInfo::NONE32; | 386 rmode_ = RelocInfo::NONE32; |
387 } | 387 } |
388 } | 388 } |
389 | 389 |
390 | 390 |
391 Operand::Operand(Register rm, ShiftOp shift_op, int shift_imm) { | 391 Operand::Operand(Register rm, ShiftOp shift_op, int shift_imm) { |
392 DCHECK(is_uint5(shift_imm)); | 392 DCHECK(is_uint5(shift_imm)); |
393 | 393 |
394 rm_ = rm; | 394 rm_ = rm; |
395 rs_ = no_reg; | 395 rs_ = no_reg; |
(...skipping 14 matching lines...) |
410 | 410 |
411 | 411 |
412 Operand::Operand(Register rm, ShiftOp shift_op, Register rs) { | 412 Operand::Operand(Register rm, ShiftOp shift_op, Register rs) { |
413 DCHECK(shift_op != RRX); | 413 DCHECK(shift_op != RRX); |
414 rm_ = rm; | 414 rm_ = rm; |
415 rs_ = no_reg; | 415 rs_ = no_reg; |
416 shift_op_ = shift_op; | 416 shift_op_ = shift_op; |
417 rs_ = rs; | 417 rs_ = rs; |
418 } | 418 } |
419 | 419 |
| 420 Operand Operand::EmbeddedNumber(double value) { |
| 421 int32_t smi; |
| 422 if (DoubleToSmiInteger(value, &smi)) return Operand(Smi::FromInt(smi)); |
| 423 Operand result(0, RelocInfo::EMBEDDED_OBJECT); |
| 424 result.is_heap_number_ = true; |
| 425 result.value_.heap_number = value; |
| 426 return result; |
| 427 } |
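Operand::EmbeddedNumber lets callers hand the assembler a double literal without deciding up front how it will be materialized: values that fit in a Smi are encoded as an ordinary immediate, while everything else becomes a deferred heap-number request that is resolved at GetCode() time. A standalone sketch of that decision (plain C++, not V8 code; the 31-bit Smi payload width and the exclusion of -0.0 are assumptions about what DoubleToSmiInteger checks):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    // Returns true if 'value' could be encoded directly as a Smi immediate,
    // i.e. it is an exact signed 31-bit integer and not negative zero.
    static bool FitsSmi(double value, int32_t* out) {
      double truncated = std::trunc(value);
      if (truncated != value) return false;                   // not an integer
      if (value == 0.0 && std::signbit(value)) return false;  // -0.0 needs a heap number
      if (truncated < -(1 << 30) || truncated > (1 << 30) - 1) return false;
      *out = static_cast<int32_t>(truncated);
      return true;
    }

    int main() {
      int32_t smi;
      std::printf("42.0 -> %s\n", FitsSmi(42.0, &smi) ? "Smi" : "heap-number request");
      std::printf("1.5  -> %s\n", FitsSmi(1.5, &smi) ? "Smi" : "heap-number request");
    }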
420 | 428 |
421 MemOperand::MemOperand(Register rn, int32_t offset, AddrMode am) { | 429 MemOperand::MemOperand(Register rn, int32_t offset, AddrMode am) { |
422 rn_ = rn; | 430 rn_ = rn; |
423 rm_ = no_reg; | 431 rm_ = no_reg; |
424 offset_ = offset; | 432 offset_ = offset; |
425 am_ = am; | 433 am_ = am; |
426 | 434 |
427 // Accesses below the stack pointer are not safe, and are prohibited by the | 435 // Accesses below the stack pointer are not safe, and are prohibited by the |
428 // ABI. We can check obvious violations here. | 436 // ABI. We can check obvious violations here. |
429 if (rn.is(sp)) { | 437 if (rn.is(sp)) { |
(...skipping 131 matching lines...) |
561 // its use consistent with other features, we always enable it if we can. | 569 // its use consistent with other features, we always enable it if we can. |
562 EnableCpuFeature(VFP32DREGS); | 570 EnableCpuFeature(VFP32DREGS); |
563 } | 571 } |
564 } | 572 } |
565 | 573 |
566 | 574 |
567 Assembler::~Assembler() { | 575 Assembler::~Assembler() { |
568 DCHECK(const_pool_blocked_nesting_ == 0); | 576 DCHECK(const_pool_blocked_nesting_ == 0); |
569 } | 577 } |
570 | 578 |
571 | 579 void Assembler::GetCode(Isolate* isolate, CodeDesc* desc) { |
572 void Assembler::GetCode(CodeDesc* desc) { | |
573 // Emit constant pool if necessary. | 580 // Emit constant pool if necessary. |
574 int constant_pool_offset = 0; | 581 int constant_pool_offset = 0; |
575 CheckConstPool(true, false); | 582 CheckConstPool(true, false); |
576 DCHECK(pending_32_bit_constants_.empty()); | 583 DCHECK(pending_32_bit_constants_.empty()); |
577 DCHECK(pending_64_bit_constants_.empty()); | 584 DCHECK(pending_64_bit_constants_.empty()); |
| 585 |
| 586 AllocateRequestedHeapNumbers(isolate); |
| 587 |
578 // Set up code descriptor. | 588 // Set up code descriptor. |
579 desc->buffer = buffer_; | 589 desc->buffer = buffer_; |
580 desc->buffer_size = buffer_size_; | 590 desc->buffer_size = buffer_size_; |
581 desc->instr_size = pc_offset(); | 591 desc->instr_size = pc_offset(); |
582 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); | 592 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); |
583 desc->constant_pool_size = | 593 desc->constant_pool_size = |
584 (constant_pool_offset ? desc->instr_size - constant_pool_offset : 0); | 594 (constant_pool_offset ? desc->instr_size - constant_pool_offset : 0); |
585 desc->origin = this; | 595 desc->origin = this; |
586 desc->unwinding_info_size = 0; | 596 desc->unwinding_info_size = 0; |
587 desc->unwinding_info = nullptr; | 597 desc->unwinding_info = nullptr; |
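GetCode() now takes an Isolate* because heap numbers requested during assembly (see EmbeddedNumber above) can only be allocated once an isolate is available; until then the constant pool holds a placeholder. A rough standalone model of that record-then-patch flow (illustrative only; the names and the exact patching mechanism are assumptions, not this patch's implementation):

    #include <cstdio>
    #include <vector>

    struct HeapNumberRequest {
      int offset;    // pc offset of the placeholder constant-pool slot
      double value;  // number to materialize later
    };

    struct SketchAssembler {
      std::vector<HeapNumberRequest> requests;

      // Called while assembling: only records the request, allocates nothing.
      void RequestHeapNumber(int offset, double value) {
        requests.push_back({offset, value});
      }

      // Called from GetCode(): allocate each number, then back-patch its slot.
      template <typename AllocateFn, typename PatchFn>
      void AllocateRequested(AllocateFn allocate, PatchFn patch) {
        for (const HeapNumberRequest& r : requests) patch(r.offset, allocate(r.value));
        requests.clear();
      }
    };

    int main() {
      SketchAssembler masm;
      masm.RequestHeapNumber(16, 1.5);
      masm.AllocateRequested(
          [](double v) { std::printf("allocate HeapNumber(%g)\n", v); return 0x1000; },
          [](int offset, int addr) { std::printf("patch slot at offset %d -> 0x%x\n", offset, addr); });
    }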
(...skipping 537 matching lines...) |
1125 bool Operand::must_output_reloc_info(const Assembler* assembler) const { | 1135 bool Operand::must_output_reloc_info(const Assembler* assembler) const { |
1126 return v8::internal::must_output_reloc_info(rmode_, assembler); | 1136 return v8::internal::must_output_reloc_info(rmode_, assembler); |
1127 } | 1137 } |
1128 | 1138 |
1129 int Operand::instructions_required(const Assembler* assembler, | 1139 int Operand::instructions_required(const Assembler* assembler, |
1130 Instr instr) const { | 1140 Instr instr) const { |
1131 DCHECK(assembler != nullptr); | 1141 DCHECK(assembler != nullptr); |
1132 if (rm_.is_valid()) return 1; | 1142 if (rm_.is_valid()) return 1; |
1133 uint32_t dummy1, dummy2; | 1143 uint32_t dummy1, dummy2; |
1134 if (must_output_reloc_info(assembler) || | 1144 if (must_output_reloc_info(assembler) || |
1135 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { | 1145 !fits_shifter(immediate(), &dummy1, &dummy2, &instr)) { |
1136 // The immediate operand cannot be encoded as a shifter operand, or use of | 1146 // The immediate operand cannot be encoded as a shifter operand, or use of |
1137 // constant pool is required. First account for the instructions required | 1147 // constant pool is required. First account for the instructions required |
1138 // for the constant pool or immediate load | 1148 // for the constant pool or immediate load |
1139 int instructions; | 1149 int instructions; |
1140 if (use_mov_immediate_load(*this, assembler)) { | 1150 if (use_mov_immediate_load(*this, assembler)) { |
1141 DCHECK(CpuFeatures::IsSupported(ARMv7)); | 1151 DCHECK(CpuFeatures::IsSupported(ARMv7)); |
1142 // A movw / movt immediate load. | 1152 // A movw / movt immediate load. |
1143 instructions = 2; | 1153 instructions = 2; |
1144 } else { | 1154 } else { |
1145 // A small constant pool load. | 1155 // A small constant pool load. |
(...skipping 17 matching lines...) |
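For context on the instruction counts above: an ARM data-processing immediate must be expressible as an 8-bit value rotated right by an even amount, which is what fits_shifter() tests; anything else costs a movw/movt pair or a constant-pool load. A standalone checker illustrating the encoding rule (general ARM knowledge, not code from this patch):

    #include <cstdint>
    #include <cstdio>

    // True if 'imm' can be encoded as an 8-bit value rotated right by an
    // even amount (the ARM "operand2" immediate form).
    static bool FitsArmImmediate(uint32_t imm) {
      for (int rot = 0; rot < 32; rot += 2) {
        // Rotate left by 'rot' to undo a rotate-right-by-'rot' encoding.
        uint32_t v = (imm << rot) | (rot ? (imm >> (32 - rot)) : 0);
        if (v <= 0xff) return true;
      }
      return false;
    }

    int main() {
      std::printf("0xff000000 fits: %d\n", FitsArmImmediate(0xff000000u));  // 1 (0xff ror 8)
      std::printf("0x12345678 fits: %d\n", FitsArmImmediate(0x12345678u));  // 0
    }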
1163 void Assembler::move_32_bit_immediate(Register rd, | 1173 void Assembler::move_32_bit_immediate(Register rd, |
1164 const Operand& x, | 1174 const Operand& x, |
1165 Condition cond) { | 1175 Condition cond) { |
1166 if (use_mov_immediate_load(x, this)) { | 1176 if (use_mov_immediate_load(x, this)) { |
1167 // use_mov_immediate_load should return false when we need to output | 1177 // use_mov_immediate_load should return false when we need to output |
1168 // relocation info, since we prefer the constant pool for values that | 1178 // relocation info, since we prefer the constant pool for values that |
1169 // can be patched. | 1179 // can be patched. |
1170 DCHECK(!x.must_output_reloc_info(this)); | 1180 DCHECK(!x.must_output_reloc_info(this)); |
1171 Register target = rd.code() == pc.code() ? ip : rd; | 1181 Register target = rd.code() == pc.code() ? ip : rd; |
1172 if (CpuFeatures::IsSupported(ARMv7)) { | 1182 if (CpuFeatures::IsSupported(ARMv7)) { |
1173 uint32_t imm32 = static_cast<uint32_t>(x.imm32_); | 1183 uint32_t imm32 = static_cast<uint32_t>(x.immediate()); |
1174 CpuFeatureScope scope(this, ARMv7); | 1184 CpuFeatureScope scope(this, ARMv7); |
1175 movw(target, imm32 & 0xffff, cond); | 1185 movw(target, imm32 & 0xffff, cond); |
1176 movt(target, imm32 >> 16, cond); | 1186 movt(target, imm32 >> 16, cond); |
1177 } | 1187 } |
1178 if (target.code() != rd.code()) { | 1188 if (target.code() != rd.code()) { |
1179 mov(rd, target, LeaveCC, cond); | 1189 mov(rd, target, LeaveCC, cond); |
1180 } | 1190 } |
1181 } else { | 1191 } else { |
1182 ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_); | 1192 int32_t immediate; |
| 1193 if (x.is_heap_number()) { |
| 1194 RequestHeapNumber(x.heap_number()); |
| 1195 immediate = 0; |
| 1196 } else { |
| 1197 immediate = x.immediate(); |
| 1198 } |
| 1199 ConstantPoolAddEntry(pc_offset(), x.rmode_, immediate); |
1183 ldr(rd, MemOperand(pc, 0), cond); | 1200 ldr(rd, MemOperand(pc, 0), cond); |
1184 } | 1201 } |
1185 } | 1202 } |
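The ARMv7 branch above splits the 32-bit immediate into 16-bit halves so any value loads in exactly two instructions without touching the literal pool; the non-ARMv7 and heap-number cases instead emit a placeholder constant-pool entry plus an ldr. A quick standalone illustration of the movw/movt split:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint32_t imm32 = 0x12345678;
      // Mirrors the masks used in move_32_bit_immediate():
      std::printf("movw rd, #0x%04x\n", imm32 & 0xffff);  // low half:  0x5678
      std::printf("movt rd, #0x%04x\n", imm32 >> 16);     // high half: 0x1234
    }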
1186 | 1203 |
1187 | 1204 |
1188 void Assembler::addrmod1(Instr instr, | 1205 void Assembler::addrmod1(Instr instr, |
1189 Register rn, | 1206 Register rn, |
1190 Register rd, | 1207 Register rd, |
1191 const Operand& x) { | 1208 const Operand& x) { |
1192 CheckBuffer(); | 1209 CheckBuffer(); |
1193 DCHECK((instr & ~(kCondMask | kOpCodeMask | S)) == 0); | 1210 DCHECK((instr & ~(kCondMask | kOpCodeMask | S)) == 0); |
1194 if (!x.rm_.is_valid()) { | 1211 if (!x.rm_.is_valid()) { |
1195 // Immediate. | 1212 // Immediate. |
1196 uint32_t rotate_imm; | 1213 uint32_t rotate_imm; |
1197 uint32_t immed_8; | 1214 uint32_t immed_8; |
1198 if (x.must_output_reloc_info(this) || | 1215 if (x.must_output_reloc_info(this) || |
1199 !fits_shifter(x.imm32_, &rotate_imm, &immed_8, &instr)) { | 1216 !fits_shifter(x.immediate(), &rotate_imm, &immed_8, &instr)) { |
1200 // The immediate operand cannot be encoded as a shifter operand, so load | 1217 // The immediate operand cannot be encoded as a shifter operand, so load |
1201 // it first to register ip and change the original instruction to use ip. | 1218 // it first to register ip and change the original instruction to use ip. |
1202 // However, if the original instruction is a 'mov rd, x' (not setting the | 1219 // However, if the original instruction is a 'mov rd, x' (not setting the |
1203 // condition code), then replace it with a 'ldr rd, [pc]'. | 1220 // condition code), then replace it with a 'ldr rd, [pc]'. |
1204 CHECK(!rn.is(ip)); // rn should never be ip, or will be trashed | 1221 CHECK(!rn.is(ip)); // rn should never be ip, or will be trashed |
1205 Condition cond = Instruction::ConditionField(instr); | 1222 Condition cond = Instruction::ConditionField(instr); |
1206 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set | 1223 if ((instr & ~kCondMask) == 13*B21) { // mov, S not set |
1207 move_32_bit_immediate(rd, x, cond); | 1224 move_32_bit_immediate(rd, x, cond); |
1208 } else { | 1225 } else { |
1209 mov(ip, x, LeaveCC, cond); | 1226 mov(ip, x, LeaveCC, cond); |
(...skipping 793 matching lines...) |
2003 void Assembler::msr(SRegisterFieldMask fields, const Operand& src, | 2020 void Assembler::msr(SRegisterFieldMask fields, const Operand& src, |
2004 Condition cond) { | 2021 Condition cond) { |
2005 DCHECK((fields & 0x000f0000) != 0); // At least one field must be set. | 2022 DCHECK((fields & 0x000f0000) != 0); // At least one field must be set. |
2006 DCHECK(((fields & 0xfff0ffff) == CPSR) || ((fields & 0xfff0ffff) == SPSR)); | 2023 DCHECK(((fields & 0xfff0ffff) == CPSR) || ((fields & 0xfff0ffff) == SPSR)); |
2007 Instr instr; | 2024 Instr instr; |
2008 if (!src.rm_.is_valid()) { | 2025 if (!src.rm_.is_valid()) { |
2009 // Immediate. | 2026 // Immediate. |
2010 uint32_t rotate_imm; | 2027 uint32_t rotate_imm; |
2011 uint32_t immed_8; | 2028 uint32_t immed_8; |
2012 if (src.must_output_reloc_info(this) || | 2029 if (src.must_output_reloc_info(this) || |
2013 !fits_shifter(src.imm32_, &rotate_imm, &immed_8, NULL)) { | 2030 !fits_shifter(src.immediate(), &rotate_imm, &immed_8, NULL)) { |
2014 // Immediate operand cannot be encoded, load it first to register ip. | 2031 // Immediate operand cannot be encoded, load it first to register ip. |
2015 move_32_bit_immediate(ip, src); | 2032 move_32_bit_immediate(ip, src); |
2016 msr(fields, Operand(ip), cond); | 2033 msr(fields, Operand(ip), cond); |
2017 return; | 2034 return; |
2018 } | 2035 } |
2019 instr = I | rotate_imm*B8 | immed_8; | 2036 instr = I | rotate_imm*B8 | immed_8; |
2020 } else { | 2037 } else { |
2021 DCHECK(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed | 2038 DCHECK(!src.rs_.is_valid() && src.shift_imm_ == 0); // only rm allowed |
2022 instr = src.rm_.code(); | 2039 instr = src.rm_.code(); |
2023 } | 2040 } |
(...skipping 3328 matching lines...) |
5352 } | 5369 } |
5353 | 5370 |
5354 void PatchingAssembler::FlushICache(Isolate* isolate) { | 5371 void PatchingAssembler::FlushICache(Isolate* isolate) { |
5355 Assembler::FlushICache(isolate, buffer_, buffer_size_ - kGap); | 5372 Assembler::FlushICache(isolate, buffer_, buffer_size_ - kGap); |
5356 } | 5373 } |
5357 | 5374 |
5358 } // namespace internal | 5375 } // namespace internal |
5359 } // namespace v8 | 5376 } // namespace v8 |
5360 | 5377 |
5361 #endif // V8_TARGET_ARCH_ARM | 5378 #endif // V8_TARGET_ARCH_ARM |