OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 275 matching lines...)
286 } | 286 } |
287 | 287 |
288 | 288 |
289 // ----------------------------------------------------------------------------- | 289 // ----------------------------------------------------------------------------- |
290 // Implementation of RelocInfo | 290 // Implementation of RelocInfo |
291 | 291 |
292 const int RelocInfo::kApplyMask = 0; | 292 const int RelocInfo::kApplyMask = 0; |
293 | 293 |
294 | 294 |
295 bool RelocInfo::IsCodedSpecially() { | 295 bool RelocInfo::IsCodedSpecially() { |
296 // The deserializer needs to know whether a pointer is specially coded. Being | 296 // The deserializer needs to know whether a pointer is specially coded. Being |
297 // specially coded on ARM means that it is a movw/movt instruction. We don't | 297 // specially coded on ARM means that it is a movw/movt instruction or an |
298 // generate those yet. | 298 // out-of-line constant pool entry. These only occur if |
299 return false; | 299 // FLAG_enable_ool_constant_pool is true. |
| 300 return FLAG_enable_ool_constant_pool; |
300 } | 301 } |
301 | 302 |
302 | 303 |
303 bool RelocInfo::IsInConstantPool() { | 304 bool RelocInfo::IsInConstantPool() { |
304 return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)); | 305 if (FLAG_enable_ool_constant_pool) { |
| 306 return Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc_)); |
| 307 } else { |
| 308 return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)); |
| 309 } |
305 } | 310 } |
306 | 311 |
307 | 312 |
308 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { | 313 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { |
309 // Patch the code at the current address with the supplied instructions. | 314 // Patch the code at the current address with the supplied instructions. |
310 Instr* pc = reinterpret_cast<Instr*>(pc_); | 315 Instr* pc = reinterpret_cast<Instr*>(pc_); |
311 Instr* instr = reinterpret_cast<Instr*>(instructions); | 316 Instr* instr = reinterpret_cast<Instr*>(instructions); |
312 for (int i = 0; i < instruction_count; i++) { | 317 for (int i = 0; i < instruction_count; i++) { |
313 *(pc + i) = *(instr + i); | 318 *(pc + i) = *(instr + i); |
314 } | 319 } |
(...skipping 158 matching lines...)
473 al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16; | 478 al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16; |
474 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r)) | 479 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r)) |
475 // register r is not encoded. | 480 // register r is not encoded. |
476 const Instr kPopRegPattern = | 481 const Instr kPopRegPattern = |
477 al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16; | 482 al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16; |
478 // mov lr, pc | 483 // mov lr, pc |
479 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12; | 484 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12; |
480 // ldr rd, [pc, #offset] | 485 // ldr rd, [pc, #offset] |
481 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16; | 486 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16; |
482 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16; | 487 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16; |
| 488 // ldr rd, [pp, #offset] |
| 489 const Instr kLdrPpMask = 15 * B24 | 7 * B20 | 15 * B16; |
| 490 const Instr kLdrPpPattern = 5 * B24 | L | kRegister_r8_Code * B16; |
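| // pp, the out-of-line constant pool pointer, is aliased to r8 (hence |
| // kRegister_r8_Code in the pp patterns above and below). |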
483 // vldr dd, [pc, #offset] | 491 // vldr dd, [pc, #offset] |
484 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; | 492 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; |
485 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8; | 493 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8; |
| 494 // vldr dd, [pp, #offset] |
| 495 const Instr kVldrDPpMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; |
| 496 const Instr kVldrDPpPattern = 13 * B24 | L | kRegister_r8_Code * B16 | 11 * B8; |
486 // blxcc rm | 497 // blxcc rm |
487 const Instr kBlxRegMask = | 498 const Instr kBlxRegMask = |
488 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4; | 499 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4; |
489 const Instr kBlxRegPattern = | 500 const Instr kBlxRegPattern = |
490 B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX; | 501 B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX; |
491 const Instr kBlxIp = al | kBlxRegPattern | ip.code(); | 502 const Instr kBlxIp = al | kBlxRegPattern | ip.code(); |
492 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16; | 503 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16; |
493 const Instr kMovMvnPattern = 0xd * B21; | 504 const Instr kMovMvnPattern = 0xd * B21; |
494 const Instr kMovMvnFlip = B22; | 505 const Instr kMovMvnFlip = B22; |
495 const Instr kMovLeaveCCMask = 0xdff * B16; | 506 const Instr kMovLeaveCCMask = 0xdff * B16; |
(...skipping 17 matching lines...)
513 const Instr kStrRegFpNegOffsetPattern = | 524 const Instr kStrRegFpNegOffsetPattern = |
514 al | B26 | NegOffset | kRegister_fp_Code * B16; | 525 al | B26 | NegOffset | kRegister_fp_Code * B16; |
515 const Instr kLdrStrInstrTypeMask = 0xffff0000; | 526 const Instr kLdrStrInstrTypeMask = 0xffff0000; |
516 const Instr kLdrStrInstrArgumentMask = 0x0000ffff; | 527 const Instr kLdrStrInstrArgumentMask = 0x0000ffff; |
517 const Instr kLdrStrOffsetMask = 0x00000fff; | 528 const Instr kLdrStrOffsetMask = 0x00000fff; |
518 | 529 |
519 | 530 |
520 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) | 531 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) |
521 : AssemblerBase(isolate, buffer, buffer_size), | 532 : AssemblerBase(isolate, buffer, buffer_size), |
522 recorded_ast_id_(TypeFeedbackId::None()), | 533 recorded_ast_id_(TypeFeedbackId::None()), |
| 534 constant_pool_builder_(), |
523 positions_recorder_(this) { | 535 positions_recorder_(this) { |
524 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); | 536 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); |
525 num_pending_32_bit_reloc_info_ = 0; | 537 num_pending_32_bit_reloc_info_ = 0; |
526 num_pending_64_bit_reloc_info_ = 0; | 538 num_pending_64_bit_reloc_info_ = 0; |
527 next_buffer_check_ = 0; | 539 next_buffer_check_ = 0; |
528 const_pool_blocked_nesting_ = 0; | 540 const_pool_blocked_nesting_ = 0; |
529 no_const_pool_before_ = 0; | 541 no_const_pool_before_ = 0; |
530 first_const_pool_32_use_ = -1; | 542 first_const_pool_32_use_ = -1; |
531 first_const_pool_64_use_ = -1; | 543 first_const_pool_64_use_ = -1; |
532 last_bound_pos_ = 0; | 544 last_bound_pos_ = 0; |
533 constant_pool_available_ = !FLAG_enable_ool_constant_pool; | 545 constant_pool_available_ = !FLAG_enable_ool_constant_pool; |
534 constant_pool_full_ = false; | 546 constant_pool_full_ = false; |
535 ClearRecordedAstId(); | 547 ClearRecordedAstId(); |
536 } | 548 } |
537 | 549 |
538 | 550 |
539 Assembler::~Assembler() { | 551 Assembler::~Assembler() { |
540 ASSERT(const_pool_blocked_nesting_ == 0); | 552 ASSERT(const_pool_blocked_nesting_ == 0); |
541 } | 553 } |
542 | 554 |
543 | 555 |
544 void Assembler::GetCode(CodeDesc* desc) { | 556 void Assembler::GetCode(CodeDesc* desc) { |
545 // Emit constant pool if necessary. | 557 if (!FLAG_enable_ool_constant_pool) { |
546 CheckConstPool(true, false); | 558 // Emit constant pool if necessary. |
547 ASSERT(num_pending_32_bit_reloc_info_ == 0); | 559 CheckConstPool(true, false); |
548 ASSERT(num_pending_64_bit_reloc_info_ == 0); | 560 ASSERT(num_pending_32_bit_reloc_info_ == 0); |
549 | 561 ASSERT(num_pending_64_bit_reloc_info_ == 0); |
| 562 } |
550 // Set up code descriptor. | 563 // Set up code descriptor. |
551 desc->buffer = buffer_; | 564 desc->buffer = buffer_; |
552 desc->buffer_size = buffer_size_; | 565 desc->buffer_size = buffer_size_; |
553 desc->instr_size = pc_offset(); | 566 desc->instr_size = pc_offset(); |
554 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); | 567 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); |
555 desc->origin = this; | 568 desc->origin = this; |
556 } | 569 } |
557 | 570 |
558 | 571 |
559 void Assembler::Align(int m) { | 572 void Assembler::Align(int m) { |
(...skipping 162 matching lines...)
722 } | 735 } |
723 | 736 |
724 | 737 |
725 bool Assembler::IsLdrPcImmediateOffset(Instr instr) { | 738 bool Assembler::IsLdrPcImmediateOffset(Instr instr) { |
726 // Check the instruction is indeed a | 739 // Check the instruction is indeed a |
727 // ldr<cond> <Rd>, [pc +/- offset_12]. | 740 // ldr<cond> <Rd>, [pc +/- offset_12]. |
728 return (instr & kLdrPCMask) == kLdrPCPattern; | 741 return (instr & kLdrPCMask) == kLdrPCPattern; |
729 } | 742 } |
730 | 743 |
731 | 744 |
| 745 bool Assembler::IsLdrPpImmediateOffset(Instr instr) { |
| 746 // Check the instruction is indeed a |
| 747 // ldr<cond> <Rd>, [pp +/- offset_12]. |
| 748 return (instr & kLdrPpMask) == kLdrPpPattern; |
| 749 } |
| 750 |
| 751 |
732 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) { | 752 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) { |
733 // Check the instruction is indeed a | 753 // Check the instruction is indeed a |
734 // vldr<cond> <Dd>, [pc +/- offset_10]. | 754 // vldr<cond> <Dd>, [pc +/- offset_10]. |
735 return (instr & kVldrDPCMask) == kVldrDPCPattern; | 755 return (instr & kVldrDPCMask) == kVldrDPCPattern; |
736 } | 756 } |
737 | 757 |
738 | 758 |
| 759 bool Assembler::IsVldrDPpImmediateOffset(Instr instr) { |
| 760 // Check the instruction is indeed a |
| 761 // vldr<cond> <Dd>, [pp +/- offset_10]. |
| 762 return (instr & kVldrDPpMask) == kVldrDPpPattern; |
| 763 } |
| 764 |
| 765 |
739 bool Assembler::IsTstImmediate(Instr instr) { | 766 bool Assembler::IsTstImmediate(Instr instr) { |
740 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) == | 767 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) == |
741 (I | TST | S); | 768 (I | TST | S); |
742 } | 769 } |
743 | 770 |
744 | 771 |
745 bool Assembler::IsCmpRegister(Instr instr) { | 772 bool Assembler::IsCmpRegister(Instr instr) { |
746 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) == | 773 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) == |
747 (CMP | S); | 774 (CMP | S); |
748 } | 775 } |
(...skipping 307 matching lines...)
1056 return Serializer::enabled(); | 1083 return Serializer::enabled(); |
1057 } else if (RelocInfo::IsNone(rmode_)) { | 1084 } else if (RelocInfo::IsNone(rmode_)) { |
1058 return false; | 1085 return false; |
1059 } | 1086 } |
1060 return true; | 1087 return true; |
1061 } | 1088 } |
1062 | 1089 |
1063 | 1090 |
1064 static bool use_mov_immediate_load(const Operand& x, | 1091 static bool use_mov_immediate_load(const Operand& x, |
1065 const Assembler* assembler) { | 1092 const Assembler* assembler) { |
1066 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && | 1093 if (assembler != NULL && !assembler->can_use_constant_pool()) { |
| 1094 // If there is no constant pool available, we must use a mov immediate. |
| 1095 // TODO(rmcilroy): enable ARMv6 support. |
| 1096 ASSERT(CpuFeatures::IsSupported(ARMv7)); |
| 1097 return true; |
| 1098 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && |
1067 (assembler == NULL || !assembler->predictable_code_size())) { | 1099 (assembler == NULL || !assembler->predictable_code_size())) { |
1068 // Prefer movw / movt to constant pool if it is more efficient on the CPU. | 1100 // Prefer movw / movt to constant pool if it is more efficient on the CPU. |
1069 return true; | 1101 return true; |
1070 } else if (x.must_output_reloc_info(assembler)) { | 1102 } else if (x.must_output_reloc_info(assembler)) { |
1071 // Prefer constant pool if data is likely to be patched. | 1103 // Prefer constant pool if data is likely to be patched. |
1072 return false; | 1104 return false; |
1073 } else { | 1105 } else { |
1074 // Otherwise, use immediate load if movw / movt is available. | 1106 // Otherwise, use immediate load if movw / movt is available. |
1075 return CpuFeatures::IsSupported(ARMv7); | 1107 return CpuFeatures::IsSupported(ARMv7); |
1076 } | 1108 } |
(...skipping 22 matching lines...)
1099 // No use of constant pool and the immediate operand can be encoded as a | 1131 // No use of constant pool and the immediate operand can be encoded as a |
1100 // shifter operand. | 1132 // shifter operand. |
1101 return true; | 1133 return true; |
1102 } | 1134 } |
1103 } | 1135 } |
1104 | 1136 |
1105 | 1137 |
1106 void Assembler::move_32_bit_immediate(Register rd, | 1138 void Assembler::move_32_bit_immediate(Register rd, |
1107 const Operand& x, | 1139 const Operand& x, |
1108 Condition cond) { | 1140 Condition cond) { |
1109 if (rd.code() != pc.code()) { | 1141 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); |
1110 if (use_mov_immediate_load(x, this)) { | 1142 if (x.must_output_reloc_info(this)) { |
1111 if (x.must_output_reloc_info(this)) { | 1143 RecordRelocInfo(rinfo); |
1112 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); | |
1113 // Make sure the movw/movt doesn't get separated. | |
1114 BlockConstPoolFor(2); | |
1115 } | |
1116 emit(cond | 0x30*B20 | rd.code()*B12 | | |
1117 EncodeMovwImmediate(x.imm32_ & 0xffff)); | |
1118 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); | |
1119 return; | |
1120 } | |
1121 } | 1144 } |
1122 | 1145 |
1123 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | 1146 if (use_mov_immediate_load(x, this)) { |
1124 ldr(rd, MemOperand(pc, 0), cond); | 1147 Register target = rd.code() == pc.code() ? ip : rd; |
| 1148 // TODO(rmcilroy): add ARMv6 support for immediate loads. |
| 1149 ASSERT(CpuFeatures::IsSupported(ARMv7)); |
| 1150 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { |
| 1151 // Make sure the movw/movt doesn't get separated. |
| 1152 BlockConstPoolFor(2); |
| 1153 } |
| 1154 emit(cond | 0x30*B20 | target.code()*B12 | |
| 1155 EncodeMovwImmediate(x.imm32_ & 0xffff)); |
| 1156 movt(target, static_cast<uint32_t>(x.imm32_) >> 16, cond); |
| 1157 if (target.code() != rd.code()) { |
| 1158 mov(rd, target, LeaveCC, cond); |
| 1159 } |
| 1160 } else { |
| 1161 ASSERT(can_use_constant_pool()); |
| 1162 ConstantPoolAddEntry(rinfo); |
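| // The #0 offset below is a placeholder: it is patched either at constant |
| // pool emission (pc-relative) or by ConstantPoolBuilder::Populate |
| // (pp-relative). |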
| 1163 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); |
| 1164 } |
1125 } | 1165 } |
1126 | 1166 |
1127 | 1167 |
1128 void Assembler::addrmod1(Instr instr, | 1168 void Assembler::addrmod1(Instr instr, |
1129 Register rn, | 1169 Register rn, |
1130 Register rd, | 1170 Register rd, |
1131 const Operand& x) { | 1171 const Operand& x) { |
1132 CheckBuffer(); | 1172 CheckBuffer(); |
1133 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); | 1173 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); |
1134 if (!x.rm_.is_valid()) { | 1174 if (!x.rm_.is_valid()) { |
(...skipping 1279 matching lines...)
2414 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { | 2454 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { |
2415 // The double can be encoded in the instruction. | 2455 // The double can be encoded in the instruction. |
2416 // | 2456 // |
2417 // Dd = immediate | 2457 // Dd = immediate |
2418 // Instruction details available in ARM DDI 0406C.b, A8-936. | 2458 // Instruction details available in ARM DDI 0406C.b, A8-936. |
2419 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | | 2459 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | |
2420 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) | 2460 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) |
2421 int vd, d; | 2461 int vd, d; |
2422 dst.split_code(&vd, &d); | 2462 dst.split_code(&vd, &d); |
2423 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); | 2463 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); |
2424 } else if (FLAG_enable_vldr_imm) { | 2464 } else if (FLAG_enable_vldr_imm && can_use_constant_pool()) { |
2425 // TODO(jfb) Temporarily turned off until we have constant blinding or | 2465 // TODO(jfb) Temporarily turned off until we have constant blinding or |
2426 // some equivalent mitigation: an attacker can otherwise control | 2466 // some equivalent mitigation: an attacker can otherwise control |
2427 // generated data which also happens to be executable, a Very Bad | 2467 // generated data which also happens to be executable, a Very Bad |
2428 // Thing indeed. | 2468 // Thing indeed. |
2429 // Blinding gets tricky because we don't have xor, we probably | 2469 // Blinding gets tricky because we don't have xor, we probably |
2430 // need to add/subtract without losing precision, which requires a | 2470 // need to add/subtract without losing precision, which requires a |
2431 // cookie value that Lithium is probably better positioned to | 2471 // cookie value that Lithium is probably better positioned to |
2432 // choose. | 2472 // choose. |
2433 // We could also add a few peepholes here like detecting 0.0 and | 2473 // We could also add a few peepholes here like detecting 0.0 and |
2434 // -0.0 and doing a vmov from the sequestered d14, forcing denorms | 2474 // -0.0 and doing a vmov from the sequestered d14, forcing denorms |
2435 // to zero (we set flush-to-zero), and normalizing NaN values. | 2475 // to zero (we set flush-to-zero), and normalizing NaN values. |
2436 // We could also detect redundant values. | 2476 // We could also detect redundant values. |
2437 // The code could also randomize the order of values, though | 2477 // The code could also randomize the order of values, though |
2438 // that's tricky because vldr has a limited reach. Furthermore | 2478 // that's tricky because vldr has a limited reach. Furthermore |
2439 // it breaks load locality. | 2479 // it breaks load locality. |
2440 RecordRelocInfo(imm); | 2480 RelocInfo rinfo(pc_, imm); |
2441 vldr(dst, MemOperand(pc, 0)); | 2481 ConstantPoolAddEntry(rinfo); |
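| // As in move_32_bit_immediate, the #0 offset below is a placeholder that |
| // is patched once the entry's pool offset is known. |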
| 2482 vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0)); |
2442 } else { | 2483 } else { |
2443 // Synthesise the double from ARM immediates. | 2484 // Synthesise the double from ARM immediates. |
2444 uint32_t lo, hi; | 2485 uint32_t lo, hi; |
2445 DoubleAsTwoUInt32(imm, &lo, &hi); | 2486 DoubleAsTwoUInt32(imm, &lo, &hi); |
2446 | 2487 |
2447 if (scratch.is(no_reg)) { | 2488 if (scratch.is(no_reg)) { |
2448 if (dst.code() < 16) { | 2489 if (dst.code() < 16) { |
2449 const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code()); | 2490 const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code()); |
2450 // Move the low part of the double into the lower of the corresponding S | 2491 // Move the low part of the double into the lower of the corresponding S |
2451 // registers of D register dst. | 2492 // registers of D register dst. |
(...skipping 709 matching lines...)
3161 rinfo.rmode() != RelocInfo::POSITION); | 3202 rinfo.rmode() != RelocInfo::POSITION); |
3162 if (rinfo.rmode() != RelocInfo::JS_RETURN) { | 3203 if (rinfo.rmode() != RelocInfo::JS_RETURN) { |
3163 rinfo.set_pc(rinfo.pc() + pc_delta); | 3204 rinfo.set_pc(rinfo.pc() + pc_delta); |
3164 } | 3205 } |
3165 } | 3206 } |
3166 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { | 3207 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { |
3167 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; | 3208 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; |
3168 ASSERT(rinfo.rmode() == RelocInfo::NONE64); | 3209 ASSERT(rinfo.rmode() == RelocInfo::NONE64); |
3169 rinfo.set_pc(rinfo.pc() + pc_delta); | 3210 rinfo.set_pc(rinfo.pc() + pc_delta); |
3170 } | 3211 } |
| 3212 constant_pool_builder_.Relocate(pc_delta); |
3171 } | 3213 } |
3172 | 3214 |
3173 | 3215 |
3174 void Assembler::db(uint8_t data) { | 3216 void Assembler::db(uint8_t data) { |
3175 // No relocation info should be pending while using db. db is used | 3217 // No relocation info should be pending while using db. db is used |
3176 // to write pure data with no pointers and the constant pool should | 3218 // to write pure data with no pointers and the constant pool should |
3177 // be emitted before using db. | 3219 // be emitted before using db. |
3178 ASSERT(num_pending_32_bit_reloc_info_ == 0); | 3220 ASSERT(num_pending_32_bit_reloc_info_ == 0); |
3179 ASSERT(num_pending_64_bit_reloc_info_ == 0); | 3221 ASSERT(num_pending_64_bit_reloc_info_ == 0); |
3180 CheckBuffer(); | 3222 CheckBuffer(); |
(...skipping 15 matching lines...)
3196 | 3238 |
3197 | 3239 |
3198 void Assembler::emit_code_stub_address(Code* stub) { | 3240 void Assembler::emit_code_stub_address(Code* stub) { |
3199 CheckBuffer(); | 3241 CheckBuffer(); |
3200 *reinterpret_cast<uint32_t*>(pc_) = | 3242 *reinterpret_cast<uint32_t*>(pc_) = |
3201 reinterpret_cast<uint32_t>(stub->instruction_start()); | 3243 reinterpret_cast<uint32_t>(stub->instruction_start()); |
3202 pc_ += sizeof(uint32_t); | 3244 pc_ += sizeof(uint32_t); |
3203 } | 3245 } |
3204 | 3246 |
3205 | 3247 |
3206 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, | 3248 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
3207 UseConstantPoolMode mode) { | |
3208 // We do not try to reuse pool constants. | |
3209 RelocInfo rinfo(pc_, rmode, data, NULL); | 3249 RelocInfo rinfo(pc_, rmode, data, NULL); |
3210 if (((rmode >= RelocInfo::JS_RETURN) && | 3250 RecordRelocInfo(rinfo); |
3211 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || | 3251 } |
3212 (rmode == RelocInfo::CONST_POOL) || | 3252 |
3213 mode == DONT_USE_CONSTANT_POOL) { | 3253 |
3214 // Adjust code for new modes. | 3254 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { |
3215 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) | |
3216 || RelocInfo::IsJSReturn(rmode) | |
3217 || RelocInfo::IsComment(rmode) | |
3218 || RelocInfo::IsPosition(rmode) | |
3219 || RelocInfo::IsConstPool(rmode) | |
3220 || mode == DONT_USE_CONSTANT_POOL); | |
3221 // These modes do not need an entry in the constant pool. | |
3222 } else { | |
3223 RecordRelocInfoConstantPoolEntryHelper(rinfo); | |
3224 } | |
3225 if (!RelocInfo::IsNone(rinfo.rmode())) { | 3255 if (!RelocInfo::IsNone(rinfo.rmode())) { |
3226 // Don't record external references unless the heap will be serialized. | 3256 // Don't record external references unless the heap will be serialized. |
3227 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { | 3257 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { |
3228 #ifdef DEBUG | 3258 #ifdef DEBUG |
3229 if (!Serializer::enabled()) { | 3259 if (!Serializer::enabled()) { |
3230 Serializer::TooLateToEnableNow(); | 3260 Serializer::TooLateToEnableNow(); |
3231 } | 3261 } |
3232 #endif | 3262 #endif |
3233 if (!Serializer::enabled() && !emit_debug_code()) { | 3263 if (!Serializer::enabled() && !emit_debug_code()) { |
3234 return; | 3264 return; |
3235 } | 3265 } |
3236 } | 3266 } |
3237 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here | 3267 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here |
3238 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { | 3268 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { |
3239 RelocInfo reloc_info_with_ast_id(pc_, | 3269 RelocInfo reloc_info_with_ast_id(rinfo.pc(), |
3240 rmode, | 3270 rinfo.rmode(), |
3241 RecordedAstId().ToInt(), | 3271 RecordedAstId().ToInt(), |
3242 NULL); | 3272 NULL); |
3243 ClearRecordedAstId(); | 3273 ClearRecordedAstId(); |
3244 reloc_info_writer.Write(&reloc_info_with_ast_id); | 3274 reloc_info_writer.Write(&reloc_info_with_ast_id); |
3245 } else { | 3275 } else { |
3246 reloc_info_writer.Write(&rinfo); | 3276 reloc_info_writer.Write(&rinfo); |
3247 } | 3277 } |
3248 } | 3278 } |
3249 } | 3279 } |
3250 | 3280 |
3251 | 3281 |
3252 void Assembler::RecordRelocInfo(double data) { | 3282 void Assembler::ConstantPoolAddEntry(const RelocInfo& rinfo) { |
3253 // We do not try to reuse pool constants. | 3283 if (FLAG_enable_ool_constant_pool) { |
3254 RelocInfo rinfo(pc_, data); | 3284 constant_pool_builder_.AddEntry(this, rinfo); |
3255 RecordRelocInfoConstantPoolEntryHelper(rinfo); | 3285 } else { |
3256 } | 3286 if (rinfo.rmode() == RelocInfo::NONE64) { |
3257 | 3287 ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); |
3258 | 3288 if (num_pending_64_bit_reloc_info_ == 0) { |
3259 void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) { | 3289 first_const_pool_64_use_ = pc_offset(); |
3260 if (rinfo.rmode() == RelocInfo::NONE64) { | 3290 } |
3261 ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); | 3291 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo; |
3262 if (num_pending_64_bit_reloc_info_ == 0) { | 3292 } else { |
3263 first_const_pool_64_use_ = pc_offset(); | 3293 ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo); |
| 3294 if (num_pending_32_bit_reloc_info_ == 0) { |
| 3295 first_const_pool_32_use_ = pc_offset(); |
| 3296 } |
| 3297 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo; |
3264 } | 3298 } |
3265 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo; | 3299 // Make sure the constant pool is not emitted in place of the next |
3266 } else { | 3300 // instruction for which we just recorded relocation info. |
3267 ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo); | 3301 BlockConstPoolFor(1); |
3268 if (num_pending_32_bit_reloc_info_ == 0) { | |
3269 first_const_pool_32_use_ = pc_offset(); | |
3270 } | |
3271 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo; | |
3272 } | 3302 } |
3273 // Make sure the constant pool is not emitted in place of the next | |
3274 // instruction for which we just recorded relocation info. | |
3275 BlockConstPoolFor(1); | |
3276 } | 3303 } |
3277 | 3304 |
3278 | 3305 |
3279 void Assembler::BlockConstPoolFor(int instructions) { | 3306 void Assembler::BlockConstPoolFor(int instructions) { |
| 3307 if (FLAG_enable_ool_constant_pool) { |
| 3308 // Should be a no-op if using an out-of-line constant pool. |
| 3309 ASSERT(num_pending_32_bit_reloc_info_ == 0); |
| 3310 ASSERT(num_pending_64_bit_reloc_info_ == 0); |
| 3311 return; |
| 3312 } |
| 3313 |
3280 int pc_limit = pc_offset() + instructions * kInstrSize; | 3314 int pc_limit = pc_offset() + instructions * kInstrSize; |
3281 if (no_const_pool_before_ < pc_limit) { | 3315 if (no_const_pool_before_ < pc_limit) { |
3282 // Max pool start (if we need a jump and an alignment). | 3316 // Max pool start (if we need a jump and an alignment). |
3283 #ifdef DEBUG | 3317 #ifdef DEBUG |
3284 int start = pc_limit + kInstrSize + 2 * kPointerSize; | 3318 int start = pc_limit + kInstrSize + 2 * kPointerSize; |
3285 ASSERT((num_pending_32_bit_reloc_info_ == 0) || | 3319 ASSERT((num_pending_32_bit_reloc_info_ == 0) || |
3286 (start - first_const_pool_32_use_ + | 3320 (start - first_const_pool_32_use_ + |
3287 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); | 3321 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); |
3288 ASSERT((num_pending_64_bit_reloc_info_ == 0) || | 3322 ASSERT((num_pending_64_bit_reloc_info_ == 0) || |
3289 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); | 3323 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); |
3290 #endif | 3324 #endif |
3291 no_const_pool_before_ = pc_limit; | 3325 no_const_pool_before_ = pc_limit; |
3292 } | 3326 } |
3293 | 3327 |
3294 if (next_buffer_check_ < no_const_pool_before_) { | 3328 if (next_buffer_check_ < no_const_pool_before_) { |
3295 next_buffer_check_ = no_const_pool_before_; | 3329 next_buffer_check_ = no_const_pool_before_; |
3296 } | 3330 } |
3297 } | 3331 } |
3298 | 3332 |
3299 | 3333 |
3300 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { | 3334 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { |
| 3335 if (FLAG_enable_ool_constant_pool) { |
| 3336 // Should be a no-op if using an out-of-line constant pool. |
| 3337 ASSERT(num_pending_32_bit_reloc_info_ == 0); |
| 3338 ASSERT(num_pending_64_bit_reloc_info_ == 0); |
| 3339 return; |
| 3340 } |
| 3341 |
3301 // Some short sequences of instructions mustn't be broken up by constant pool | 3342 // Some short sequences of instructions mustn't be broken up by constant pool |
3302 // emission; such sequences are protected by calls to BlockConstPoolFor and | 3343 // emission; such sequences are protected by calls to BlockConstPoolFor and |
3303 // BlockConstPoolScope. | 3344 // BlockConstPoolScope. |
3304 if (is_const_pool_blocked()) { | 3345 if (is_const_pool_blocked()) { |
3305 // Something is wrong if emission is forced and blocked at the same time. | 3346 // Something is wrong if emission is forced and blocked at the same time. |
3306 ASSERT(!force_emit); | 3347 ASSERT(!force_emit); |
3307 return; | 3348 return; |
3308 } | 3349 } |
3309 | 3350 |
3310 // There is nothing to do if there are no pending constant pool entries. | 3351 // There is nothing to do if there are no pending constant pool entries. |
(...skipping 177 matching lines...)
3488 bind(&after_pool); | 3529 bind(&after_pool); |
3489 } | 3530 } |
3490 } | 3531 } |
3491 | 3532 |
3492 // Since a constant pool was just emitted, move the check offset forward by | 3533 // Since a constant pool was just emitted, move the check offset forward by |
3493 // the standard interval. | 3534 // the standard interval. |
3494 next_buffer_check_ = pc_offset() + kCheckPoolInterval; | 3535 next_buffer_check_ = pc_offset() + kCheckPoolInterval; |
3495 } | 3536 } |
3496 | 3537 |
3497 | 3538 |
| 3539 MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { |
| 3540 ASSERT(FLAG_enable_ool_constant_pool); |
| 3541 return constant_pool_builder_.Allocate(heap); |
| 3542 } |
| 3543 |
| 3544 |
| 3545 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { |
| 3546 ASSERT(FLAG_enable_ool_constant_pool); |
| 3547 constant_pool_builder_.Populate(this, constant_pool); |
| 3548 } |
| 3549 |
| 3550 |
| 3551 ConstantPoolBuilder::ConstantPoolBuilder() |
| 3552 : entries_(), |
| 3553 merged_indexes_(), |
| 3554 count_of_64bit_(0), |
| 3555 count_of_code_ptr_(0), |
| 3556 count_of_heap_ptr_(0), |
| 3557 count_of_32bit_(0) { } |
| 3558 |
| 3559 |
| 3560 bool ConstantPoolBuilder::IsEmpty() { |
| 3561 return entries_.size() == 0; |
| 3562 } |
| 3563 |
| 3564 |
| 3565 bool ConstantPoolBuilder::Is64BitEntry(RelocInfo::Mode rmode) { |
| 3566 return rmode == RelocInfo::NONE64; |
| 3567 } |
| 3568 |
| 3569 |
| 3570 bool ConstantPoolBuilder::Is32BitEntry(RelocInfo::Mode rmode) { |
| 3571 return !RelocInfo::IsGCRelocMode(rmode) && rmode != RelocInfo::NONE64; |
| 3572 } |
| 3573 |
| 3574 |
| 3575 bool ConstantPoolBuilder::IsCodePtrEntry(RelocInfo::Mode rmode) { |
| 3576 return RelocInfo::IsCodeTarget(rmode); |
| 3577 } |
| 3578 |
| 3579 |
| 3580 bool ConstantPoolBuilder::IsHeapPtrEntry(RelocInfo::Mode rmode) { |
| 3581 return RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode); |
| 3582 } |
| 3583 |
| 3584 |
| 3585 void ConstantPoolBuilder::AddEntry(Assembler* assm, |
| 3586 const RelocInfo& rinfo) { |
| 3587 RelocInfo::Mode rmode = rinfo.rmode(); |
| 3588 ASSERT(rmode != RelocInfo::COMMENT && |
| 3589 rmode != RelocInfo::POSITION && |
| 3590 rmode != RelocInfo::STATEMENT_POSITION && |
| 3591 rmode != RelocInfo::CONST_POOL); |
| 3592 |
| 3593 |
| 3594 // Try to merge entries which won't be patched. |
| 3595 int merged_index = -1; |
| 3596 if (RelocInfo::IsNone(rmode) || |
| 3597 (!Serializer::enabled() && (rmode >= RelocInfo::CELL))) { |
| 3598 size_t i; |
| 3599 std::vector<RelocInfo>::const_iterator it; |
| 3600 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) { |
| 3601 if (RelocInfo::IsEqual(rinfo, *it)) { |
| 3602 merged_index = i; |
| 3603 break; |
| 3604 } |
| 3605 } |
| 3606 } |
| 3607 |
| 3608 entries_.push_back(rinfo); |
| 3609 merged_indexes_.push_back(merged_index); |
| 3610 |
| 3611 if (merged_index == -1) { |
| 3612 // Not merged, so update the appropriate count. |
| 3613 if (Is64BitEntry(rmode)) { |
| 3614 count_of_64bit_++; |
| 3615 } else if (Is32BitEntry(rmode)) { |
| 3616 count_of_32bit_++; |
| 3617 } else if (IsCodePtrEntry(rmode)) { |
| 3618 count_of_code_ptr_++; |
| 3619 } else { |
| 3620 ASSERT(IsHeapPtrEntry(rmode)); |
| 3621 count_of_heap_ptr_++; |
| 3622 } |
| 3623 } |
| 3624 |
| 3625 // Check if we still have room for another entry given ARM's ldr and vldr |
| 3626 // immediate offset range. |
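| // The pool is full once its total size exceeds ldr's 12-bit immediate |
| // reach, or the 64-bit section exceeds vldr's 10-bit reach (an 8-bit |
| // immediate scaled by 4). |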
| 3627 if (!(is_uint12(ConstantPoolArray::SizeFor(count_of_64bit_, |
| 3628 count_of_code_ptr_, |
| 3629 count_of_heap_ptr_, |
| 3630 count_of_32bit_))) || |
| 3631 !is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0))) { |
| 3632 assm->set_constant_pool_full(); |
| 3633 } |
| 3634 } |
| 3635 |
| 3636 |
| 3637 void ConstantPoolBuilder::Relocate(int pc_delta) { |
| 3638 for (std::vector<RelocInfo>::iterator rinfo = entries_.begin(); |
| 3639 rinfo != entries_.end(); rinfo++) { |
| 3640 ASSERT(rinfo->rmode() != RelocInfo::JS_RETURN); |
| 3641 rinfo->set_pc(rinfo->pc() + pc_delta); |
| 3642 } |
| 3643 } |
| 3644 |
| 3645 |
| 3646 MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap) { |
| 3647 if (IsEmpty()) { |
| 3648 return heap->empty_constant_pool_array(); |
| 3649 } else { |
| 3650 return heap->AllocateConstantPoolArray(count_of_64bit_, count_of_code_ptr_, |
| 3651 count_of_heap_ptr_, count_of_32bit_); |
| 3652 } |
| 3653 } |
| 3654 |
| 3655 |
| 3656 void ConstantPoolBuilder::Populate(Assembler* assm, |
| 3657 ConstantPoolArray* constant_pool) { |
| 3658 ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_); |
| 3659 ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_); |
| 3660 ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_); |
| 3661 ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_); |
| 3662 ASSERT(entries_.size() == merged_indexes_.size()); |
| 3663 |
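| // Pool layout: 64-bit entries first, then code pointers, heap pointers, |
| // and finally 32-bit entries. |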
| 3664 int index_64bit = 0; |
| 3665 int index_code_ptr = count_of_64bit_; |
| 3666 int index_heap_ptr = count_of_64bit_ + count_of_code_ptr_; |
| 3667 int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_; |
| 3668 |
| 3669 size_t i; |
| 3670 std::vector<RelocInfo>::const_iterator rinfo; |
| 3671 for (rinfo = entries_.begin(), i = 0; rinfo != entries_.end(); rinfo++, i++) { |
| 3672 RelocInfo::Mode rmode = rinfo->rmode(); |
| 3673 |
| 3674 // Update constant pool if necessary and get the entry's offset. |
| 3675 int offset; |
| 3676 if (merged_indexes_[i] == -1) { |
| 3677 if (Is64BitEntry(rmode)) { |
| 3678 offset = constant_pool->OffsetOfElementAt(index_64bit) - kHeapObjectTag; |
| 3679 constant_pool->set(index_64bit++, rinfo->data64()); |
| 3680 } else if (Is32BitEntry(rmode)) { |
| 3681 offset = constant_pool->OffsetOfElementAt(index_32bit) - kHeapObjectTag; |
| 3682 constant_pool->set(index_32bit++, static_cast<int32_t>(rinfo->data())); |
| 3683 } else if (IsCodePtrEntry(rmode)) { |
| 3684 offset = constant_pool->OffsetOfElementAt(index_code_ptr) - |
| 3685 kHeapObjectTag; |
| 3686 constant_pool->set(index_code_ptr++, |
| 3687 reinterpret_cast<Object*>(rinfo->data())); |
| 3688 } else { |
| 3689 ASSERT(IsHeapPtrEntry(rmode)); |
| 3690 offset = constant_pool->OffsetOfElementAt(index_heap_ptr) - |
| 3691 kHeapObjectTag; |
| 3692 constant_pool->set(index_heap_ptr++, |
| 3693 reinterpret_cast<Object*>(rinfo->data())); |
| 3694 } |
| 3695 merged_indexes_[i] = offset; // Stash offset for merged entries. |
| 3696 } else { |
| 3697 size_t merged_index = static_cast<size_t>(merged_indexes_[i]); |
| 3698 ASSERT(merged_index < merged_indexes_.size() && merged_index < i); |
| 3699 offset = merged_indexes_[merged_index]; |
| 3700 } |
| 3701 |
| 3702 // Patch vldr/ldr instruction with correct offset. |
| 3703 Instr instr = assm->instr_at(rinfo->pc()); |
| 3704 if (Is64BitEntry(rmode)) { |
| 3705 // Instruction to patch must be 'vldr dd, [pp, #0]'. |
| 3706 ASSERT((Assembler::IsVldrDPpImmediateOffset(instr) && |
| 3707 Assembler::GetVldrDRegisterImmediateOffset(instr) == 0)); |
| 3708 ASSERT(is_uint10(offset)); |
| 3709 assm->instr_at_put(rinfo->pc(), |
| 3710 Assembler::SetVldrDRegisterImmediateOffset(instr, offset)); |
| 3711 } else { |
| 3712 // Instruction to patch must be 'ldr rd, [pp, #0]'. |
| 3713 ASSERT((Assembler::IsLdrPpImmediateOffset(instr) && |
| 3714 Assembler::GetLdrRegisterImmediateOffset(instr) == 0)); |
| 3715 ASSERT(is_uint12(offset)); |
| 3716 assm->instr_at_put(rinfo->pc(), |
| 3717 Assembler::SetLdrRegisterImmediateOffset(instr, offset)); |
| 3718 } |
| 3719 } |
| 3720 |
| 3721 ASSERT((index_64bit == count_of_64bit_) && |
| 3722 (index_code_ptr == (index_64bit + count_of_code_ptr_)) && |
| 3723 (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) && |
| 3724 (index_32bit == (index_heap_ptr + count_of_32bit_))); |
| 3725 } |
| 3726 |
| 3727 |
3498 } } // namespace v8::internal | 3728 } } // namespace v8::internal |
3499 | 3729 |
3500 #endif // V8_TARGET_ARCH_ARM | 3730 #endif // V8_TARGET_ARCH_ARM |