Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(459)

Side by Side Diff: src/arm/assembler-arm.cc

Issue 1131783003: Embedded constant pools. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: New test, address nits Created 5 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after
227 // ----------------------------------------------------------------------------- 227 // -----------------------------------------------------------------------------
228 // Implementation of RelocInfo 228 // Implementation of RelocInfo
229 229
230 // static 230 // static
231 const int RelocInfo::kApplyMask = 0; 231 const int RelocInfo::kApplyMask = 0;
232 232
233 233
234 bool RelocInfo::IsCodedSpecially() { 234 bool RelocInfo::IsCodedSpecially() {
235 // The deserializer needs to know whether a pointer is specially coded.  Being 235 // The deserializer needs to know whether a pointer is specially coded.  Being
236 // specially coded on ARM means that it is a movw/movt instruction, or is an 236 // specially coded on ARM means that it is a movw/movt instruction, or is an
237 // out of line constant pool entry.  These only occur if 237 // embedded constant pool entry.  These only occur if
238 // FLAG_enable_ool_constant_pool is true. 238 // FLAG_enable_embedded_constant_pool is true.
239 return FLAG_enable_ool_constant_pool; 239 return FLAG_enable_embedded_constant_pool;
240 } 240 }
241 241
242 242
243 bool RelocInfo::IsInConstantPool() { 243 bool RelocInfo::IsInConstantPool() {
244 return Assembler::is_constant_pool_load(pc_); 244 return Assembler::is_constant_pool_load(pc_);
245 } 245 }
246 246
247 247
248 // ----------------------------------------------------------------------------- 248 // -----------------------------------------------------------------------------
249 // Implementation of Operand and MemOperand 249 // Implementation of Operand and MemOperand
(...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after
442 const Instr kLdrRegFpNegOffsetPattern = 442 const Instr kLdrRegFpNegOffsetPattern =
443 al | B26 | L | NegOffset | kRegister_fp_Code * B16; 443 al | B26 | L | NegOffset | kRegister_fp_Code * B16;
444 const Instr kStrRegFpNegOffsetPattern = 444 const Instr kStrRegFpNegOffsetPattern =
445 al | B26 | NegOffset | kRegister_fp_Code * B16; 445 al | B26 | NegOffset | kRegister_fp_Code * B16;
446 const Instr kLdrStrInstrTypeMask = 0xffff0000; 446 const Instr kLdrStrInstrTypeMask = 0xffff0000;
447 447
448 448
449 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) 449 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
450 : AssemblerBase(isolate, buffer, buffer_size), 450 : AssemblerBase(isolate, buffer, buffer_size),
451 recorded_ast_id_(TypeFeedbackId::None()), 451 recorded_ast_id_(TypeFeedbackId::None()),
452 constant_pool_builder_(), 452 constant_pool_builder_(kLdrMaxReachBits, kVldrMaxReachBits),
453 positions_recorder_(this) { 453 positions_recorder_(this) {
454 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); 454 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
455 num_pending_32_bit_reloc_info_ = 0; 455 num_pending_32_bit_constants_ = 0;
456 num_pending_64_bit_reloc_info_ = 0; 456 num_pending_64_bit_constants_ = 0;
457 next_buffer_check_ = 0; 457 next_buffer_check_ = 0;
458 const_pool_blocked_nesting_ = 0; 458 const_pool_blocked_nesting_ = 0;
459 no_const_pool_before_ = 0; 459 no_const_pool_before_ = 0;
460 first_const_pool_32_use_ = -1; 460 first_const_pool_32_use_ = -1;
461 first_const_pool_64_use_ = -1; 461 first_const_pool_64_use_ = -1;
462 last_bound_pos_ = 0; 462 last_bound_pos_ = 0;
463 ClearRecordedAstId(); 463 ClearRecordedAstId();
464 } 464 }
465 465
466 466
467 Assembler::~Assembler() { 467 Assembler::~Assembler() {
468 DCHECK(const_pool_blocked_nesting_ == 0); 468 DCHECK(const_pool_blocked_nesting_ == 0);
469 } 469 }
470 470
471 471
472 void Assembler::GetCode(CodeDesc* desc) { 472 void Assembler::GetCode(CodeDesc* desc) {
473 reloc_info_writer.Finish(); 473 reloc_info_writer.Finish();
474 if (!FLAG_enable_ool_constant_pool) { 474
475 // Emit constant pool if necessary. 475 // Emit constant pool if necessary.
476 int constant_pool_offset = 0;
477 if (FLAG_enable_embedded_constant_pool) {
478 constant_pool_offset = EmitEmbeddedConstantPool();
479 } else {
476 CheckConstPool(true, false); 480 CheckConstPool(true, false);
477 DCHECK(num_pending_32_bit_reloc_info_ == 0); 481 DCHECK(num_pending_32_bit_constants_ == 0);
478 DCHECK(num_pending_64_bit_reloc_info_ == 0); 482 DCHECK(num_pending_64_bit_constants_ == 0);
479 } 483 }
480 // Set up code descriptor. 484 // Set up code descriptor.
481 desc->buffer = buffer_; 485 desc->buffer = buffer_;
482 desc->buffer_size = buffer_size_; 486 desc->buffer_size = buffer_size_;
483 desc->instr_size = pc_offset(); 487 desc->instr_size = pc_offset();
484 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); 488 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
489 desc->constant_pool_size =
490 (constant_pool_offset ? desc->instr_size - constant_pool_offset : 0);
485 desc->origin = this; 491 desc->origin = this;
486 } 492 }
487 493
488 494
489 void Assembler::Align(int m) { 495 void Assembler::Align(int m) {
490 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); 496 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m));
491 while ((pc_offset() & (m - 1)) != 0) { 497 while ((pc_offset() & (m - 1)) != 0) {
492 nop(); 498 nop();
493 } 499 }
494 } 500 }
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
616 622
617 623
618 Register Assembler::GetRm(Instr instr) { 624 Register Assembler::GetRm(Instr instr) {
619 Register reg; 625 Register reg;
620 reg.code_ = Instruction::RmValue(instr); 626 reg.code_ = Instruction::RmValue(instr);
621 return reg; 627 return reg;
622 } 628 }
623 629
624 630
625 Instr Assembler::GetConsantPoolLoadPattern() { 631 Instr Assembler::GetConsantPoolLoadPattern() {
626 if (FLAG_enable_ool_constant_pool) { 632 if (FLAG_enable_embedded_constant_pool) {
627 return kLdrPpImmedPattern; 633 return kLdrPpImmedPattern;
628 } else { 634 } else {
629 return kLdrPCImmedPattern; 635 return kLdrPCImmedPattern;
630 } 636 }
631 } 637 }
632 638
633 639
634 Instr Assembler::GetConsantPoolLoadMask() { 640 Instr Assembler::GetConsantPoolLoadMask() {
635 if (FLAG_enable_ool_constant_pool) { 641 if (FLAG_enable_embedded_constant_pool) {
636 return kLdrPpImmedMask; 642 return kLdrPpImmedMask;
637 } else { 643 } else {
638 return kLdrPCImmedMask; 644 return kLdrPCImmedMask;
639 } 645 }
640 } 646 }
641 647
642 648
643 bool Assembler::IsPush(Instr instr) { 649 bool Assembler::IsPush(Instr instr) {
644 return ((instr & ~kRdMask) == kPushRegPattern); 650 return ((instr & ~kRdMask) == kPushRegPattern);
645 } 651 }
(...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after
1037 return assembler->serializer_enabled(); 1043 return assembler->serializer_enabled();
1038 } else if (RelocInfo::IsNone(rmode_)) { 1044 } else if (RelocInfo::IsNone(rmode_)) {
1039 return false; 1045 return false;
1040 } 1046 }
1041 return true; 1047 return true;
1042 } 1048 }
1043 1049
1044 1050
1045 static bool use_mov_immediate_load(const Operand& x, 1051 static bool use_mov_immediate_load(const Operand& x,
1046 const Assembler* assembler) { 1052 const Assembler* assembler) {
1047 if (FLAG_enable_ool_constant_pool && assembler != NULL && 1053 if (FLAG_enable_embedded_constant_pool && assembler != NULL &&
1048 !assembler->is_ool_constant_pool_available()) { 1054 !assembler->is_constant_pool_available()) {
1049 return true; 1055 return true;
1050 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && 1056 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
1051 (assembler == NULL || !assembler->predictable_code_size())) { 1057 (assembler == NULL || !assembler->predictable_code_size())) {
1052 // Prefer movw / movt to constant pool if it is more efficient on the CPU. 1058 // Prefer movw / movt to constant pool if it is more efficient on the CPU.
1053 return true; 1059 return true;
1054 } else if (x.must_output_reloc_info(assembler)) { 1060 } else if (x.must_output_reloc_info(assembler)) {
1055 // Prefer constant pool if data is likely to be patched. 1061 // Prefer constant pool if data is likely to be patched.
1056 return false; 1062 return false;
1057 } else { 1063 } else {
1058 // Otherwise, use immediate load if movw / movt is available. 1064 // Otherwise, use immediate load if movw / movt is available.
1059 return CpuFeatures::IsSupported(ARMv7); 1065 return CpuFeatures::IsSupported(ARMv7);
1060 } 1066 }
1061 } 1067 }
1062 1068
1063 1069
1064 int Operand::instructions_required(const Assembler* assembler, 1070 int Operand::instructions_required(const Assembler* assembler,
1065 Instr instr) const { 1071 Instr instr) const {
1066 if (rm_.is_valid()) return 1; 1072 if (rm_.is_valid()) return 1;
1067 uint32_t dummy1, dummy2; 1073 uint32_t dummy1, dummy2;
1068 if (must_output_reloc_info(assembler) || 1074 if (must_output_reloc_info(assembler) ||
1069 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { 1075 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) {
1070 // The immediate operand cannot be encoded as a shifter operand, or use of 1076 // The immediate operand cannot be encoded as a shifter operand, or use of
1071 // constant pool is required. First account for the instructions required 1077 // constant pool is required. First account for the instructions required
1072 // for the constant pool or immediate load 1078 // for the constant pool or immediate load
1073 int instructions; 1079 int instructions;
1074 if (use_mov_immediate_load(*this, assembler)) { 1080 if (use_mov_immediate_load(*this, assembler)) {
1075 // A movw / movt or mov / orr immediate load. 1081 // A movw / movt or mov / orr immediate load.
1076 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4; 1082 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4;
1077 } else if (assembler != NULL && assembler->use_extended_constant_pool()) { 1083 } else if (assembler != NULL &&
1078 // An extended constant pool load. 1084 assembler->ConstantPoolAccessIsInOverflow()) {
1085 // An overflowed constant pool load.
1079 instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5; 1086 instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5;
1080 } else { 1087 } else {
1081 // A small constant pool load. 1088 // A small constant pool load.
1082 instructions = 1; 1089 instructions = 1;
1083 } 1090 }
1084 1091
1085 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set 1092 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set
1086 // For a mov or mvn instruction which doesn't set the condition 1093 // For a mov or mvn instruction which doesn't set the condition
1087 // code, the constant pool or immediate load is enough, otherwise we need 1094 // code, the constant pool or immediate load is enough, otherwise we need
1088 // to account for the actual instruction being requested. 1095 // to account for the actual instruction being requested.
1089 instructions += 1; 1096 instructions += 1;
1090 } 1097 }
1091 return instructions; 1098 return instructions;
1092 } else { 1099 } else {
1093 // No use of constant pool and the immediate operand can be encoded as a 1100 // No use of constant pool and the immediate operand can be encoded as a
1094 // shifter operand. 1101 // shifter operand.
1095 return 1; 1102 return 1;
1096 } 1103 }
1097 } 1104 }
1098 1105
1099 1106
1100 void Assembler::move_32_bit_immediate(Register rd, 1107 void Assembler::move_32_bit_immediate(Register rd,
1101 const Operand& x, 1108 const Operand& x,
1102 Condition cond) { 1109 Condition cond) {
1103 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL);
1104 uint32_t imm32 = static_cast<uint32_t>(x.imm32_); 1110 uint32_t imm32 = static_cast<uint32_t>(x.imm32_);
1105 if (x.must_output_reloc_info(this)) { 1111 if (x.must_output_reloc_info(this)) {
1106 RecordRelocInfo(rinfo); 1112 RecordRelocInfo(x.rmode_);
1107 } 1113 }
1108 1114
1109 if (use_mov_immediate_load(x, this)) { 1115 if (use_mov_immediate_load(x, this)) {
1110 Register target = rd.code() == pc.code() ? ip : rd; 1116 Register target = rd.code() == pc.code() ? ip : rd;
1111 if (CpuFeatures::IsSupported(ARMv7)) { 1117 if (CpuFeatures::IsSupported(ARMv7)) {
1112 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { 1118 if (!FLAG_enable_embedded_constant_pool &&
1119 x.must_output_reloc_info(this)) {
1113 // Make sure the movw/movt doesn't get separated. 1120 // Make sure the movw/movt doesn't get separated.
1114 BlockConstPoolFor(2); 1121 BlockConstPoolFor(2);
1115 } 1122 }
1116 movw(target, imm32 & 0xffff, cond); 1123 movw(target, imm32 & 0xffff, cond);
1117 movt(target, imm32 >> 16, cond); 1124 movt(target, imm32 >> 16, cond);
1118 } else { 1125 } else {
1119 DCHECK(FLAG_enable_ool_constant_pool); 1126 DCHECK(FLAG_enable_embedded_constant_pool);
1120 mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond); 1127 mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond);
1121 orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond); 1128 orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond);
1122 orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond); 1129 orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond);
1123 orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond); 1130 orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond);
1124 } 1131 }
1125 if (target.code() != rd.code()) { 1132 if (target.code() != rd.code()) {
1126 mov(rd, target, LeaveCC, cond); 1133 mov(rd, target, LeaveCC, cond);
1127 } 1134 }
1128 } else { 1135 } else {
1129 DCHECK(!FLAG_enable_ool_constant_pool || is_ool_constant_pool_available()); 1136 DCHECK(!FLAG_enable_embedded_constant_pool || is_constant_pool_available());
1130 ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); 1137 ConstantPoolEntry::Access access =
1131 if (section == ConstantPoolArray::EXTENDED_SECTION) { 1138 ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_);
1132 DCHECK(FLAG_enable_ool_constant_pool); 1139 if (access == ConstantPoolEntry::OVERFLOWED) {
1140 DCHECK(FLAG_enable_embedded_constant_pool);
1133 Register target = rd.code() == pc.code() ? ip : rd; 1141 Register target = rd.code() == pc.code() ? ip : rd;
1134 // Emit instructions to load constant pool offset. 1142 // Emit instructions to load constant pool offset.
1135 if (CpuFeatures::IsSupported(ARMv7)) { 1143 if (CpuFeatures::IsSupported(ARMv7)) {
1136 movw(target, 0, cond); 1144 movw(target, 0, cond);
1137 movt(target, 0, cond); 1145 movt(target, 0, cond);
1138 } else { 1146 } else {
1139 mov(target, Operand(0), LeaveCC, cond); 1147 mov(target, Operand(0), LeaveCC, cond);
1140 orr(target, target, Operand(0), LeaveCC, cond); 1148 orr(target, target, Operand(0), LeaveCC, cond);
1141 orr(target, target, Operand(0), LeaveCC, cond); 1149 orr(target, target, Operand(0), LeaveCC, cond);
1142 orr(target, target, Operand(0), LeaveCC, cond); 1150 orr(target, target, Operand(0), LeaveCC, cond);
1143 } 1151 }
1144 // Load from constant pool at offset. 1152 // Load from constant pool at offset.
1145 ldr(rd, MemOperand(pp, target), cond); 1153 ldr(rd, MemOperand(pp, target), cond);
1146 } else { 1154 } else {
1147 DCHECK(section == ConstantPoolArray::SMALL_SECTION); 1155 DCHECK(access == ConstantPoolEntry::REGULAR);
1148 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); 1156 ldr(rd, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0),
1157 cond);
1149 } 1158 }
1150 } 1159 }
1151 } 1160 }
1152 1161
1153 1162
1154 void Assembler::addrmod1(Instr instr, 1163 void Assembler::addrmod1(Instr instr,
1155 Register rn, 1164 Register rn,
1156 Register rd, 1165 Register rd,
1157 const Operand& x) { 1166 const Operand& x) {
1158 CheckBuffer(); 1167 CheckBuffer();
(...skipping 1388 matching lines...) Expand 10 before | Expand all | Expand 10 after
2547 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { 2556 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
2548 // The double can be encoded in the instruction. 2557 // The double can be encoded in the instruction.
2549 // 2558 //
2550 // Dd = immediate 2559 // Dd = immediate
2551 // Instruction details available in ARM DDI 0406C.b, A8-936. 2560 // Instruction details available in ARM DDI 0406C.b, A8-936.
2552 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | 2561 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) |
2553 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) 2562 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0)
2554 int vd, d; 2563 int vd, d;
2555 dst.split_code(&vd, &d); 2564 dst.split_code(&vd, &d);
2556 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); 2565 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
2557 } else if (FLAG_enable_vldr_imm && is_ool_constant_pool_available()) { 2566 } else if (FLAG_enable_vldr_imm && is_constant_pool_available()) {
2558 // TODO(jfb) Temporarily turned off until we have constant blinding or 2567 // TODO(jfb) Temporarily turned off until we have constant blinding or
2559 // some equivalent mitigation: an attacker can otherwise control 2568 // some equivalent mitigation: an attacker can otherwise control
2560 // generated data which also happens to be executable, a Very Bad 2569 // generated data which also happens to be executable, a Very Bad
2561 // Thing indeed. 2570 // Thing indeed.
2562 // Blinding gets tricky because we don't have xor, we probably 2571 // Blinding gets tricky because we don't have xor, we probably
2563 // need to add/subtract without losing precision, which requires a 2572 // need to add/subtract without losing precision, which requires a
2564 // cookie value that Lithium is probably better positioned to 2573 // cookie value that Lithium is probably better positioned to
2565 // choose. 2574 // choose.
2566 // We could also add a few peepholes here like detecting 0.0 and 2575 // We could also add a few peepholes here like detecting 0.0 and
2567 // -0.0 and doing a vmov from the sequestered d14, forcing denorms 2576 // -0.0 and doing a vmov from the sequestered d14, forcing denorms
2568 // to zero (we set flush-to-zero), and normalizing NaN values. 2577 // to zero (we set flush-to-zero), and normalizing NaN values.
2569 // We could also detect redundant values. 2578 // We could also detect redundant values.
2570 // The code could also randomize the order of values, though 2579 // The code could also randomize the order of values, though
2571 // that's tricky because vldr has a limited reach. Furthermore 2580 // that's tricky because vldr has a limited reach. Furthermore
2572 // it breaks load locality. 2581 // it breaks load locality.
2573 RelocInfo rinfo(pc_, imm); 2582 ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm);
2574 ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); 2583 if (access == ConstantPoolEntry::OVERFLOWED) {
2575 if (section == ConstantPoolArray::EXTENDED_SECTION) { 2584 DCHECK(FLAG_enable_embedded_constant_pool);
2576 DCHECK(FLAG_enable_ool_constant_pool);
2577 // Emit instructions to load constant pool offset. 2585 // Emit instructions to load constant pool offset.
2578 movw(ip, 0); 2586 movw(ip, 0);
2579 movt(ip, 0); 2587 movt(ip, 0);
2580 // Load from constant pool at offset. 2588 // Load from constant pool at offset.
2581 vldr(dst, MemOperand(pp, ip)); 2589 vldr(dst, MemOperand(pp, ip));
2582 } else { 2590 } else {
2583 DCHECK(section == ConstantPoolArray::SMALL_SECTION); 2591 DCHECK(access == ConstantPoolEntry::REGULAR);
2584 vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0)); 2592 vldr(dst, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0));
2585 } 2593 }
2586 } else { 2594 } else {
2587 // Synthesise the double from ARM immediates. 2595 // Synthesise the double from ARM immediates.
2588 uint32_t lo, hi; 2596 uint32_t lo, hi;
2589 DoubleAsTwoUInt32(imm, &lo, &hi); 2597 DoubleAsTwoUInt32(imm, &lo, &hi);
2590 2598
2591 if (lo == hi) { 2599 if (lo == hi) {
2592 // Move the low and high parts of the double to a D register in one 2600 // Move the low and high parts of the double to a D register in one
2593 // instruction. 2601 // instruction.
2594 mov(ip, Operand(lo)); 2602 mov(ip, Operand(lo));
(...skipping 953 matching lines...) Expand 10 before | Expand all | Expand 10 after
3548 DeleteArray(buffer_); 3556 DeleteArray(buffer_);
3549 buffer_ = desc.buffer; 3557 buffer_ = desc.buffer;
3550 buffer_size_ = desc.buffer_size; 3558 buffer_size_ = desc.buffer_size;
3551 pc_ += pc_delta; 3559 pc_ += pc_delta;
3552 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, 3560 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
3553 reloc_info_writer.last_pc() + pc_delta); 3561 reloc_info_writer.last_pc() + pc_delta);
3554 3562
3555 // None of our relocation types are pc relative pointing outside the code 3563 // None of our relocation types are pc relative pointing outside the code
3556 // buffer nor pc absolute pointing inside the code buffer, so there is no need 3564 // buffer nor pc absolute pointing inside the code buffer, so there is no need
3557 // to relocate any emitted relocation entries. 3565 // to relocate any emitted relocation entries.
3558
3559 // Relocate pending relocation entries.
3560 for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
3561 RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
3562 DCHECK(rinfo.rmode() != RelocInfo::COMMENT &&
3563 rinfo.rmode() != RelocInfo::POSITION);
3564 if (rinfo.rmode() != RelocInfo::JS_RETURN) {
3565 rinfo.set_pc(rinfo.pc() + pc_delta);
3566 }
3567 }
3568 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
3569 RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
3570 DCHECK(rinfo.rmode() == RelocInfo::NONE64);
3571 rinfo.set_pc(rinfo.pc() + pc_delta);
3572 }
3573 constant_pool_builder_.Relocate(pc_delta);
3574 } 3566 }
3575 3567
3576 3568
3577 void Assembler::db(uint8_t data) { 3569 void Assembler::db(uint8_t data) {
3578 // No relocation info should be pending while using db. db is used 3570 // No relocation info should be pending while using db. db is used
3579 // to write pure data with no pointers and the constant pool should 3571 // to write pure data with no pointers and the constant pool should
3580 // be emitted before using db. 3572 // be emitted before using db.
3581 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3573 DCHECK(num_pending_32_bit_constants_ == 0);
3582 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3574 DCHECK(num_pending_64_bit_constants_ == 0);
3583 CheckBuffer(); 3575 CheckBuffer();
3584 *reinterpret_cast<uint8_t*>(pc_) = data; 3576 *reinterpret_cast<uint8_t*>(pc_) = data;
3585 pc_ += sizeof(uint8_t); 3577 pc_ += sizeof(uint8_t);
3586 } 3578 }
3587 3579
3588 3580
3589 void Assembler::dd(uint32_t data) { 3581 void Assembler::dd(uint32_t data) {
3590 // No relocation info should be pending while using dd. dd is used 3582 // No relocation info should be pending while using dd. dd is used
3591 // to write pure data with no pointers and the constant pool should 3583 // to write pure data with no pointers and the constant pool should
3592 // be emitted before using dd. 3584 // be emitted before using dd.
3593 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3585 DCHECK(num_pending_32_bit_constants_ == 0);
3594 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3586 DCHECK(num_pending_64_bit_constants_ == 0);
3595 CheckBuffer(); 3587 CheckBuffer();
3596 *reinterpret_cast<uint32_t*>(pc_) = data; 3588 *reinterpret_cast<uint32_t*>(pc_) = data;
3597 pc_ += sizeof(uint32_t); 3589 pc_ += sizeof(uint32_t);
3598 } 3590 }
3599 3591
3600 3592
3593 void Assembler::dq(uint64_t value) {
3594 // No relocation info should be pending while using dq. dq is used
3595 // to write pure data with no pointers and the constant pool should
3596 // be emitted before using dq.
3597 DCHECK(num_pending_32_bit_constants_ == 0);
3598 DCHECK(num_pending_64_bit_constants_ == 0);
3599 CheckBuffer();
3600 *reinterpret_cast<uint64_t*>(pc_) = value;
3601 pc_ += sizeof(uint64_t);
3602 }
3603
3604
3601 void Assembler::emit_code_stub_address(Code* stub) { 3605 void Assembler::emit_code_stub_address(Code* stub) {
3602 CheckBuffer(); 3606 CheckBuffer();
3603 *reinterpret_cast<uint32_t*>(pc_) = 3607 *reinterpret_cast<uint32_t*>(pc_) =
3604 reinterpret_cast<uint32_t>(stub->instruction_start()); 3608 reinterpret_cast<uint32_t>(stub->instruction_start());
3605 pc_ += sizeof(uint32_t); 3609 pc_ += sizeof(uint32_t);
3606 } 3610 }
3607 3611
3608 3612
3609 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { 3613 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3614 if (RelocInfo::IsNone(rmode) ||
3615 // Don't record external references unless the heap will be serialized.
3616 (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() &&
3617 !emit_debug_code())) {
3618 return;
3619 }
3620 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
3621 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
3622 data = RecordedAstId().ToInt();
3623 ClearRecordedAstId();
3624 }
3610 RelocInfo rinfo(pc_, rmode, data, NULL); 3625 RelocInfo rinfo(pc_, rmode, data, NULL);
3611 RecordRelocInfo(rinfo); 3626 reloc_info_writer.Write(&rinfo);
3612 } 3627 }
3613 3628
3614 3629
3615 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { 3630 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
3616 if (!RelocInfo::IsNone(rinfo.rmode())) { 3631 RelocInfo::Mode rmode,
3617 // Don't record external references unless the heap will be serialized. 3632 intptr_t value) {
3618 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE && 3633 DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::POSITION &&
3619 !serializer_enabled() && !emit_debug_code()) { 3634 rmode != RelocInfo::STATEMENT_POSITION &&
3620 return; 3635 rmode != RelocInfo::CONST_POOL && rmode != RelocInfo::NONE64);
3636 bool sharing_ok = RelocInfo::IsNone(rmode) ||
3637 !(serializer_enabled() || rmode < RelocInfo::CELL);
3638 if (FLAG_enable_embedded_constant_pool) {
3639 return constant_pool_builder_.AddEntry(position, value, sharing_ok);
3640 } else {
3641 DCHECK(num_pending_32_bit_constants_ < kMaxNumPending32Constants);
3642 if (num_pending_32_bit_constants_ == 0) {
3643 first_const_pool_32_use_ = position;
3621 } 3644 }
3622 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here 3645 ConstantPoolEntry entry(position, value, sharing_ok);
3623 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { 3646 pending_32_bit_constants_[num_pending_32_bit_constants_++] = entry;
3624 RelocInfo reloc_info_with_ast_id(rinfo.pc(), 3647
3625 rinfo.rmode(), 3648 // Make sure the constant pool is not emitted in place of the next
3626 RecordedAstId().ToInt(), 3649 // instruction for which we just recorded relocation info.
3627 NULL); 3650 BlockConstPoolFor(1);
3628 ClearRecordedAstId(); 3651 return ConstantPoolEntry::REGULAR;
3629 reloc_info_writer.Write(&reloc_info_with_ast_id);
3630 } else {
3631 reloc_info_writer.Write(&rinfo);
3632 }
3633 } 3652 }
3634 } 3653 }
3635 3654
3636 3655
3637 ConstantPoolArray::LayoutSection Assembler::ConstantPoolAddEntry( 3656 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
3638 const RelocInfo& rinfo) { 3657 double value) {
3639 if (FLAG_enable_ool_constant_pool) { 3658 if (FLAG_enable_embedded_constant_pool) {
3640 return constant_pool_builder_.AddEntry(this, rinfo); 3659 return constant_pool_builder_.AddEntry(position, value);
3641 } else { 3660 } else {
3642 if (rinfo.rmode() == RelocInfo::NONE64) { 3661 DCHECK(num_pending_64_bit_constants_ < kMaxNumPending64Constants);
3643 DCHECK(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); 3662 if (num_pending_64_bit_constants_ == 0) {
3644 if (num_pending_64_bit_reloc_info_ == 0) { 3663 first_const_pool_64_use_ = position;
3645 first_const_pool_64_use_ = pc_offset();
3646 }
3647 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
3648 } else {
3649 DCHECK(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
3650 if (num_pending_32_bit_reloc_info_ == 0) {
3651 first_const_pool_32_use_ = pc_offset();
3652 }
3653 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
3654 } 3664 }
3665 ConstantPoolEntry entry(position, value);
3666 pending_64_bit_constants_[num_pending_64_bit_constants_++] = entry;
3667
3655 // Make sure the constant pool is not emitted in place of the next 3668 // Make sure the constant pool is not emitted in place of the next
3656 // instruction for which we just recorded relocation info. 3669 // instruction for which we just recorded relocation info.
3657 BlockConstPoolFor(1); 3670 BlockConstPoolFor(1);
3658 return ConstantPoolArray::SMALL_SECTION; 3671 return ConstantPoolEntry::REGULAR;
3659 } 3672 }
3660 } 3673 }
3661 3674
3662 3675
3663 void Assembler::BlockConstPoolFor(int instructions) { 3676 void Assembler::BlockConstPoolFor(int instructions) {
3664 if (FLAG_enable_ool_constant_pool) { 3677 if (FLAG_enable_embedded_constant_pool) {
3665 // Should be a no-op if using an out-of-line constant pool. 3678 // Should be a no-op if using an embedded constant pool.
3666 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3679 DCHECK(num_pending_32_bit_constants_ == 0);
3667 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3680 DCHECK(num_pending_64_bit_constants_ == 0);
3668 return; 3681 return;
3669 } 3682 }
3670 3683
3671 int pc_limit = pc_offset() + instructions * kInstrSize; 3684 int pc_limit = pc_offset() + instructions * kInstrSize;
3672 if (no_const_pool_before_ < pc_limit) { 3685 if (no_const_pool_before_ < pc_limit) {
3673 // Max pool start (if we need a jump and an alignment). 3686 // Max pool start (if we need a jump and an alignment).
3674 #ifdef DEBUG 3687 #ifdef DEBUG
3675 int start = pc_limit + kInstrSize + 2 * kPointerSize; 3688 int start = pc_limit + kInstrSize + 2 * kPointerSize;
3676 DCHECK((num_pending_32_bit_reloc_info_ == 0) || 3689 DCHECK((num_pending_32_bit_constants_ == 0) ||
3677 (start - first_const_pool_32_use_ + 3690 (start - first_const_pool_32_use_ +
3678 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); 3691 num_pending_64_bit_constants_ * kDoubleSize <
3679 DCHECK((num_pending_64_bit_reloc_info_ == 0) || 3692 kMaxDistToIntPool));
3693 DCHECK((num_pending_64_bit_constants_ == 0) ||
3680 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); 3694 (start - first_const_pool_64_use_ < kMaxDistToFPPool));
3681 #endif 3695 #endif
3682 no_const_pool_before_ = pc_limit; 3696 no_const_pool_before_ = pc_limit;
3683 } 3697 }
3684 3698
3685 if (next_buffer_check_ < no_const_pool_before_) { 3699 if (next_buffer_check_ < no_const_pool_before_) {
3686 next_buffer_check_ = no_const_pool_before_; 3700 next_buffer_check_ = no_const_pool_before_;
3687 } 3701 }
3688 } 3702 }
3689 3703
3690 3704
3691 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { 3705 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
3692 if (FLAG_enable_ool_constant_pool) { 3706 if (FLAG_enable_embedded_constant_pool) {
3693 // Should be a no-op if using an out-of-line constant pool. 3707 // Should be a no-op if using an embedded constant pool.
3694 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3708 DCHECK(num_pending_32_bit_constants_ == 0);
3695 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3709 DCHECK(num_pending_64_bit_constants_ == 0);
3696 return; 3710 return;
3697 } 3711 }
3698 3712
3699 // Some short sequence of instruction mustn't be broken up by constant pool 3713 // Some short sequence of instruction mustn't be broken up by constant pool
3700 // emission, such sequences are protected by calls to BlockConstPoolFor and 3714 // emission, such sequences are protected by calls to BlockConstPoolFor and
3701 // BlockConstPoolScope. 3715 // BlockConstPoolScope.
3702 if (is_const_pool_blocked()) { 3716 if (is_const_pool_blocked()) {
3703 // Something is wrong if emission is forced and blocked at the same time. 3717 // Something is wrong if emission is forced and blocked at the same time.
3704 DCHECK(!force_emit); 3718 DCHECK(!force_emit);
3705 return; 3719 return;
3706 } 3720 }
3707 3721
3708 // There is nothing to do if there are no pending constant pool entries. 3722 // There is nothing to do if there are no pending constant pool entries.
3709 if ((num_pending_32_bit_reloc_info_ == 0) && 3723 if ((num_pending_32_bit_constants_ == 0) &&
3710 (num_pending_64_bit_reloc_info_ == 0)) { 3724 (num_pending_64_bit_constants_ == 0)) {
3711 // Calculate the offset of the next check. 3725 // Calculate the offset of the next check.
3712 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3726 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3713 return; 3727 return;
3714 } 3728 }
3715 3729
3716 // Check that the code buffer is large enough before emitting the constant 3730 // Check that the code buffer is large enough before emitting the constant
3717 // pool (include the jump over the pool and the constant pool marker and 3731 // pool (include the jump over the pool and the constant pool marker and
3718 // the gap to the relocation information). 3732 // the gap to the relocation information).
3719 int jump_instr = require_jump ? kInstrSize : 0; 3733 int jump_instr = require_jump ? kInstrSize : 0;
3720 int size_up_to_marker = jump_instr + kInstrSize; 3734 int size_up_to_marker = jump_instr + kInstrSize;
3721 int size_after_marker = num_pending_32_bit_reloc_info_ * kPointerSize; 3735 int size_after_marker = num_pending_32_bit_constants_ * kPointerSize;
3722 bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0); 3736 bool has_fp_values = (num_pending_64_bit_constants_ > 0);
3723 bool require_64_bit_align = false; 3737 bool require_64_bit_align = false;
3724 if (has_fp_values) { 3738 if (has_fp_values) {
3725 require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7); 3739 require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7);
3726 if (require_64_bit_align) { 3740 if (require_64_bit_align) {
3727 size_after_marker += kInstrSize; 3741 size_after_marker += kInstrSize;
3728 } 3742 }
3729 size_after_marker += num_pending_64_bit_reloc_info_ * kDoubleSize; 3743 size_after_marker += num_pending_64_bit_constants_ * kDoubleSize;
3730 } 3744 }
3731 3745
3732 int size = size_up_to_marker + size_after_marker; 3746 int size = size_up_to_marker + size_after_marker;
3733 3747
3734 // We emit a constant pool when: 3748 // We emit a constant pool when:
3735 // * requested to do so by parameter force_emit (e.g. after each function). 3749 // * requested to do so by parameter force_emit (e.g. after each function).
3736 // * the distance from the first instruction accessing the constant pool to 3750 // * the distance from the first instruction accessing the constant pool to
3737 // any of the constant pool entries will exceed its limit the next 3751 // any of the constant pool entries will exceed its limit the next
3738 // time the pool is checked. This is overly restrictive, but we don't emit 3752 // time the pool is checked. This is overly restrictive, but we don't emit
3739 // constant pool entries in-order so it's conservatively correct. 3753 // constant pool entries in-order so it's conservatively correct.
3740 // * the instruction doesn't require a jump after itself to jump over the 3754 // * the instruction doesn't require a jump after itself to jump over the
3741 // constant pool, and we're getting close to running out of range. 3755 // constant pool, and we're getting close to running out of range.
3742 if (!force_emit) { 3756 if (!force_emit) {
3743 DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0)); 3757 DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0));
3744 bool need_emit = false; 3758 bool need_emit = false;
3745 if (has_fp_values) { 3759 if (has_fp_values) {
3746 int dist64 = pc_offset() + 3760 int dist64 = pc_offset() + size -
3747 size - 3761 num_pending_32_bit_constants_ * kPointerSize -
3748 num_pending_32_bit_reloc_info_ * kPointerSize -
3749 first_const_pool_64_use_; 3762 first_const_pool_64_use_;
3750 if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) || 3763 if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) ||
3751 (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) { 3764 (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) {
3752 need_emit = true; 3765 need_emit = true;
3753 } 3766 }
3754 } 3767 }
3755 int dist32 = 3768 int dist32 =
3756 pc_offset() + size - first_const_pool_32_use_; 3769 pc_offset() + size - first_const_pool_32_use_;
3757 if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) || 3770 if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) ||
3758 (!require_jump && (dist32 >= kMaxDistToIntPool / 2))) { 3771 (!require_jump && (dist32 >= kMaxDistToIntPool / 2))) {
(...skipping 21 matching lines...) Expand all
3780 // The data size helps disassembly know what to print. 3793 // The data size helps disassembly know what to print.
3781 emit(kConstantPoolMarker | 3794 emit(kConstantPoolMarker |
3782 EncodeConstantPoolLength(size_after_marker / kPointerSize)); 3795 EncodeConstantPoolLength(size_after_marker / kPointerSize));
3783 3796
3784 if (require_64_bit_align) { 3797 if (require_64_bit_align) {
3785 emit(kConstantPoolMarker); 3798 emit(kConstantPoolMarker);
3786 } 3799 }
3787 3800
3788 // Emit 64-bit constant pool entries first: their range is smaller than 3801 // Emit 64-bit constant pool entries first: their range is smaller than
3789 // 32-bit entries. 3802 // 32-bit entries.
3790 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { 3803 for (int i = 0; i < num_pending_64_bit_constants_; i++) {
3791 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; 3804 ConstantPoolEntry& entry = pending_64_bit_constants_[i];
3792 3805
3793 DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment. 3806 DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment.
3794 3807
3795 Instr instr = instr_at(rinfo.pc()); 3808 Instr instr = instr_at(entry.position());
3796 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0. 3809 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
3797 DCHECK((IsVldrDPcImmediateOffset(instr) && 3810 DCHECK((IsVldrDPcImmediateOffset(instr) &&
3798 GetVldrDRegisterImmediateOffset(instr) == 0)); 3811 GetVldrDRegisterImmediateOffset(instr) == 0));
3799 3812
3800 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3813 int delta = pc_offset() - entry.position() - kPcLoadDelta;
3801 DCHECK(is_uint10(delta)); 3814 DCHECK(is_uint10(delta));
3802 3815
3803 bool found = false; 3816 bool found = false;
3804 uint64_t value = rinfo.raw_data64(); 3817 uint64_t value = entry.value64();
3805 for (int j = 0; j < i; j++) { 3818 for (int j = 0; j < i; j++) {
3806 RelocInfo& rinfo2 = pending_64_bit_reloc_info_[j]; 3819 ConstantPoolEntry& entry2 = pending_64_bit_constants_[j];
3807 if (value == rinfo2.raw_data64()) { 3820 if (value == entry2.value64()) {
3808 found = true; 3821 found = true;
3809 DCHECK(rinfo2.rmode() == RelocInfo::NONE64); 3822 Instr instr2 = instr_at(entry2.position());
3810 Instr instr2 = instr_at(rinfo2.pc());
3811 DCHECK(IsVldrDPcImmediateOffset(instr2)); 3823 DCHECK(IsVldrDPcImmediateOffset(instr2));
3812 delta = GetVldrDRegisterImmediateOffset(instr2); 3824 delta = GetVldrDRegisterImmediateOffset(instr2);
3813 delta += rinfo2.pc() - rinfo.pc(); 3825 delta += entry2.position() - entry.position();
3814 break; 3826 break;
3815 } 3827 }
3816 } 3828 }
3817 3829
3818 instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta)); 3830 instr_at_put(entry.position(),
3831 SetVldrDRegisterImmediateOffset(instr, delta));
3819 3832
3820 if (!found) { 3833 if (!found) {
3821 uint64_t uint_data = rinfo.raw_data64(); 3834 dq(entry.value64());
3822 emit(uint_data & 0xFFFFFFFF);
3823 emit(uint_data >> 32);
3824 } 3835 }
3825 } 3836 }
3826 3837
3827 // Emit 32-bit constant pool entries. 3838 // Emit 32-bit constant pool entries.
3828 for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) { 3839 for (int i = 0; i < num_pending_32_bit_constants_; i++) {
3829 RelocInfo& rinfo = pending_32_bit_reloc_info_[i]; 3840 ConstantPoolEntry& entry = pending_32_bit_constants_[i];
3830 DCHECK(rinfo.rmode() != RelocInfo::COMMENT && 3841 Instr instr = instr_at(entry.position());
3831 rinfo.rmode() != RelocInfo::POSITION &&
3832 rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
3833 rinfo.rmode() != RelocInfo::CONST_POOL &&
3834 rinfo.rmode() != RelocInfo::NONE64);
3835
3836 Instr instr = instr_at(rinfo.pc());
3837 3842
3838 // 64-bit loads shouldn't get here. 3843 // 64-bit loads shouldn't get here.
3839 DCHECK(!IsVldrDPcImmediateOffset(instr)); 3844 DCHECK(!IsVldrDPcImmediateOffset(instr));
3840 3845
3841 if (IsLdrPcImmediateOffset(instr) && 3846 if (IsLdrPcImmediateOffset(instr) &&
3842 GetLdrRegisterImmediateOffset(instr) == 0) { 3847 GetLdrRegisterImmediateOffset(instr) == 0) {
3843 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3848 int delta = pc_offset() - entry.position() - kPcLoadDelta;
3844 DCHECK(is_uint12(delta)); 3849 DCHECK(is_uint12(delta));
3845 // 0 is the smallest delta: 3850 // 0 is the smallest delta:
3846 // ldr rd, [pc, #0] 3851 // ldr rd, [pc, #0]
3847 // constant pool marker 3852 // constant pool marker
3848 // data 3853 // data
3849 3854
3850 bool found = false; 3855 bool found = false;
3851 if (!serializer_enabled() && rinfo.rmode() >= RelocInfo::CELL) { 3856 if (entry.sharing_ok()) {
3852 for (int j = 0; j < i; j++) { 3857 for (int j = 0; j < i; j++) {
3853 RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; 3858 ConstantPoolEntry& entry2 = pending_32_bit_constants_[j];
3854 3859
3855 if ((rinfo2.data() == rinfo.data()) && 3860 if (entry2.value() == entry.value()) {
3856 (rinfo2.rmode() == rinfo.rmode())) { 3861 Instr instr2 = instr_at(entry2.position());
3857 Instr instr2 = instr_at(rinfo2.pc());
3858 if (IsLdrPcImmediateOffset(instr2)) { 3862 if (IsLdrPcImmediateOffset(instr2)) {
3859 delta = GetLdrRegisterImmediateOffset(instr2); 3863 delta = GetLdrRegisterImmediateOffset(instr2);
3860 delta += rinfo2.pc() - rinfo.pc(); 3864 delta += entry2.position() - entry.position();
3861 found = true; 3865 found = true;
3862 break; 3866 break;
3863 } 3867 }
3864 } 3868 }
3865 } 3869 }
3866 } 3870 }
3867 3871
3868 instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta)); 3872 instr_at_put(entry.position(),
3873 SetLdrRegisterImmediateOffset(instr, delta));
3869 3874
3870 if (!found) { 3875 if (!found) {
3871 emit(rinfo.data()); 3876 dp(entry.value());
3872 } 3877 }
3873 } else { 3878 } else {
3874 DCHECK(IsMovW(instr)); 3879 DCHECK(IsMovW(instr));
3875 } 3880 }
3876 } 3881 }
3877 3882
3878 num_pending_32_bit_reloc_info_ = 0; 3883 num_pending_32_bit_constants_ = 0;
3879 num_pending_64_bit_reloc_info_ = 0; 3884 num_pending_64_bit_constants_ = 0;
3880 first_const_pool_32_use_ = -1; 3885 first_const_pool_32_use_ = -1;
3881 first_const_pool_64_use_ = -1; 3886 first_const_pool_64_use_ = -1;
3882 3887
3883 RecordComment("]"); 3888 RecordComment("]");
3884 3889
3885 if (after_pool.is_linked()) { 3890 if (after_pool.is_linked()) {
3886 bind(&after_pool); 3891 bind(&after_pool);
3887 } 3892 }
3888 } 3893 }
3889 3894
3890 // Since a constant pool was just emitted, move the check offset forward by 3895 // Since a constant pool was just emitted, move the check offset forward by
3891 // the standard interval. 3896 // the standard interval.
3892 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3897 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3893 } 3898 }
3894 3899
3895 3900
3896 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { 3901 void Assembler::PatchConstantPoolAccessInstruction(
3897 if (!FLAG_enable_ool_constant_pool) { 3902 int pc_offset, int offset, ConstantPoolEntry::Access access,
3898 return isolate->factory()->empty_constant_pool_array(); 3903 ConstantPoolEntry::Type type) {
3899 } 3904 DCHECK(FLAG_enable_embedded_constant_pool);
3900 return constant_pool_builder_.New(isolate); 3905 Address pc = buffer_ + pc_offset;
3901 }
3902 3906
3903 3907 // Patch vldr/ldr instruction with correct offset.
3904 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { 3908 Instr instr = instr_at(pc);
3905 constant_pool_builder_.Populate(this, constant_pool); 3909 if (access == ConstantPoolEntry::OVERFLOWED) {
3906 } 3910 if (CpuFeatures::IsSupported(ARMv7)) {
3907 3911 // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
3908 3912 Instr next_instr = instr_at(pc + kInstrSize);
3909 ConstantPoolBuilder::ConstantPoolBuilder() 3913 DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0));
3910 : entries_(), current_section_(ConstantPoolArray::SMALL_SECTION) {} 3914 DCHECK((IsMovT(next_instr) &&
3911 3915 Instruction::ImmedMovwMovtValue(next_instr) == 0));
3912 3916 instr_at_put(pc, PatchMovwImmediate(instr, offset & 0xffff));
3913 bool ConstantPoolBuilder::IsEmpty() { 3917 instr_at_put(pc + kInstrSize,
3914 return entries_.size() == 0; 3918 PatchMovwImmediate(next_instr, offset >> 16));
3915 } 3919 } else {
3916 3920 // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
3917 3921 Instr instr_2 = instr_at(pc + kInstrSize);
3918 ConstantPoolArray::Type ConstantPoolBuilder::GetConstantPoolType( 3922 Instr instr_3 = instr_at(pc + 2 * kInstrSize);
3919 RelocInfo::Mode rmode) { 3923 Instr instr_4 = instr_at(pc + 3 * kInstrSize);
3920 if (rmode == RelocInfo::NONE64) { 3924 DCHECK((IsMovImmed(instr) && Instruction::Immed8Value(instr) == 0));
3921 return ConstantPoolArray::INT64; 3925 DCHECK((IsOrrImmed(instr_2) && Instruction::Immed8Value(instr_2) == 0) &&
3922 } else if (!RelocInfo::IsGCRelocMode(rmode)) { 3926 GetRn(instr_2).is(GetRd(instr_2)));
3923 return ConstantPoolArray::INT32; 3927 DCHECK((IsOrrImmed(instr_3) && Instruction::Immed8Value(instr_3) == 0) &&
3924 } else if (RelocInfo::IsCodeTarget(rmode)) { 3928 GetRn(instr_3).is(GetRd(instr_3)));
3925 return ConstantPoolArray::CODE_PTR; 3929 DCHECK((IsOrrImmed(instr_4) && Instruction::Immed8Value(instr_4) == 0) &&
3930 GetRn(instr_4).is(GetRd(instr_4)));
3931 instr_at_put(pc, PatchShiftImm(instr, (offset & kImm8Mask)));
3932 instr_at_put(pc + kInstrSize,
3933 PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
3934 instr_at_put(pc + 2 * kInstrSize,
3935 PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
3936 instr_at_put(pc + 3 * kInstrSize,
3937 PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
3938 }
3939 } else if (type == ConstantPoolEntry::DOUBLE) {
3940 // Instruction to patch must be 'vldr rd, [pp, #0]'.
3941 DCHECK((IsVldrDPpImmediateOffset(instr) &&
3942 GetVldrDRegisterImmediateOffset(instr) == 0));
3943 DCHECK(is_uint10(offset));
3944 instr_at_put(pc, SetVldrDRegisterImmediateOffset(instr, offset));
3926 } else { 3945 } else {
3927 DCHECK(RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode)); 3946 // Instruction to patch must be 'ldr rd, [pp, #0]'.
3928 return ConstantPoolArray::HEAP_PTR; 3947 DCHECK((IsLdrPpImmediateOffset(instr) &&
3948 GetLdrRegisterImmediateOffset(instr) == 0));
3949 DCHECK(is_uint12(offset));
3950 instr_at_put(pc, SetLdrRegisterImmediateOffset(instr, offset));
3929 } 3951 }
3930 } 3952 }
3931 3953
3932
3933 ConstantPoolArray::LayoutSection ConstantPoolBuilder::AddEntry(
3934 Assembler* assm, const RelocInfo& rinfo) {
3935 RelocInfo::Mode rmode = rinfo.rmode();
3936 DCHECK(rmode != RelocInfo::COMMENT &&
3937 rmode != RelocInfo::POSITION &&
3938 rmode != RelocInfo::STATEMENT_POSITION &&
3939 rmode != RelocInfo::CONST_POOL);
3940
3941 // Try to merge entries which won't be patched.
3942 int merged_index = -1;
3943 ConstantPoolArray::LayoutSection entry_section = current_section_;
3944 if (RelocInfo::IsNone(rmode) ||
3945 (!assm->serializer_enabled() && (rmode >= RelocInfo::CELL))) {
3946 size_t i;
3947 std::vector<ConstantPoolEntry>::const_iterator it;
3948 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
3949 if (RelocInfo::IsEqual(rinfo, it->rinfo_)) {
3950 // Merge with found entry.
3951 merged_index = i;
3952 entry_section = entries_[i].section_;
3953 break;
3954 }
3955 }
3956 }
3957 DCHECK(entry_section <= current_section_);
3958 entries_.push_back(ConstantPoolEntry(rinfo, entry_section, merged_index));
3959
3960 if (merged_index == -1) {
3961 // Not merged, so update the appropriate count.
3962 number_of_entries_[entry_section].increment(GetConstantPoolType(rmode));
3963 }
3964
3965 // Check if we still have room for another entry in the small section
3966 // given Arm's ldr and vldr immediate offset range.
3967 if (current_section_ == ConstantPoolArray::SMALL_SECTION &&
3968 !(is_uint12(ConstantPoolArray::SizeFor(*small_entries())) &&
3969 is_uint10(ConstantPoolArray::MaxInt64Offset(
3970 small_entries()->count_of(ConstantPoolArray::INT64))))) {
3971 current_section_ = ConstantPoolArray::EXTENDED_SECTION;
3972 }
3973 return entry_section;
3974 }
3975
3976
3977 void ConstantPoolBuilder::Relocate(int pc_delta) {
3978 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
3979 entry != entries_.end(); entry++) {
3980 DCHECK(entry->rinfo_.rmode() != RelocInfo::JS_RETURN);
3981 entry->rinfo_.set_pc(entry->rinfo_.pc() + pc_delta);
3982 }
3983 }
3984
3985
3986 Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) {
3987 if (IsEmpty()) {
3988 return isolate->factory()->empty_constant_pool_array();
3989 } else if (extended_entries()->is_empty()) {
3990 return isolate->factory()->NewConstantPoolArray(*small_entries());
3991 } else {
3992 DCHECK(current_section_ == ConstantPoolArray::EXTENDED_SECTION);
3993 return isolate->factory()->NewExtendedConstantPoolArray(
3994 *small_entries(), *extended_entries());
3995 }
3996 }
3997
3998
3999 void ConstantPoolBuilder::Populate(Assembler* assm,
4000 ConstantPoolArray* constant_pool) {
4001 DCHECK_EQ(extended_entries()->is_empty(),
4002 !constant_pool->is_extended_layout());
4003 DCHECK(small_entries()->equals(ConstantPoolArray::NumberOfEntries(
4004 constant_pool, ConstantPoolArray::SMALL_SECTION)));
4005 if (constant_pool->is_extended_layout()) {
4006 DCHECK(extended_entries()->equals(ConstantPoolArray::NumberOfEntries(
4007 constant_pool, ConstantPoolArray::EXTENDED_SECTION)));
4008 }
4009
4010 // Set up initial offsets.
4011 int offsets[ConstantPoolArray::NUMBER_OF_LAYOUT_SECTIONS]
4012 [ConstantPoolArray::NUMBER_OF_TYPES];
4013 for (int section = 0; section <= constant_pool->final_section(); section++) {
4014 int section_start = (section == ConstantPoolArray::EXTENDED_SECTION)
4015 ? small_entries()->total_count()
4016 : 0;
4017 for (int i = 0; i < ConstantPoolArray::NUMBER_OF_TYPES; i++) {
4018 ConstantPoolArray::Type type = static_cast<ConstantPoolArray::Type>(i);
4019 if (number_of_entries_[section].count_of(type) != 0) {
4020 offsets[section][type] = constant_pool->OffsetOfElementAt(
4021 number_of_entries_[section].base_of(type) + section_start);
4022 }
4023 }
4024 }
4025
4026 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
4027 entry != entries_.end(); entry++) {
4028 RelocInfo rinfo = entry->rinfo_;
4029 RelocInfo::Mode rmode = entry->rinfo_.rmode();
4030 ConstantPoolArray::Type type = GetConstantPoolType(rmode);
4031
4032 // Update constant pool if necessary and get the entry's offset.
4033 int offset;
4034 if (entry->merged_index_ == -1) {
4035 offset = offsets[entry->section_][type];
4036 offsets[entry->section_][type] += ConstantPoolArray::entry_size(type);
4037 if (type == ConstantPoolArray::INT64) {
4038 constant_pool->set_at_offset(offset, rinfo.data64());
4039 } else if (type == ConstantPoolArray::INT32) {
4040 constant_pool->set_at_offset(offset,
4041 static_cast<int32_t>(rinfo.data()));
4042 } else if (type == ConstantPoolArray::CODE_PTR) {
4043 constant_pool->set_at_offset(offset,
4044 reinterpret_cast<Address>(rinfo.data()));
4045 } else {
4046 DCHECK(type == ConstantPoolArray::HEAP_PTR);
4047 constant_pool->set_at_offset(offset,
4048 reinterpret_cast<Object*>(rinfo.data()));
4049 }
4050 offset -= kHeapObjectTag;
4051 entry->merged_index_ = offset; // Stash offset for merged entries.
4052 } else {
4053 DCHECK(entry->merged_index_ < (entry - entries_.begin()));
4054 offset = entries_[entry->merged_index_].merged_index_;
4055 }
4056
4057 // Patch vldr/ldr instruction with correct offset.
4058 Instr instr = assm->instr_at(rinfo.pc());
4059 if (entry->section_ == ConstantPoolArray::EXTENDED_SECTION) {
4060 if (CpuFeatures::IsSupported(ARMv7)) {
4061 // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
4062 Instr next_instr = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
4063 DCHECK((Assembler::IsMovW(instr) &&
4064 Instruction::ImmedMovwMovtValue(instr) == 0));
4065 DCHECK((Assembler::IsMovT(next_instr) &&
4066 Instruction::ImmedMovwMovtValue(next_instr) == 0));
4067 assm->instr_at_put(
4068 rinfo.pc(), Assembler::PatchMovwImmediate(instr, offset & 0xffff));
4069 assm->instr_at_put(
4070 rinfo.pc() + Assembler::kInstrSize,
4071 Assembler::PatchMovwImmediate(next_instr, offset >> 16));
4072 } else {
4073 // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
4074 Instr instr_2 = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
4075 Instr instr_3 = assm->instr_at(rinfo.pc() + 2 * Assembler::kInstrSize);
4076 Instr instr_4 = assm->instr_at(rinfo.pc() + 3 * Assembler::kInstrSize);
4077 DCHECK((Assembler::IsMovImmed(instr) &&
4078 Instruction::Immed8Value(instr) == 0));
4079 DCHECK((Assembler::IsOrrImmed(instr_2) &&
4080 Instruction::Immed8Value(instr_2) == 0) &&
4081 Assembler::GetRn(instr_2).is(Assembler::GetRd(instr_2)));
4082 DCHECK((Assembler::IsOrrImmed(instr_3) &&
4083 Instruction::Immed8Value(instr_3) == 0) &&
4084 Assembler::GetRn(instr_3).is(Assembler::GetRd(instr_3)));
4085 DCHECK((Assembler::IsOrrImmed(instr_4) &&
4086 Instruction::Immed8Value(instr_4) == 0) &&
4087 Assembler::GetRn(instr_4).is(Assembler::GetRd(instr_4)));
4088 assm->instr_at_put(
4089 rinfo.pc(), Assembler::PatchShiftImm(instr, (offset & kImm8Mask)));
4090 assm->instr_at_put(
4091 rinfo.pc() + Assembler::kInstrSize,
4092 Assembler::PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
4093 assm->instr_at_put(
4094 rinfo.pc() + 2 * Assembler::kInstrSize,
4095 Assembler::PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
4096 assm->instr_at_put(
4097 rinfo.pc() + 3 * Assembler::kInstrSize,
4098 Assembler::PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
4099 }
4100 } else if (type == ConstantPoolArray::INT64) {
4101 // Instruction to patch must be 'vldr rd, [pp, #0]'.
4102 DCHECK((Assembler::IsVldrDPpImmediateOffset(instr) &&
4103 Assembler::GetVldrDRegisterImmediateOffset(instr) == 0));
4104 DCHECK(is_uint10(offset));
4105 assm->instr_at_put(rinfo.pc(), Assembler::SetVldrDRegisterImmediateOffset(
4106 instr, offset));
4107 } else {
4108 // Instruction to patch must be 'ldr rd, [pp, #0]'.
4109 DCHECK((Assembler::IsLdrPpImmediateOffset(instr) &&
4110 Assembler::GetLdrRegisterImmediateOffset(instr) == 0));
4111 DCHECK(is_uint12(offset));
4112 assm->instr_at_put(
4113 rinfo.pc(), Assembler::SetLdrRegisterImmediateOffset(instr, offset));
4114 }
4115 }
4116 }
4117
4118 3954
4119 } } // namespace v8::internal 3955 } } // namespace v8::internal
4120 3956
4121 #endif // V8_TARGET_ARCH_ARM 3957 #endif // V8_TARGET_ARCH_ARM
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698