Chromium Code Reviews

Side by Side Diff: src/arm/assembler-arm.cc

Issue 1131783003: Embedded constant pools. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Address remaining comments. Created 5 years, 6 months ago
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 216 matching lines...)
227 // ----------------------------------------------------------------------------- 227 // -----------------------------------------------------------------------------
228 // Implementation of RelocInfo 228 // Implementation of RelocInfo
229 229
230 // static 230 // static
231 const int RelocInfo::kApplyMask = 0; 231 const int RelocInfo::kApplyMask = 0;
232 232
233 233
234 bool RelocInfo::IsCodedSpecially() { 234 bool RelocInfo::IsCodedSpecially() {
235 // The deserializer needs to know whether a pointer is specially coded.  Being 235 // The deserializer needs to know whether a pointer is specially coded.  Being
236 // specially coded on ARM means that it is a movw/movt instruction, or is an 236 // specially coded on ARM means that it is a movw/movt instruction, or is an
237 // out of line constant pool entry.  These only occur if 237 // embedded constant pool entry.  These only occur if
238 // FLAG_enable_ool_constant_pool is true. 238 // FLAG_enable_embedded_constant_pool is true.
239 return FLAG_enable_ool_constant_pool; 239 return FLAG_enable_embedded_constant_pool;
240 } 240 }
241 241
242 242
243 bool RelocInfo::IsInConstantPool() { 243 bool RelocInfo::IsInConstantPool() {
244 return Assembler::is_constant_pool_load(pc_); 244 return Assembler::is_constant_pool_load(pc_);
245 } 245 }
246 246
247 247
248 // ----------------------------------------------------------------------------- 248 // -----------------------------------------------------------------------------
249 // Implementation of Operand and MemOperand 249 // Implementation of Operand and MemOperand
(...skipping 192 matching lines...)
442 const Instr kLdrRegFpNegOffsetPattern = 442 const Instr kLdrRegFpNegOffsetPattern =
443 al | B26 | L | NegOffset | kRegister_fp_Code * B16; 443 al | B26 | L | NegOffset | kRegister_fp_Code * B16;
444 const Instr kStrRegFpNegOffsetPattern = 444 const Instr kStrRegFpNegOffsetPattern =
445 al | B26 | NegOffset | kRegister_fp_Code * B16; 445 al | B26 | NegOffset | kRegister_fp_Code * B16;
446 const Instr kLdrStrInstrTypeMask = 0xffff0000; 446 const Instr kLdrStrInstrTypeMask = 0xffff0000;
447 447
448 448
449 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) 449 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
450 : AssemblerBase(isolate, buffer, buffer_size), 450 : AssemblerBase(isolate, buffer, buffer_size),
451 recorded_ast_id_(TypeFeedbackId::None()), 451 recorded_ast_id_(TypeFeedbackId::None()),
452 constant_pool_builder_(), 452 constant_pool_builder_(kLdrMaxReachBits, kVldrMaxReachBits),
453 positions_recorder_(this) { 453 positions_recorder_(this) {
454 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); 454 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
455 num_pending_32_bit_reloc_info_ = 0; 455 num_pending_32_bit_constants_ = 0;
456 num_pending_64_bit_reloc_info_ = 0; 456 num_pending_64_bit_constants_ = 0;
457 next_buffer_check_ = 0; 457 next_buffer_check_ = 0;
458 const_pool_blocked_nesting_ = 0; 458 const_pool_blocked_nesting_ = 0;
459 no_const_pool_before_ = 0; 459 no_const_pool_before_ = 0;
460 first_const_pool_32_use_ = -1; 460 first_const_pool_32_use_ = -1;
461 first_const_pool_64_use_ = -1; 461 first_const_pool_64_use_ = -1;
462 last_bound_pos_ = 0; 462 last_bound_pos_ = 0;
463 ClearRecordedAstId(); 463 ClearRecordedAstId();
464 } 464 }
465 465
466 466
467 Assembler::~Assembler() { 467 Assembler::~Assembler() {
468 DCHECK(const_pool_blocked_nesting_ == 0); 468 DCHECK(const_pool_blocked_nesting_ == 0);
469 } 469 }
470 470
471 471
472 void Assembler::GetCode(CodeDesc* desc) { 472 void Assembler::GetCode(CodeDesc* desc) {
473 reloc_info_writer.Finish(); 473 reloc_info_writer.Finish();
474 if (!FLAG_enable_ool_constant_pool) { 474
475 // Emit constant pool if necessary. 475 // Emit constant pool if necessary.
476 int offset = 0;
477 if (FLAG_enable_embedded_constant_pool) {
478 offset = EmitEmbeddedConstantPool();
rmcilroy 2015/06/01 09:52:08 /s/offset/constant_pool_offset
MTBrandyberry 2015/06/01 21:01:30 Done.
479 } else {
476 CheckConstPool(true, false); 480 CheckConstPool(true, false);
477 DCHECK(num_pending_32_bit_reloc_info_ == 0); 481 DCHECK(num_pending_32_bit_constants_ == 0);
478 DCHECK(num_pending_64_bit_reloc_info_ == 0); 482 DCHECK(num_pending_64_bit_constants_ == 0);
479 } 483 }
480 // Set up code descriptor. 484 // Set up code descriptor.
481 desc->buffer = buffer_; 485 desc->buffer = buffer_;
482 desc->buffer_size = buffer_size_; 486 desc->buffer_size = buffer_size_;
483 desc->instr_size = pc_offset(); 487 desc->instr_size = pc_offset();
484 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); 488 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
489 desc->constant_pool_size = (offset ? desc->instr_size - offset : 0);
485 desc->origin = this; 490 desc->origin = this;
486 } 491 }
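
For orientation, the new constant_pool_size bookkeeping in GetCode reduces to one subtraction. A minimal sketch with hypothetical numbers (the helper and its values are illustrative, not part of the patch):

// If EmitEmbeddedConstantPool() returned 0, no pool was emitted and the size is 0.
int ConstantPoolSize(int instr_size, int constant_pool_offset) {
  return constant_pool_offset != 0 ? instr_size - constant_pool_offset : 0;
}
// e.g. ConstantPoolSize(256, 224) == 32: the embedded pool occupies the last
// 32 bytes of the instruction area described by the CodeDesc.
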
487 492
488 493
489 void Assembler::Align(int m) { 494 void Assembler::Align(int m) {
490 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); 495 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m));
491 while ((pc_offset() & (m - 1)) != 0) { 496 while ((pc_offset() & (m - 1)) != 0) {
492 nop(); 497 nop();
493 } 498 }
494 } 499 }
(...skipping 121 matching lines...)
616 621
617 622
618 Register Assembler::GetRm(Instr instr) { 623 Register Assembler::GetRm(Instr instr) {
619 Register reg; 624 Register reg;
620 reg.code_ = Instruction::RmValue(instr); 625 reg.code_ = Instruction::RmValue(instr);
621 return reg; 626 return reg;
622 } 627 }
623 628
624 629
625 Instr Assembler::GetConsantPoolLoadPattern() { 630 Instr Assembler::GetConsantPoolLoadPattern() {
626 if (FLAG_enable_ool_constant_pool) { 631 if (FLAG_enable_embedded_constant_pool) {
627 return kLdrPpImmedPattern; 632 return kLdrPpImmedPattern;
628 } else { 633 } else {
629 return kLdrPCImmedPattern; 634 return kLdrPCImmedPattern;
630 } 635 }
631 } 636 }
632 637
633 638
634 Instr Assembler::GetConsantPoolLoadMask() { 639 Instr Assembler::GetConsantPoolLoadMask() {
635 if (FLAG_enable_ool_constant_pool) { 640 if (FLAG_enable_embedded_constant_pool) {
636 return kLdrPpImmedMask; 641 return kLdrPpImmedMask;
637 } else { 642 } else {
638 return kLdrPCImmedMask; 643 return kLdrPCImmedMask;
639 } 644 }
640 } 645 }
641 646
642 647
643 bool Assembler::IsPush(Instr instr) { 648 bool Assembler::IsPush(Instr instr) {
644 return ((instr & ~kRdMask) == kPushRegPattern); 649 return ((instr & ~kRdMask) == kPushRegPattern);
645 } 650 }
(...skipping 391 matching lines...)
1037 return assembler->serializer_enabled(); 1042 return assembler->serializer_enabled();
1038 } else if (RelocInfo::IsNone(rmode_)) { 1043 } else if (RelocInfo::IsNone(rmode_)) {
1039 return false; 1044 return false;
1040 } 1045 }
1041 return true; 1046 return true;
1042 } 1047 }
1043 1048
1044 1049
1045 static bool use_mov_immediate_load(const Operand& x, 1050 static bool use_mov_immediate_load(const Operand& x,
1046 const Assembler* assembler) { 1051 const Assembler* assembler) {
1047 if (FLAG_enable_ool_constant_pool && assembler != NULL && 1052 if (FLAG_enable_embedded_constant_pool && assembler != NULL &&
1048 !assembler->is_ool_constant_pool_available()) { 1053 !assembler->is_constant_pool_available()) {
1049 return true; 1054 return true;
1050 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && 1055 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
1051 (assembler == NULL || !assembler->predictable_code_size())) { 1056 (assembler == NULL || !assembler->predictable_code_size())) {
1052 // Prefer movw / movt to constant pool if it is more efficient on the CPU. 1057 // Prefer movw / movt to constant pool if it is more efficient on the CPU.
1053 return true; 1058 return true;
1054 } else if (x.must_output_reloc_info(assembler)) { 1059 } else if (x.must_output_reloc_info(assembler)) {
1055 // Prefer constant pool if data is likely to be patched. 1060 // Prefer constant pool if data is likely to be patched.
1056 return false; 1061 return false;
1057 } else { 1062 } else {
1058 // Otherwise, use immediate load if movw / movt is available. 1063 // Otherwise, use immediate load if movw / movt is available.
1059 return CpuFeatures::IsSupported(ARMv7); 1064 return CpuFeatures::IsSupported(ARMv7);
1060 } 1065 }
1061 } 1066 }
1062 1067
1063 1068
1064 int Operand::instructions_required(const Assembler* assembler, 1069 int Operand::instructions_required(const Assembler* assembler,
1065 Instr instr) const { 1070 Instr instr) const {
1066 if (rm_.is_valid()) return 1; 1071 if (rm_.is_valid()) return 1;
1067 uint32_t dummy1, dummy2; 1072 uint32_t dummy1, dummy2;
1068 if (must_output_reloc_info(assembler) || 1073 if (must_output_reloc_info(assembler) ||
1069 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { 1074 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) {
1070 // The immediate operand cannot be encoded as a shifter operand, or use of 1075 // The immediate operand cannot be encoded as a shifter operand, or use of
1071 // constant pool is required. First account for the instructions required 1076 // constant pool is required. First account for the instructions required
1072 // for the constant pool or immediate load 1077 // for the constant pool or immediate load
1073 int instructions; 1078 int instructions;
1074 if (use_mov_immediate_load(*this, assembler)) { 1079 if (use_mov_immediate_load(*this, assembler)) {
1075 // A movw / movt or mov / orr immediate load. 1080 // A movw / movt or mov / orr immediate load.
1076 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4; 1081 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4;
1077 } else if (assembler != NULL && assembler->use_extended_constant_pool()) { 1082 } else if (assembler != NULL &&
1078 // An extended constant pool load. 1083 assembler->ConstantPoolAccessIsInOverflow()) {
1084 // An overflowed constant pool load.
1079 instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5; 1085 instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5;
1080 } else { 1086 } else {
1081 // A small constant pool load. 1087 // A small constant pool load.
1082 instructions = 1; 1088 instructions = 1;
1083 } 1089 }
1084 1090
1085 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set 1091 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set
1086 // For a mov or mvn instruction which doesn't set the condition 1092 // For a mov or mvn instruction which doesn't set the condition
1087 // code, the constant pool or immediate load is enough, otherwise we need 1093 // code, the constant pool or immediate load is enough, otherwise we need
1088 // to account for the actual instruction being requested. 1094 // to account for the actual instruction being requested.
1089 instructions += 1; 1095 instructions += 1;
1090 } 1096 }
1091 return instructions; 1097 return instructions;
1092 } else { 1098 } else {
1093 // No use of constant pool and the immediate operand can be encoded as a 1099 // No use of constant pool and the immediate operand can be encoded as a
1094 // shifter operand. 1100 // shifter operand.
1095 return 1; 1101 return 1;
1096 } 1102 }
1097 } 1103 }
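
To make the counting above concrete, here is a hypothetical helper mirroring the three materialization strategies and the extra instruction needed when the request is not a plain mov/mvn (illustrative only, not the real API):

// strategy: 0 = regular pool load, 1 = mov-immediate load, 2 = overflowed pool load.
int MaterializationCost(int strategy, bool armv7, bool plain_mov_or_mvn) {
  int load = 1;                              // single ldr/vldr from the pool
  if (strategy == 1) load = armv7 ? 2 : 4;   // movw/movt or mov + 3*orr
  if (strategy == 2) load = armv7 ? 3 : 5;   // as above, then ldr [pp, target]
  return load + (plain_mov_or_mvn ? 0 : 1);  // the requested instruction itself
}
// e.g. 'add r0, r1, <32-bit imm>' with a movw/movt load on ARMv7 costs 2 + 1 = 3.
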
1098 1104
1099 1105
1100 void Assembler::move_32_bit_immediate(Register rd, 1106 void Assembler::move_32_bit_immediate(Register rd,
1101 const Operand& x, 1107 const Operand& x,
1102 Condition cond) { 1108 Condition cond) {
1103 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL);
1104 uint32_t imm32 = static_cast<uint32_t>(x.imm32_); 1109 uint32_t imm32 = static_cast<uint32_t>(x.imm32_);
1105 if (x.must_output_reloc_info(this)) { 1110 if (x.must_output_reloc_info(this)) {
1106 RecordRelocInfo(rinfo); 1111 RecordRelocInfo(x.rmode_);
1107 } 1112 }
1108 1113
1109 if (use_mov_immediate_load(x, this)) { 1114 if (use_mov_immediate_load(x, this)) {
1110 Register target = rd.code() == pc.code() ? ip : rd; 1115 Register target = rd.code() == pc.code() ? ip : rd;
1111 if (CpuFeatures::IsSupported(ARMv7)) { 1116 if (CpuFeatures::IsSupported(ARMv7)) {
1112 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { 1117 if (!FLAG_enable_embedded_constant_pool &&
1118 x.must_output_reloc_info(this)) {
1113 // Make sure the movw/movt doesn't get separated. 1119 // Make sure the movw/movt doesn't get separated.
1114 BlockConstPoolFor(2); 1120 BlockConstPoolFor(2);
1115 } 1121 }
1116 movw(target, imm32 & 0xffff, cond); 1122 movw(target, imm32 & 0xffff, cond);
1117 movt(target, imm32 >> 16, cond); 1123 movt(target, imm32 >> 16, cond);
1118 } else { 1124 } else {
1119 DCHECK(FLAG_enable_ool_constant_pool); 1125 DCHECK(FLAG_enable_embedded_constant_pool);
1120 mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond); 1126 mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond);
1121 orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond); 1127 orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond);
1122 orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond); 1128 orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond);
1123 orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond); 1129 orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond);
1124 } 1130 }
1125 if (target.code() != rd.code()) { 1131 if (target.code() != rd.code()) {
1126 mov(rd, target, LeaveCC, cond); 1132 mov(rd, target, LeaveCC, cond);
1127 } 1133 }
1128 } else { 1134 } else {
1129 DCHECK(!FLAG_enable_ool_constant_pool || is_ool_constant_pool_available()); 1135 DCHECK(!FLAG_enable_embedded_constant_pool || is_constant_pool_available());
1130 ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); 1136 ConstantPoolEntry::Access access =
1131 if (section == ConstantPoolArray::EXTENDED_SECTION) { 1137 ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_);
1132 DCHECK(FLAG_enable_ool_constant_pool); 1138 if (access == ConstantPoolEntry::OVERFLOWED) {
1139 DCHECK(FLAG_enable_embedded_constant_pool);
1133 Register target = rd.code() == pc.code() ? ip : rd; 1140 Register target = rd.code() == pc.code() ? ip : rd;
1134 // Emit instructions to load constant pool offset. 1141 // Emit instructions to load constant pool offset.
1135 if (CpuFeatures::IsSupported(ARMv7)) { 1142 if (CpuFeatures::IsSupported(ARMv7)) {
1136 movw(target, 0, cond); 1143 movw(target, 0, cond);
1137 movt(target, 0, cond); 1144 movt(target, 0, cond);
1138 } else { 1145 } else {
1139 mov(target, Operand(0), LeaveCC, cond); 1146 mov(target, Operand(0), LeaveCC, cond);
1140 orr(target, target, Operand(0), LeaveCC, cond); 1147 orr(target, target, Operand(0), LeaveCC, cond);
1141 orr(target, target, Operand(0), LeaveCC, cond); 1148 orr(target, target, Operand(0), LeaveCC, cond);
1142 orr(target, target, Operand(0), LeaveCC, cond); 1149 orr(target, target, Operand(0), LeaveCC, cond);
1143 } 1150 }
1144 // Load from constant pool at offset. 1151 // Load from constant pool at offset.
1145 ldr(rd, MemOperand(pp, target), cond); 1152 ldr(rd, MemOperand(pp, target), cond);
1146 } else { 1153 } else {
1147 DCHECK(section == ConstantPoolArray::SMALL_SECTION); 1154 DCHECK(access == ConstantPoolEntry::REGULAR);
1148 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); 1155 ldr(rd, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0),
1156 cond);
1149 } 1157 }
1150 } 1158 }
1151 } 1159 }
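
For reference, how the two sequences above split a hypothetical immediate 0xDEADBEEF: movw/movt takes the low and high 16-bit halves, while the pre-ARMv7 mov/orr path uses one byte per instruction, matching the kImm8Mask shifts in the code. A standalone sketch (assumes kImm8Mask == 0xff, as in the assembler's constants):

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t imm32 = 0xDEADBEEF;
  const uint32_t imm8_mask = 0xFF;  // assumed value of kImm8Mask
  // ARMv7: movw writes the low half, movt the high half.
  uint32_t movw_imm = imm32 & 0xFFFF;  // 0xBEEF
  uint32_t movt_imm = imm32 >> 16;     // 0xDEAD
  assert(((movt_imm << 16) | movw_imm) == imm32);
  // Pre-ARMv7: mov + 3*orr, each operand one byte at a rotated position.
  uint32_t chunks[4] = {imm32 & imm8_mask, imm32 & (imm8_mask << 8),
                        imm32 & (imm8_mask << 16), imm32 & (imm8_mask << 24)};
  assert((chunks[0] | chunks[1] | chunks[2] | chunks[3]) == imm32);
  return 0;
}
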
1152 1160
1153 1161
1154 void Assembler::addrmod1(Instr instr, 1162 void Assembler::addrmod1(Instr instr,
1155 Register rn, 1163 Register rn,
1156 Register rd, 1164 Register rd,
1157 const Operand& x) { 1165 const Operand& x) {
1158 CheckBuffer(); 1166 CheckBuffer();
(...skipping 1388 matching lines...)
2547 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { 2555 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
2548 // The double can be encoded in the instruction. 2556 // The double can be encoded in the instruction.
2549 // 2557 //
2550 // Dd = immediate 2558 // Dd = immediate
2551 // Instruction details available in ARM DDI 0406C.b, A8-936. 2559 // Instruction details available in ARM DDI 0406C.b, A8-936.
2552 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | 2560 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) |
2553 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) 2561 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0)
2554 int vd, d; 2562 int vd, d;
2555 dst.split_code(&vd, &d); 2563 dst.split_code(&vd, &d);
2556 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); 2564 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
2557 } else if (FLAG_enable_vldr_imm && is_ool_constant_pool_available()) { 2565 } else if (FLAG_enable_vldr_imm && is_constant_pool_available()) {
2558 // TODO(jfb) Temporarily turned off until we have constant blinding or 2566 // TODO(jfb) Temporarily turned off until we have constant blinding or
2559 // some equivalent mitigation: an attacker can otherwise control 2567 // some equivalent mitigation: an attacker can otherwise control
2560 // generated data which also happens to be executable, a Very Bad 2568 // generated data which also happens to be executable, a Very Bad
2561 // Thing indeed. 2569 // Thing indeed.
2562 // Blinding gets tricky because we don't have xor, we probably 2570 // Blinding gets tricky because we don't have xor, we probably
2563 // need to add/subtract without losing precision, which requires a 2571 // need to add/subtract without losing precision, which requires a
2564 // cookie value that Lithium is probably better positioned to 2572 // cookie value that Lithium is probably better positioned to
2565 // choose. 2573 // choose.
2566 // We could also add a few peepholes here like detecting 0.0 and 2574 // We could also add a few peepholes here like detecting 0.0 and
2567 // -0.0 and doing a vmov from the sequestered d14, forcing denorms 2575 // -0.0 and doing a vmov from the sequestered d14, forcing denorms
2568 // to zero (we set flush-to-zero), and normalizing NaN values. 2576 // to zero (we set flush-to-zero), and normalizing NaN values.
2569 // We could also detect redundant values. 2577 // We could also detect redundant values.
2570 // The code could also randomize the order of values, though 2578 // The code could also randomize the order of values, though
2571 // that's tricky because vldr has a limited reach. Furthermore 2579 // that's tricky because vldr has a limited reach. Furthermore
2572 // it breaks load locality. 2580 // it breaks load locality.
2573 RelocInfo rinfo(pc_, imm); 2581 ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm);
2574 ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); 2582 if (access == ConstantPoolEntry::OVERFLOWED) {
2575 if (section == ConstantPoolArray::EXTENDED_SECTION) { 2583 DCHECK(FLAG_enable_embedded_constant_pool);
2576 DCHECK(FLAG_enable_ool_constant_pool);
2577 // Emit instructions to load constant pool offset. 2584 // Emit instructions to load constant pool offset.
2578 movw(ip, 0); 2585 movw(ip, 0);
2579 movt(ip, 0); 2586 movt(ip, 0);
2580 // Load from constant pool at offset. 2587 // Load from constant pool at offset.
2581 vldr(dst, MemOperand(pp, ip)); 2588 vldr(dst, MemOperand(pp, ip));
2582 } else { 2589 } else {
2583 DCHECK(section == ConstantPoolArray::SMALL_SECTION); 2590 DCHECK(access == ConstantPoolEntry::REGULAR);
2584 vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0)); 2591 vldr(dst, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0));
2585 } 2592 }
2586 } else { 2593 } else {
2587 // Synthesise the double from ARM immediates. 2594 // Synthesise the double from ARM immediates.
2588 uint32_t lo, hi; 2595 uint32_t lo, hi;
2589 DoubleAsTwoUInt32(imm, &lo, &hi); 2596 DoubleAsTwoUInt32(imm, &lo, &hi);
2590 2597
2591 if (lo == hi) { 2598 if (lo == hi) {
2592 // Move the low and high parts of the double to a D register in one 2599 // Move the low and high parts of the double to a D register in one
2593 // instruction. 2600 // instruction.
2594 mov(ip, Operand(lo)); 2601 mov(ip, Operand(lo));
(...skipping 953 matching lines...)
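
The 'lo == hi' special case above depends on DoubleAsTwoUInt32 splitting the IEEE-754 bit pattern into two 32-bit words. A minimal standalone sketch of that split (hypothetical helper name; word order as on little-endian ARM):

#include <cstdint>
#include <cstring>

void DoubleAsTwoWords(double imm, uint32_t* lo, uint32_t* hi) {
  uint64_t bits;
  std::memcpy(&bits, &imm, sizeof(bits));   // reinterpret the IEEE-754 pattern
  *lo = static_cast<uint32_t>(bits);        // low word, 0x00000000 for 1.0
  *hi = static_cast<uint32_t>(bits >> 32);  // high word, 0x3FF00000 for 1.0
}
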
3548 DeleteArray(buffer_); 3555 DeleteArray(buffer_);
3549 buffer_ = desc.buffer; 3556 buffer_ = desc.buffer;
3550 buffer_size_ = desc.buffer_size; 3557 buffer_size_ = desc.buffer_size;
3551 pc_ += pc_delta; 3558 pc_ += pc_delta;
3552 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, 3559 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
3553 reloc_info_writer.last_pc() + pc_delta); 3560 reloc_info_writer.last_pc() + pc_delta);
3554 3561
3555 // None of our relocation types are pc relative pointing outside the code 3562 // None of our relocation types are pc relative pointing outside the code
3556 // buffer nor pc absolute pointing inside the code buffer, so there is no need 3563 // buffer nor pc absolute pointing inside the code buffer, so there is no need
3557 // to relocate any emitted relocation entries. 3564 // to relocate any emitted relocation entries.
3558
3559 // Relocate pending relocation entries.
3560 for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
3561 RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
3562 DCHECK(rinfo.rmode() != RelocInfo::COMMENT &&
3563 rinfo.rmode() != RelocInfo::POSITION);
3564 if (rinfo.rmode() != RelocInfo::JS_RETURN) {
3565 rinfo.set_pc(rinfo.pc() + pc_delta);
3566 }
3567 }
3568 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
3569 RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
3570 DCHECK(rinfo.rmode() == RelocInfo::NONE64);
3571 rinfo.set_pc(rinfo.pc() + pc_delta);
3572 }
3573 constant_pool_builder_.Relocate(pc_delta);
3574 } 3565 }
3575 3566
3576 3567
3577 void Assembler::db(uint8_t data) { 3568 void Assembler::db(uint8_t data) {
3578 // No relocation info should be pending while using db. db is used 3569 // No relocation info should be pending while using db. db is used
3579 // to write pure data with no pointers and the constant pool should 3570 // to write pure data with no pointers and the constant pool should
3580 // be emitted before using db. 3571 // be emitted before using db.
3581 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3572 DCHECK(num_pending_32_bit_constants_ == 0);
3582 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3573 DCHECK(num_pending_64_bit_constants_ == 0);
3583 CheckBuffer(); 3574 CheckBuffer();
3584 *reinterpret_cast<uint8_t*>(pc_) = data; 3575 *reinterpret_cast<uint8_t*>(pc_) = data;
3585 pc_ += sizeof(uint8_t); 3576 pc_ += sizeof(uint8_t);
3586 } 3577 }
3587 3578
3588 3579
3589 void Assembler::dd(uint32_t data) { 3580 void Assembler::dd(uint32_t data) {
3590 // No relocation info should be pending while using dd. dd is used 3581 // No relocation info should be pending while using dd. dd is used
3591 // to write pure data with no pointers and the constant pool should 3582 // to write pure data with no pointers and the constant pool should
3592 // be emitted before using dd. 3583 // be emitted before using dd.
3593 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3584 DCHECK(num_pending_32_bit_constants_ == 0);
3594 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3585 DCHECK(num_pending_64_bit_constants_ == 0);
3595 CheckBuffer(); 3586 CheckBuffer();
3596 *reinterpret_cast<uint32_t*>(pc_) = data; 3587 *reinterpret_cast<uint32_t*>(pc_) = data;
3597 pc_ += sizeof(uint32_t); 3588 pc_ += sizeof(uint32_t);
3598 } 3589 }
3599 3590
3600 3591
3592 void Assembler::dq(uint64_t value) {
3593 // No relocation info should be pending while using dq. dq is used
3594 // to write pure data with no pointers and the constant pool should
3595 // be emitted before using dd.
3596 DCHECK(num_pending_32_bit_constants_ == 0);
3597 DCHECK(num_pending_64_bit_constants_ == 0);
3598 CheckBuffer();
3599 *reinterpret_cast<uint64_t*>(pc_) = value;
3600 pc_ += sizeof(uint64_t);
3601 }
3602
3603
3601 void Assembler::emit_code_stub_address(Code* stub) { 3604 void Assembler::emit_code_stub_address(Code* stub) {
3602 CheckBuffer(); 3605 CheckBuffer();
3603 *reinterpret_cast<uint32_t*>(pc_) = 3606 *reinterpret_cast<uint32_t*>(pc_) =
3604 reinterpret_cast<uint32_t>(stub->instruction_start()); 3607 reinterpret_cast<uint32_t>(stub->instruction_start());
3605 pc_ += sizeof(uint32_t); 3608 pc_ += sizeof(uint32_t);
3606 } 3609 }
3607 3610
3608 3611
3609 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { 3612 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3613 if (RelocInfo::IsNone(rmode) ||
3614 // Don't record external references unless the heap will be serialized.
3615 (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() &&
3616 !emit_debug_code())) {
3617 return;
3618 }
3619 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
3620 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
3621 data = RecordedAstId().ToInt();
3622 ClearRecordedAstId();
3623 }
3610 RelocInfo rinfo(pc_, rmode, data, NULL); 3624 RelocInfo rinfo(pc_, rmode, data, NULL);
3611 RecordRelocInfo(rinfo); 3625 reloc_info_writer.Write(&rinfo);
3612 } 3626 }
3613 3627
3614 3628
3615 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { 3629 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
3616 if (!RelocInfo::IsNone(rinfo.rmode())) { 3630 RelocInfo::Mode rmode,
3617 // Don't record external references unless the heap will be serialized. 3631 intptr_t value) {
3618 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE && 3632 DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::POSITION &&
3619 !serializer_enabled() && !emit_debug_code()) { 3633 rmode != RelocInfo::STATEMENT_POSITION &&
3620 return; 3634 rmode != RelocInfo::CONST_POOL && rmode != RelocInfo::NONE64);
3635 bool sharing_ok = RelocInfo::IsNone(rmode) ||
3636 !(serializer_enabled() || rmode < RelocInfo::CELL);
3637 if (FLAG_enable_embedded_constant_pool) {
3638 return constant_pool_builder_.AddEntry(position, value, sharing_ok);
3639 } else {
3640 DCHECK(num_pending_32_bit_constants_ < kMaxNumPending32Constants);
3641 if (num_pending_32_bit_constants_ == 0) {
3642 first_const_pool_32_use_ = position;
3621 } 3643 }
3622 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here 3644 ConstantPoolEntry entry(position, value, sharing_ok);
3623 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { 3645 pending_32_bit_constants_[num_pending_32_bit_constants_++] = entry;
3624 RelocInfo reloc_info_with_ast_id(rinfo.pc(), 3646
3625 rinfo.rmode(), 3647 // Make sure the constant pool is not emitted in place of the next
3626 RecordedAstId().ToInt(), 3648 // instruction for which we just recorded relocation info.
3627 NULL); 3649 BlockConstPoolFor(1);
3628 ClearRecordedAstId(); 3650 return ConstantPoolEntry::REGULAR;
3629 reloc_info_writer.Write(&reloc_info_with_ast_id);
3630 } else {
3631 reloc_info_writer.Write(&rinfo);
3632 }
3633 } 3651 }
3634 } 3652 }
3635 3653
3636 3654
3637 ConstantPoolArray::LayoutSection Assembler::ConstantPoolAddEntry( 3655 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
3638 const RelocInfo& rinfo) { 3656 double value) {
3639 if (FLAG_enable_ool_constant_pool) { 3657 if (FLAG_enable_embedded_constant_pool) {
3640 return constant_pool_builder_.AddEntry(this, rinfo); 3658 return constant_pool_builder_.AddEntry(position, value);
3641 } else { 3659 } else {
3642 if (rinfo.rmode() == RelocInfo::NONE64) { 3660 DCHECK(num_pending_64_bit_constants_ < kMaxNumPending64Constants);
3643 DCHECK(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); 3661 if (num_pending_64_bit_constants_ == 0) {
3644 if (num_pending_64_bit_reloc_info_ == 0) { 3662 first_const_pool_64_use_ = position;
3645 first_const_pool_64_use_ = pc_offset();
3646 }
3647 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
3648 } else {
3649 DCHECK(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
3650 if (num_pending_32_bit_reloc_info_ == 0) {
3651 first_const_pool_32_use_ = pc_offset();
3652 }
3653 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
3654 } 3663 }
3664 ConstantPoolEntry entry(position, value);
3665 pending_64_bit_constants_[num_pending_64_bit_constants_++] = entry;
3666
3655 // Make sure the constant pool is not emitted in place of the next 3667 // Make sure the constant pool is not emitted in place of the next
3656 // instruction for which we just recorded relocation info. 3668 // instruction for which we just recorded relocation info.
3657 BlockConstPoolFor(1); 3669 BlockConstPoolFor(1);
3658 return ConstantPoolArray::SMALL_SECTION; 3670 return ConstantPoolEntry::REGULAR;
3659 } 3671 }
3660 } 3672 }
3661 3673
3662 3674
3663 void Assembler::BlockConstPoolFor(int instructions) { 3675 void Assembler::BlockConstPoolFor(int instructions) {
3664 if (FLAG_enable_ool_constant_pool) { 3676 if (FLAG_enable_embedded_constant_pool) {
3665 // Should be a no-op if using an out-of-line constant pool. 3677 // Should be a no-op if using an embedded constant pool.
3666 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3678 DCHECK(num_pending_32_bit_constants_ == 0);
3667 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3679 DCHECK(num_pending_64_bit_constants_ == 0);
3668 return; 3680 return;
3669 } 3681 }
3670 3682
3671 int pc_limit = pc_offset() + instructions * kInstrSize; 3683 int pc_limit = pc_offset() + instructions * kInstrSize;
3672 if (no_const_pool_before_ < pc_limit) { 3684 if (no_const_pool_before_ < pc_limit) {
3673 // Max pool start (if we need a jump and an alignment). 3685 // Max pool start (if we need a jump and an alignment).
3674 #ifdef DEBUG 3686 #ifdef DEBUG
3675 int start = pc_limit + kInstrSize + 2 * kPointerSize; 3687 int start = pc_limit + kInstrSize + 2 * kPointerSize;
3676 DCHECK((num_pending_32_bit_reloc_info_ == 0) || 3688 DCHECK((num_pending_32_bit_constants_ == 0) ||
3677 (start - first_const_pool_32_use_ + 3689 (start - first_const_pool_32_use_ +
3678 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); 3690 num_pending_64_bit_constants_ * kDoubleSize <
3679 DCHECK((num_pending_64_bit_reloc_info_ == 0) || 3691 kMaxDistToIntPool));
3692 DCHECK((num_pending_64_bit_constants_ == 0) ||
3680 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); 3693 (start - first_const_pool_64_use_ < kMaxDistToFPPool));
3681 #endif 3694 #endif
3682 no_const_pool_before_ = pc_limit; 3695 no_const_pool_before_ = pc_limit;
3683 } 3696 }
3684 3697
3685 if (next_buffer_check_ < no_const_pool_before_) { 3698 if (next_buffer_check_ < no_const_pool_before_) {
3686 next_buffer_check_ = no_const_pool_before_; 3699 next_buffer_check_ = no_const_pool_before_;
3687 } 3700 }
3688 } 3701 }
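
The blocking window above is plain arithmetic on pc_offset(); a sketch with hypothetical numbers (not the real API):

// After BlockConstPoolFor(2) at pc_offset() == 0x100, the pool may not start
// before 0x100 + 2 * kInstrSize == 0x108, so e.g. the movw/movt pair emitted by
// move_32_bit_immediate cannot be split by a pool dump.
int NoConstPoolBefore(int pc_offset, int instructions) {
  const int kInstrSize = 4;  // ARM instruction width
  return pc_offset + instructions * kInstrSize;
}
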
3689 3702
3690 3703
3691 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { 3704 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
3692 if (FLAG_enable_ool_constant_pool) { 3705 if (FLAG_enable_embedded_constant_pool) {
3693 // Should be a no-op if using an out-of-line constant pool. 3706 // Should be a no-op if using an embedded constant pool.
3694 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3707 DCHECK(num_pending_32_bit_constants_ == 0);
3695 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3708 DCHECK(num_pending_64_bit_constants_ == 0);
3696 return; 3709 return;
3697 } 3710 }
3698 3711
3699 // Some short sequence of instruction mustn't be broken up by constant pool 3712 // Some short sequence of instruction mustn't be broken up by constant pool
3700 // emission, such sequences are protected by calls to BlockConstPoolFor and 3713 // emission, such sequences are protected by calls to BlockConstPoolFor and
3701 // BlockConstPoolScope. 3714 // BlockConstPoolScope.
3702 if (is_const_pool_blocked()) { 3715 if (is_const_pool_blocked()) {
3703 // Something is wrong if emission is forced and blocked at the same time. 3716 // Something is wrong if emission is forced and blocked at the same time.
3704 DCHECK(!force_emit); 3717 DCHECK(!force_emit);
3705 return; 3718 return;
3706 } 3719 }
3707 3720
3708 // There is nothing to do if there are no pending constant pool entries. 3721 // There is nothing to do if there are no pending constant pool entries.
3709 if ((num_pending_32_bit_reloc_info_ == 0) && 3722 if ((num_pending_32_bit_constants_ == 0) &&
3710 (num_pending_64_bit_reloc_info_ == 0)) { 3723 (num_pending_64_bit_constants_ == 0)) {
3711 // Calculate the offset of the next check. 3724 // Calculate the offset of the next check.
3712 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3725 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3713 return; 3726 return;
3714 } 3727 }
3715 3728
3716 // Check that the code buffer is large enough before emitting the constant 3729 // Check that the code buffer is large enough before emitting the constant
3717 // pool (include the jump over the pool and the constant pool marker and 3730 // pool (include the jump over the pool and the constant pool marker and
3718 // the gap to the relocation information). 3731 // the gap to the relocation information).
3719 int jump_instr = require_jump ? kInstrSize : 0; 3732 int jump_instr = require_jump ? kInstrSize : 0;
3720 int size_up_to_marker = jump_instr + kInstrSize; 3733 int size_up_to_marker = jump_instr + kInstrSize;
3721 int size_after_marker = num_pending_32_bit_reloc_info_ * kPointerSize; 3734 int size_after_marker = num_pending_32_bit_constants_ * kPointerSize;
3722 bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0); 3735 bool has_fp_values = (num_pending_64_bit_constants_ > 0);
3723 bool require_64_bit_align = false; 3736 bool require_64_bit_align = false;
3724 if (has_fp_values) { 3737 if (has_fp_values) {
3725 require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7); 3738 require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7);
3726 if (require_64_bit_align) { 3739 if (require_64_bit_align) {
3727 size_after_marker += kInstrSize; 3740 size_after_marker += kInstrSize;
3728 } 3741 }
3729 size_after_marker += num_pending_64_bit_reloc_info_ * kDoubleSize; 3742 size_after_marker += num_pending_64_bit_constants_ * kDoubleSize;
3730 } 3743 }
3731 3744
3732 int size = size_up_to_marker + size_after_marker; 3745 int size = size_up_to_marker + size_after_marker;
3733 3746
3734 // We emit a constant pool when: 3747 // We emit a constant pool when:
3735 // * requested to do so by parameter force_emit (e.g. after each function). 3748 // * requested to do so by parameter force_emit (e.g. after each function).
3736 // * the distance from the first instruction accessing the constant pool to 3749 // * the distance from the first instruction accessing the constant pool to
3737 // any of the constant pool entries will exceed its limit the next 3750 // any of the constant pool entries will exceed its limit the next
3738 // time the pool is checked. This is overly restrictive, but we don't emit 3751 // time the pool is checked. This is overly restrictive, but we don't emit
3739 // constant pool entries in-order so it's conservatively correct. 3752 // constant pool entries in-order so it's conservatively correct.
3740 // * the instruction doesn't require a jump after itself to jump over the 3753 // * the instruction doesn't require a jump after itself to jump over the
3741 // constant pool, and we're getting close to running out of range. 3754 // constant pool, and we're getting close to running out of range.
3742 if (!force_emit) { 3755 if (!force_emit) {
3743 DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0)); 3756 DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0));
3744 bool need_emit = false; 3757 bool need_emit = false;
3745 if (has_fp_values) { 3758 if (has_fp_values) {
3746 int dist64 = pc_offset() + 3759 int dist64 = pc_offset() + size -
3747 size - 3760 num_pending_32_bit_constants_ * kPointerSize -
3748 num_pending_32_bit_reloc_info_ * kPointerSize -
3749 first_const_pool_64_use_; 3761 first_const_pool_64_use_;
3750 if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) || 3762 if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) ||
3751 (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) { 3763 (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) {
3752 need_emit = true; 3764 need_emit = true;
3753 } 3765 }
3754 } 3766 }
3755 int dist32 = 3767 int dist32 =
3756 pc_offset() + size - first_const_pool_32_use_; 3768 pc_offset() + size - first_const_pool_32_use_;
3757 if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) || 3769 if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) ||
3758 (!require_jump && (dist32 >= kMaxDistToIntPool / 2))) { 3770 (!require_jump && (dist32 >= kMaxDistToIntPool / 2))) {
(...skipping 21 matching lines...)
3780 // The data size helps disassembly know what to print. 3792 // The data size helps disassembly know what to print.
3781 emit(kConstantPoolMarker | 3793 emit(kConstantPoolMarker |
3782 EncodeConstantPoolLength(size_after_marker / kPointerSize)); 3794 EncodeConstantPoolLength(size_after_marker / kPointerSize));
3783 3795
3784 if (require_64_bit_align) { 3796 if (require_64_bit_align) {
3785 emit(kConstantPoolMarker); 3797 emit(kConstantPoolMarker);
3786 } 3798 }
3787 3799
3788 // Emit 64-bit constant pool entries first: their range is smaller than 3800 // Emit 64-bit constant pool entries first: their range is smaller than
3789 // 32-bit entries. 3801 // 32-bit entries.
3790 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { 3802 for (int i = 0; i < num_pending_64_bit_constants_; i++) {
3791 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; 3803 ConstantPoolEntry& entry = pending_64_bit_constants_[i];
3792 3804
3793 DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment. 3805 DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment.
3794 3806
3795 Instr instr = instr_at(rinfo.pc()); 3807 Instr instr = instr_at(entry.position());
3796 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0. 3808 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
3797 DCHECK((IsVldrDPcImmediateOffset(instr) && 3809 DCHECK((IsVldrDPcImmediateOffset(instr) &&
3798 GetVldrDRegisterImmediateOffset(instr) == 0)); 3810 GetVldrDRegisterImmediateOffset(instr) == 0));
3799 3811
3800 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3812 int delta = pc_offset() - entry.position() - kPcLoadDelta;
3801 DCHECK(is_uint10(delta)); 3813 DCHECK(is_uint10(delta));
3802 3814
3803 bool found = false; 3815 bool found = false;
3804 uint64_t value = rinfo.raw_data64(); 3816 uint64_t value = entry.value64();
3805 for (int j = 0; j < i; j++) { 3817 for (int j = 0; j < i; j++) {
3806 RelocInfo& rinfo2 = pending_64_bit_reloc_info_[j]; 3818 ConstantPoolEntry& entry2 = pending_64_bit_constants_[j];
3807 if (value == rinfo2.raw_data64()) { 3819 if (value == entry2.value64()) {
3808 found = true; 3820 found = true;
3809 DCHECK(rinfo2.rmode() == RelocInfo::NONE64); 3821 Instr instr2 = instr_at(entry2.position());
3810 Instr instr2 = instr_at(rinfo2.pc());
3811 DCHECK(IsVldrDPcImmediateOffset(instr2)); 3822 DCHECK(IsVldrDPcImmediateOffset(instr2));
3812 delta = GetVldrDRegisterImmediateOffset(instr2); 3823 delta = GetVldrDRegisterImmediateOffset(instr2);
3813 delta += rinfo2.pc() - rinfo.pc(); 3824 delta += entry2.position() - entry.position();
3814 break; 3825 break;
3815 } 3826 }
3816 } 3827 }
3817 3828
3818 instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta)); 3829 instr_at_put(entry.position(),
3830 SetVldrDRegisterImmediateOffset(instr, delta));
3819 3831
3820 if (!found) { 3832 if (!found) {
3821 uint64_t uint_data = rinfo.raw_data64(); 3833 dq(entry.value64());
3822 emit(uint_data & 0xFFFFFFFF);
3823 emit(uint_data >> 32);
3824 } 3834 }
3825 } 3835 }
3826 3836
3827 // Emit 32-bit constant pool entries. 3837 // Emit 32-bit constant pool entries.
3828 for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) { 3838 for (int i = 0; i < num_pending_32_bit_constants_; i++) {
3829 RelocInfo& rinfo = pending_32_bit_reloc_info_[i]; 3839 ConstantPoolEntry& entry = pending_32_bit_constants_[i];
3830 DCHECK(rinfo.rmode() != RelocInfo::COMMENT && 3840 Instr instr = instr_at(entry.position());
3831 rinfo.rmode() != RelocInfo::POSITION &&
3832 rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
3833 rinfo.rmode() != RelocInfo::CONST_POOL &&
3834 rinfo.rmode() != RelocInfo::NONE64);
3835
3836 Instr instr = instr_at(rinfo.pc());
3837 3841
3838 // 64-bit loads shouldn't get here. 3842 // 64-bit loads shouldn't get here.
3839 DCHECK(!IsVldrDPcImmediateOffset(instr)); 3843 DCHECK(!IsVldrDPcImmediateOffset(instr));
3840 3844
3841 if (IsLdrPcImmediateOffset(instr) && 3845 if (IsLdrPcImmediateOffset(instr) &&
3842 GetLdrRegisterImmediateOffset(instr) == 0) { 3846 GetLdrRegisterImmediateOffset(instr) == 0) {
3843 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3847 int delta = pc_offset() - entry.position() - kPcLoadDelta;
3844 DCHECK(is_uint12(delta)); 3848 DCHECK(is_uint12(delta));
3845 // 0 is the smallest delta: 3849 // 0 is the smallest delta:
3846 // ldr rd, [pc, #0] 3850 // ldr rd, [pc, #0]
3847 // constant pool marker 3851 // constant pool marker
3848 // data 3852 // data
3849 3853
3850 bool found = false; 3854 bool found = false;
3851 if (!serializer_enabled() && rinfo.rmode() >= RelocInfo::CELL) { 3855 if (entry.sharing_ok()) {
3852 for (int j = 0; j < i; j++) { 3856 for (int j = 0; j < i; j++) {
3853 RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; 3857 ConstantPoolEntry& entry2 = pending_32_bit_constants_[j];
3854 3858
3855 if ((rinfo2.data() == rinfo.data()) && 3859 if (entry2.value() == entry.value()) {
3856 (rinfo2.rmode() == rinfo.rmode())) { 3860 Instr instr2 = instr_at(entry2.position());
3857 Instr instr2 = instr_at(rinfo2.pc());
3858 if (IsLdrPcImmediateOffset(instr2)) { 3861 if (IsLdrPcImmediateOffset(instr2)) {
3859 delta = GetLdrRegisterImmediateOffset(instr2); 3862 delta = GetLdrRegisterImmediateOffset(instr2);
3860 delta += rinfo2.pc() - rinfo.pc(); 3863 delta += entry2.position() - entry.position();
3861 found = true; 3864 found = true;
3862 break; 3865 break;
3863 } 3866 }
3864 } 3867 }
3865 } 3868 }
3866 } 3869 }
3867 3870
3868 instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta)); 3871 instr_at_put(entry.position(),
3872 SetLdrRegisterImmediateOffset(instr, delta));
3869 3873
3870 if (!found) { 3874 if (!found) {
3871 emit(rinfo.data()); 3875 dp(entry.value());
3872 } 3876 }
3873 } else { 3877 } else {
3874 DCHECK(IsMovW(instr)); 3878 DCHECK(IsMovW(instr));
3875 } 3879 }
3876 } 3880 }
3877 3881
3878 num_pending_32_bit_reloc_info_ = 0; 3882 num_pending_32_bit_constants_ = 0;
3879 num_pending_64_bit_reloc_info_ = 0; 3883 num_pending_64_bit_constants_ = 0;
3880 first_const_pool_32_use_ = -1; 3884 first_const_pool_32_use_ = -1;
3881 first_const_pool_64_use_ = -1; 3885 first_const_pool_64_use_ = -1;
3882 3886
3883 RecordComment("]"); 3887 RecordComment("]");
3884 3888
3885 if (after_pool.is_linked()) { 3889 if (after_pool.is_linked()) {
3886 bind(&after_pool); 3890 bind(&after_pool);
3887 } 3891 }
3888 } 3892 }
3889 3893
3890 // Since a constant pool was just emitted, move the check offset forward by 3894 // Since a constant pool was just emitted, move the check offset forward by
3891 // the standard interval. 3895 // the standard interval.
3892 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3896 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3893 } 3897 }
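
When CheckConstPool does emit, the layout is: an optional branch over the pool, a marker word encoding the pool length, an optional alignment word, then the 64-bit entries followed by the 32-bit entries. A hypothetical size computation matching the code above (ARM: kInstrSize == 4, kPointerSize == 4, kDoubleSize == 8):

int PoolSize(bool require_jump, bool require_64_bit_align, int n32, int n64) {
  int size = (require_jump ? 4 : 0) + 4;  // jump over the pool + marker word
  if (require_64_bit_align) size += 4;    // pad so doubles are 8-byte aligned
  size += n64 * 8 + n32 * 4;              // 64-bit entries first, then 32-bit
  return size;
}
// PoolSize(true, true, /*n32=*/1, /*n64=*/1) == 24 bytes.
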
3894 3898
3895 3899
3896 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { 3900 void Assembler::PatchConstantPoolAccessInstruction(
3897 if (!FLAG_enable_ool_constant_pool) { 3901 int pc_offset, int offset, ConstantPoolEntry::Access access,
3898 return isolate->factory()->empty_constant_pool_array(); 3902 ConstantPoolEntry::Type type) {
3899 } 3903 DCHECK(FLAG_enable_embedded_constant_pool);
3900 return constant_pool_builder_.New(isolate); 3904 Address pc = buffer_ + pc_offset;
3901 }
3902 3905
3903 3906 // Patch vldr/ldr instruction with correct offset.
3904 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { 3907 Instr instr = instr_at(pc);
3905 constant_pool_builder_.Populate(this, constant_pool); 3908 if (access == ConstantPoolEntry::OVERFLOWED) {
3906 } 3909 if (CpuFeatures::IsSupported(ARMv7)) {
3907 3910 // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
3908 3911 Instr next_instr = instr_at(pc + kInstrSize);
3909 ConstantPoolBuilder::ConstantPoolBuilder() 3912 DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0));
3910 : entries_(), current_section_(ConstantPoolArray::SMALL_SECTION) {} 3913 DCHECK((IsMovT(next_instr) &&
3911 3914 Instruction::ImmedMovwMovtValue(next_instr) == 0));
3912 3915 instr_at_put(pc, PatchMovwImmediate(instr, offset & 0xffff));
3913 bool ConstantPoolBuilder::IsEmpty() { 3916 instr_at_put(pc + kInstrSize,
3914 return entries_.size() == 0; 3917 PatchMovwImmediate(next_instr, offset >> 16));
3915 } 3918 } else {
3916 3919 // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
3917 3920 Instr instr_2 = instr_at(pc + kInstrSize);
3918 ConstantPoolArray::Type ConstantPoolBuilder::GetConstantPoolType( 3921 Instr instr_3 = instr_at(pc + 2 * kInstrSize);
3919 RelocInfo::Mode rmode) { 3922 Instr instr_4 = instr_at(pc + 3 * kInstrSize);
3920 if (rmode == RelocInfo::NONE64) { 3923 DCHECK((IsMovImmed(instr) && Instruction::Immed8Value(instr) == 0));
3921 return ConstantPoolArray::INT64; 3924 DCHECK((IsOrrImmed(instr_2) && Instruction::Immed8Value(instr_2) == 0) &&
3922 } else if (!RelocInfo::IsGCRelocMode(rmode)) { 3925 GetRn(instr_2).is(GetRd(instr_2)));
3923 return ConstantPoolArray::INT32; 3926 DCHECK((IsOrrImmed(instr_3) && Instruction::Immed8Value(instr_3) == 0) &&
3924 } else if (RelocInfo::IsCodeTarget(rmode)) { 3927 GetRn(instr_3).is(GetRd(instr_3)));
3925 return ConstantPoolArray::CODE_PTR; 3928 DCHECK((IsOrrImmed(instr_4) && Instruction::Immed8Value(instr_4) == 0) &&
3929 GetRn(instr_4).is(GetRd(instr_4)));
3930 instr_at_put(pc, PatchShiftImm(instr, (offset & kImm8Mask)));
3931 instr_at_put(pc + kInstrSize,
3932 PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
3933 instr_at_put(pc + 2 * kInstrSize,
3934 PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
3935 instr_at_put(pc + 3 * kInstrSize,
3936 PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
3937 }
3938 } else if (type == ConstantPoolEntry::DOUBLE) {
3939 // Instruction to patch must be 'vldr rd, [pp, #0]'.
3940 DCHECK((IsVldrDPpImmediateOffset(instr) &&
3941 GetVldrDRegisterImmediateOffset(instr) == 0));
3942 DCHECK(is_uint10(offset));
3943 instr_at_put(pc, SetVldrDRegisterImmediateOffset(instr, offset));
3926 } else { 3944 } else {
3927 DCHECK(RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode)); 3945 // Instruction to patch must be 'ldr rd, [pp, #0]'.
3928 return ConstantPoolArray::HEAP_PTR; 3946 DCHECK((IsLdrPpImmediateOffset(instr) &&
3947 GetLdrRegisterImmediateOffset(instr) == 0));
3948 DCHECK(is_uint12(offset));
3949 instr_at_put(pc, SetLdrRegisterImmediateOffset(instr, offset));
3929 } 3950 }
3930 } 3951 }
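
The non-overflowed branches above rely on the entry being reachable by a single pp-relative load. A small sketch of the ranges implied by the is_uint12/is_uint10 checks (hypothetical helper, not the real API):

// ldr rd, [pp, #offset] takes a 12-bit byte offset; for vldr the code checks
// is_uint10 on the byte offset (the encoding itself stores an 8-bit field
// scaled by 4, so double entries stay word-aligned and within 1020 bytes).
bool RegularEntryInRange(int offset, bool is_double) {
  return is_double ? (offset >= 0 && offset < 1024)
                   : (offset >= 0 && offset < 4096);
}
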
3931 3952
3932
3933 ConstantPoolArray::LayoutSection ConstantPoolBuilder::AddEntry(
3934 Assembler* assm, const RelocInfo& rinfo) {
3935 RelocInfo::Mode rmode = rinfo.rmode();
3936 DCHECK(rmode != RelocInfo::COMMENT &&
3937 rmode != RelocInfo::POSITION &&
3938 rmode != RelocInfo::STATEMENT_POSITION &&
3939 rmode != RelocInfo::CONST_POOL);
3940
3941 // Try to merge entries which won't be patched.
3942 int merged_index = -1;
3943 ConstantPoolArray::LayoutSection entry_section = current_section_;
3944 if (RelocInfo::IsNone(rmode) ||
3945 (!assm->serializer_enabled() && (rmode >= RelocInfo::CELL))) {
3946 size_t i;
3947 std::vector<ConstantPoolEntry>::const_iterator it;
3948 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
3949 if (RelocInfo::IsEqual(rinfo, it->rinfo_)) {
3950 // Merge with found entry.
3951 merged_index = i;
3952 entry_section = entries_[i].section_;
3953 break;
3954 }
3955 }
3956 }
3957 DCHECK(entry_section <= current_section_);
3958 entries_.push_back(ConstantPoolEntry(rinfo, entry_section, merged_index));
3959
3960 if (merged_index == -1) {
3961 // Not merged, so update the appropriate count.
3962 number_of_entries_[entry_section].increment(GetConstantPoolType(rmode));
3963 }
3964
3965 // Check if we still have room for another entry in the small section
3966 // given Arm's ldr and vldr immediate offset range.
3967 if (current_section_ == ConstantPoolArray::SMALL_SECTION &&
3968 !(is_uint12(ConstantPoolArray::SizeFor(*small_entries())) &&
3969 is_uint10(ConstantPoolArray::MaxInt64Offset(
3970 small_entries()->count_of(ConstantPoolArray::INT64))))) {
3971 current_section_ = ConstantPoolArray::EXTENDED_SECTION;
3972 }
3973 return entry_section;
3974 }
3975
3976
3977 void ConstantPoolBuilder::Relocate(int pc_delta) {
3978 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
3979 entry != entries_.end(); entry++) {
3980 DCHECK(entry->rinfo_.rmode() != RelocInfo::JS_RETURN);
3981 entry->rinfo_.set_pc(entry->rinfo_.pc() + pc_delta);
3982 }
3983 }
3984
3985
3986 Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) {
3987 if (IsEmpty()) {
3988 return isolate->factory()->empty_constant_pool_array();
3989 } else if (extended_entries()->is_empty()) {
3990 return isolate->factory()->NewConstantPoolArray(*small_entries());
3991 } else {
3992 DCHECK(current_section_ == ConstantPoolArray::EXTENDED_SECTION);
3993 return isolate->factory()->NewExtendedConstantPoolArray(
3994 *small_entries(), *extended_entries());
3995 }
3996 }
3997
3998
3999 void ConstantPoolBuilder::Populate(Assembler* assm,
4000 ConstantPoolArray* constant_pool) {
4001 DCHECK_EQ(extended_entries()->is_empty(),
4002 !constant_pool->is_extended_layout());
4003 DCHECK(small_entries()->equals(ConstantPoolArray::NumberOfEntries(
4004 constant_pool, ConstantPoolArray::SMALL_SECTION)));
4005 if (constant_pool->is_extended_layout()) {
4006 DCHECK(extended_entries()->equals(ConstantPoolArray::NumberOfEntries(
4007 constant_pool, ConstantPoolArray::EXTENDED_SECTION)));
4008 }
4009
4010 // Set up initial offsets.
4011 int offsets[ConstantPoolArray::NUMBER_OF_LAYOUT_SECTIONS]
4012 [ConstantPoolArray::NUMBER_OF_TYPES];
4013 for (int section = 0; section <= constant_pool->final_section(); section++) {
4014 int section_start = (section == ConstantPoolArray::EXTENDED_SECTION)
4015 ? small_entries()->total_count()
4016 : 0;
4017 for (int i = 0; i < ConstantPoolArray::NUMBER_OF_TYPES; i++) {
4018 ConstantPoolArray::Type type = static_cast<ConstantPoolArray::Type>(i);
4019 if (number_of_entries_[section].count_of(type) != 0) {
4020 offsets[section][type] = constant_pool->OffsetOfElementAt(
4021 number_of_entries_[section].base_of(type) + section_start);
4022 }
4023 }
4024 }
4025
4026 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
4027 entry != entries_.end(); entry++) {
4028 RelocInfo rinfo = entry->rinfo_;
4029 RelocInfo::Mode rmode = entry->rinfo_.rmode();
4030 ConstantPoolArray::Type type = GetConstantPoolType(rmode);
4031
4032 // Update constant pool if necessary and get the entry's offset.
4033 int offset;
4034 if (entry->merged_index_ == -1) {
4035 offset = offsets[entry->section_][type];
4036 offsets[entry->section_][type] += ConstantPoolArray::entry_size(type);
4037 if (type == ConstantPoolArray::INT64) {
4038 constant_pool->set_at_offset(offset, rinfo.data64());
4039 } else if (type == ConstantPoolArray::INT32) {
4040 constant_pool->set_at_offset(offset,
4041 static_cast<int32_t>(rinfo.data()));
4042 } else if (type == ConstantPoolArray::CODE_PTR) {
4043 constant_pool->set_at_offset(offset,
4044 reinterpret_cast<Address>(rinfo.data()));
4045 } else {
4046 DCHECK(type == ConstantPoolArray::HEAP_PTR);
4047 constant_pool->set_at_offset(offset,
4048 reinterpret_cast<Object*>(rinfo.data()));
4049 }
4050 offset -= kHeapObjectTag;
4051 entry->merged_index_ = offset; // Stash offset for merged entries.
4052 } else {
4053 DCHECK(entry->merged_index_ < (entry - entries_.begin()));
4054 offset = entries_[entry->merged_index_].merged_index_;
4055 }
4056
4057 // Patch vldr/ldr instruction with correct offset.
4058 Instr instr = assm->instr_at(rinfo.pc());
4059 if (entry->section_ == ConstantPoolArray::EXTENDED_SECTION) {
4060 if (CpuFeatures::IsSupported(ARMv7)) {
4061 // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
4062 Instr next_instr = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
4063 DCHECK((Assembler::IsMovW(instr) &&
4064 Instruction::ImmedMovwMovtValue(instr) == 0));
4065 DCHECK((Assembler::IsMovT(next_instr) &&
4066 Instruction::ImmedMovwMovtValue(next_instr) == 0));
4067 assm->instr_at_put(
4068 rinfo.pc(), Assembler::PatchMovwImmediate(instr, offset & 0xffff));
4069 assm->instr_at_put(
4070 rinfo.pc() + Assembler::kInstrSize,
4071 Assembler::PatchMovwImmediate(next_instr, offset >> 16));
4072 } else {
4073 // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
4074 Instr instr_2 = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
4075 Instr instr_3 = assm->instr_at(rinfo.pc() + 2 * Assembler::kInstrSize);
4076 Instr instr_4 = assm->instr_at(rinfo.pc() + 3 * Assembler::kInstrSize);
4077 DCHECK((Assembler::IsMovImmed(instr) &&
4078 Instruction::Immed8Value(instr) == 0));
4079 DCHECK((Assembler::IsOrrImmed(instr_2) &&
4080 Instruction::Immed8Value(instr_2) == 0) &&
4081 Assembler::GetRn(instr_2).is(Assembler::GetRd(instr_2)));
4082 DCHECK((Assembler::IsOrrImmed(instr_3) &&
4083 Instruction::Immed8Value(instr_3) == 0) &&
4084 Assembler::GetRn(instr_3).is(Assembler::GetRd(instr_3)));
4085 DCHECK((Assembler::IsOrrImmed(instr_4) &&
4086 Instruction::Immed8Value(instr_4) == 0) &&
4087 Assembler::GetRn(instr_4).is(Assembler::GetRd(instr_4)));
4088 assm->instr_at_put(
4089 rinfo.pc(), Assembler::PatchShiftImm(instr, (offset & kImm8Mask)));
4090 assm->instr_at_put(
4091 rinfo.pc() + Assembler::kInstrSize,
4092 Assembler::PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
4093 assm->instr_at_put(
4094 rinfo.pc() + 2 * Assembler::kInstrSize,
4095 Assembler::PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
4096 assm->instr_at_put(
4097 rinfo.pc() + 3 * Assembler::kInstrSize,
4098 Assembler::PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
4099 }
4100 } else if (type == ConstantPoolArray::INT64) {
4101 // Instruction to patch must be 'vldr rd, [pp, #0]'.
4102 DCHECK((Assembler::IsVldrDPpImmediateOffset(instr) &&
4103 Assembler::GetVldrDRegisterImmediateOffset(instr) == 0));
4104 DCHECK(is_uint10(offset));
4105 assm->instr_at_put(rinfo.pc(), Assembler::SetVldrDRegisterImmediateOffset(
4106 instr, offset));
4107 } else {
4108 // Instruction to patch must be 'ldr rd, [pp, #0]'.
4109 DCHECK((Assembler::IsLdrPpImmediateOffset(instr) &&
4110 Assembler::GetLdrRegisterImmediateOffset(instr) == 0));
4111 DCHECK(is_uint12(offset));
4112 assm->instr_at_put(
4113 rinfo.pc(), Assembler::SetLdrRegisterImmediateOffset(instr, offset));
4114 }
4115 }
4116 }
4117
4118 3953
4119 } } // namespace v8::internal 3954 } } // namespace v8::internal
4120 3955
4121 #endif // V8_TARGET_ARCH_ARM 3956 #endif // V8_TARGET_ARCH_ARM