Chromium Code Reviews

Side by Side Diff: src/arm/assembler-arm.cc

Issue 1131783003: Embedded constant pools. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 216 matching lines...)
227 // ----------------------------------------------------------------------------- 227 // -----------------------------------------------------------------------------
228 // Implementation of RelocInfo 228 // Implementation of RelocInfo
229 229
230 // static 230 // static
231 const int RelocInfo::kApplyMask = 0; 231 const int RelocInfo::kApplyMask = 0;
232 232
233 233
234 bool RelocInfo::IsCodedSpecially() { 234 bool RelocInfo::IsCodedSpecially() {
235 // The deserializer needs to know whether a pointer is specially coded.  Being 235 // The deserializer needs to know whether a pointer is specially coded.  Being
236 // specially coded on ARM means that it is a movw/movt instruction, or is an 236 // specially coded on ARM means that it is a movw/movt instruction, or is an
237 // out of line constant pool entry.  These only occur if 237 // embedded constant pool entry.  These only occur if
238 // FLAG_enable_ool_constant_pool is true. 238 // FLAG_enable_embedded_constant_pool is true.
239 return FLAG_enable_ool_constant_pool; 239 return FLAG_enable_embedded_constant_pool;
240 } 240 }
241 241
242 242
243 bool RelocInfo::IsInConstantPool() { 243 bool RelocInfo::IsInConstantPool() {
244 return Assembler::is_constant_pool_load(pc_); 244 return Assembler::is_constant_pool_load(pc_);
245 } 245 }
246 246
247 247
248 // ----------------------------------------------------------------------------- 248 // -----------------------------------------------------------------------------
249 // Implementation of Operand and MemOperand 249 // Implementation of Operand and MemOperand
(...skipping 192 matching lines...)
442 const Instr kLdrRegFpNegOffsetPattern = 442 const Instr kLdrRegFpNegOffsetPattern =
443 al | B26 | L | NegOffset | kRegister_fp_Code * B16; 443 al | B26 | L | NegOffset | kRegister_fp_Code * B16;
444 const Instr kStrRegFpNegOffsetPattern = 444 const Instr kStrRegFpNegOffsetPattern =
445 al | B26 | NegOffset | kRegister_fp_Code * B16; 445 al | B26 | NegOffset | kRegister_fp_Code * B16;
446 const Instr kLdrStrInstrTypeMask = 0xffff0000; 446 const Instr kLdrStrInstrTypeMask = 0xffff0000;
447 447
448 448
449 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) 449 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
450 : AssemblerBase(isolate, buffer, buffer_size), 450 : AssemblerBase(isolate, buffer, buffer_size),
451 recorded_ast_id_(TypeFeedbackId::None()), 451 recorded_ast_id_(TypeFeedbackId::None()),
452 constant_pool_builder_(), 452 constant_pool_builder_(12, 10),
rmcilroy 2015/05/20 14:32:10 make the '12' and '10' constants defined in const
MTBrandyberry 2015/05/20 22:28:21 Done.
453 positions_recorder_(this) { 453 positions_recorder_(this) {
454 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); 454 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
455 num_pending_32_bit_reloc_info_ = 0; 455 num_pending_32_bit_constants_ = 0;
456 num_pending_64_bit_reloc_info_ = 0; 456 num_pending_64_bit_constants_ = 0;
457 next_buffer_check_ = 0; 457 next_buffer_check_ = 0;
458 const_pool_blocked_nesting_ = 0; 458 const_pool_blocked_nesting_ = 0;
459 no_const_pool_before_ = 0; 459 no_const_pool_before_ = 0;
460 first_const_pool_32_use_ = -1; 460 first_const_pool_32_use_ = -1;
461 first_const_pool_64_use_ = -1; 461 first_const_pool_64_use_ = -1;
462 last_bound_pos_ = 0; 462 last_bound_pos_ = 0;
463 ClearRecordedAstId(); 463 ClearRecordedAstId();
464 } 464 }
465 465
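The (12, 10) arguments discussed in the comment thread above correspond to the offset ranges enforced later in this patch: is_uint12 for integer ldr offsets from pp and is_uint10 for vldr double offsets. A minimal sketch of the reviewer's suggestion to name them, with illustrative identifiers (not necessarily the names adopted in the follow-up patch set):

static const int kLdrMaxReachBits = 12;   // ldr rd, [pp, #offset] reach (is_uint12)
static const int kVldrMaxReachBits = 10;  // vldr dd, [pp, #offset] reach (is_uint10)

Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
    : AssemblerBase(isolate, buffer, buffer_size),
      recorded_ast_id_(TypeFeedbackId::None()),
      constant_pool_builder_(kLdrMaxReachBits, kVldrMaxReachBits),
      positions_recorder_(this) {
  // ... body unchanged ...
}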
466 466
467 Assembler::~Assembler() { 467 Assembler::~Assembler() {
468 DCHECK(const_pool_blocked_nesting_ == 0); 468 DCHECK(const_pool_blocked_nesting_ == 0);
469 } 469 }
470 470
471 471
472 void Assembler::GetCode(CodeDesc* desc) { 472 void Assembler::GetCode(CodeDesc* desc) {
473 reloc_info_writer.Finish(); 473 reloc_info_writer.Finish();
474 if (!FLAG_enable_ool_constant_pool) { 474
475 // Emit constant pool if necessary. 475 // Emit constant pool if necessary.
476 int offset = 0;
477 if (FLAG_enable_embedded_constant_pool) {
478 offset = EmitConstantPool();
479 } else {
476 CheckConstPool(true, false); 480 CheckConstPool(true, false);
477 DCHECK(num_pending_32_bit_reloc_info_ == 0); 481 DCHECK(num_pending_32_bit_constants_ == 0);
478 DCHECK(num_pending_64_bit_reloc_info_ == 0); 482 DCHECK(num_pending_64_bit_constants_ == 0);
479 } 483 }
480 // Set up code descriptor. 484 // Set up code descriptor.
481 desc->buffer = buffer_; 485 desc->buffer = buffer_;
482 desc->buffer_size = buffer_size_; 486 desc->buffer_size = buffer_size_;
483 desc->instr_size = pc_offset(); 487 desc->instr_size = pc_offset();
484 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); 488 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
489 desc->constant_pool_size = (offset ? desc->instr_size - offset : 0);
485 desc->origin = this; 490 desc->origin = this;
486 } 491 }
487 492
488 493
489 void Assembler::Align(int m) { 494 void Assembler::Align(int m) {
490 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); 495 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m));
491 while ((pc_offset() & (m - 1)) != 0) { 496 while ((pc_offset() & (m - 1)) != 0) {
492 nop(); 497 nop();
493 } 498 }
494 } 499 }
(...skipping 121 matching lines...)
616 621
617 622
618 Register Assembler::GetRm(Instr instr) { 623 Register Assembler::GetRm(Instr instr) {
619 Register reg; 624 Register reg;
620 reg.code_ = Instruction::RmValue(instr); 625 reg.code_ = Instruction::RmValue(instr);
621 return reg; 626 return reg;
622 } 627 }
623 628
624 629
625 Instr Assembler::GetConsantPoolLoadPattern() { 630 Instr Assembler::GetConsantPoolLoadPattern() {
626 if (FLAG_enable_ool_constant_pool) { 631 if (FLAG_enable_embedded_constant_pool) {
627 return kLdrPpImmedPattern; 632 return kLdrPpImmedPattern;
628 } else { 633 } else {
629 return kLdrPCImmedPattern; 634 return kLdrPCImmedPattern;
630 } 635 }
631 } 636 }
632 637
633 638
634 Instr Assembler::GetConsantPoolLoadMask() { 639 Instr Assembler::GetConsantPoolLoadMask() {
635 if (FLAG_enable_ool_constant_pool) { 640 if (FLAG_enable_embedded_constant_pool) {
636 return kLdrPpImmedMask; 641 return kLdrPpImmedMask;
637 } else { 642 } else {
638 return kLdrPCImmedMask; 643 return kLdrPCImmedMask;
639 } 644 }
640 } 645 }
641 646
642 647
643 bool Assembler::IsPush(Instr instr) { 648 bool Assembler::IsPush(Instr instr) {
644 return ((instr & ~kRdMask) == kPushRegPattern); 649 return ((instr & ~kRdMask) == kPushRegPattern);
645 } 650 }
(...skipping 391 matching lines...)
1037 return assembler->serializer_enabled(); 1042 return assembler->serializer_enabled();
1038 } else if (RelocInfo::IsNone(rmode_)) { 1043 } else if (RelocInfo::IsNone(rmode_)) {
1039 return false; 1044 return false;
1040 } 1045 }
1041 return true; 1046 return true;
1042 } 1047 }
1043 1048
1044 1049
1045 static bool use_mov_immediate_load(const Operand& x, 1050 static bool use_mov_immediate_load(const Operand& x,
1046 const Assembler* assembler) { 1051 const Assembler* assembler) {
1047 if (FLAG_enable_ool_constant_pool && assembler != NULL && 1052 if (FLAG_enable_embedded_constant_pool && assembler != NULL &&
1048 !assembler->is_ool_constant_pool_available()) { 1053 !assembler->is_constant_pool_available()) {
1049 return true; 1054 return true;
1050 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && 1055 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
1051 (assembler == NULL || !assembler->predictable_code_size())) { 1056 (assembler == NULL || !assembler->predictable_code_size())) {
1052 // Prefer movw / movt to constant pool if it is more efficient on the CPU. 1057 // Prefer movw / movt to constant pool if it is more efficient on the CPU.
1053 return true; 1058 return true;
1054 } else if (x.must_output_reloc_info(assembler)) { 1059 } else if (x.must_output_reloc_info(assembler)) {
1055 // Prefer constant pool if data is likely to be patched. 1060 // Prefer constant pool if data is likely to be patched.
1056 return false; 1061 return false;
1057 } else { 1062 } else {
1058 // Otherwise, use immediate load if movw / movt is available. 1063 // Otherwise, use immediate load if movw / movt is available.
1059 return CpuFeatures::IsSupported(ARMv7); 1064 return CpuFeatures::IsSupported(ARMv7);
1060 } 1065 }
1061 } 1066 }
1062 1067
1063 1068
1064 int Operand::instructions_required(const Assembler* assembler, 1069 int Operand::instructions_required(const Assembler* assembler,
1065 Instr instr) const { 1070 Instr instr) const {
1066 if (rm_.is_valid()) return 1; 1071 if (rm_.is_valid()) return 1;
1067 uint32_t dummy1, dummy2; 1072 uint32_t dummy1, dummy2;
1068 if (must_output_reloc_info(assembler) || 1073 if (must_output_reloc_info(assembler) ||
1069 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { 1074 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) {
1070 // The immediate operand cannot be encoded as a shifter operand, or use of 1075 // The immediate operand cannot be encoded as a shifter operand, or use of
1071 // constant pool is required. First account for the instructions required 1076 // constant pool is required. First account for the instructions required
1072 // for the constant pool or immediate load 1077 // for the constant pool or immediate load
1073 int instructions; 1078 int instructions;
1074 if (use_mov_immediate_load(*this, assembler)) { 1079 if (use_mov_immediate_load(*this, assembler)) {
1075 // A movw / movt or mov / orr immediate load. 1080 // A movw / movt or mov / orr immediate load.
1076 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4; 1081 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4;
1077 } else if (assembler != NULL && assembler->use_extended_constant_pool()) { 1082 } else if (assembler != NULL && assembler->ConstantPoolOverflow()) {
1078 // An extended constant pool load. 1083 // An extended constant pool load.
rmcilroy 2015/05/20 14:32:10 update comment.
MTBrandyberry 2015/05/20 22:28:21 Done.
1079 instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5; 1084 instructions = CpuFeatures::IsSupported(ARMv7) ? 3 : 5;
1080 } else { 1085 } else {
1081 // A small constant pool load. 1086 // A small constant pool load.
1082 instructions = 1; 1087 instructions = 1;
1083 } 1088 }
1084 1089
1085 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set 1090 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set
1086 // For a mov or mvn instruction which doesn't set the condition 1091 // For a mov or mvn instruction which doesn't set the condition
1087 // code, the constant pool or immediate load is enough, otherwise we need 1092 // code, the constant pool or immediate load is enough, otherwise we need
1088 // to account for the actual instruction being requested. 1093 // to account for the actual instruction being requested.
1089 instructions += 1; 1094 instructions += 1;
1090 } 1095 }
1091 return instructions; 1096 return instructions;
1092 } else { 1097 } else {
1093 // No use of constant pool and the immediate operand can be encoded as a 1098 // No use of constant pool and the immediate operand can be encoded as a
1094 // shifter operand. 1099 // shifter operand.
1095 return 1; 1100 return 1;
1096 } 1101 }
1097 } 1102 }
1098 1103
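To make the counting above concrete (a sketch of the rule only, not code from this change): on the embedded-constant-pool path with ARMv7, loading an operand from an overflowed pool costs a movw/movt pair for the offset plus an ldr from pp, i.e. 3 instructions, while a regular pool entry costs a single ldr, and any consuming instruction other than a plain non-flag-setting mov adds one more.

// Hypothetical helper mirroring only the constant-pool branch of
// instructions_required() on ARMv7; not part of this CL.
static int SketchPoolLoadCount(bool pool_overflowed, bool plain_mov_without_s) {
  int instructions = pool_overflowed ? 3 : 1;  // movw/movt + ldr  vs. a single ldr
  if (!plain_mov_without_s) {
    instructions += 1;  // the instruction that actually consumes the operand
  }
  return instructions;
}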
1099 1104
1100 void Assembler::move_32_bit_immediate(Register rd, 1105 void Assembler::move_32_bit_immediate(Register rd,
1101 const Operand& x, 1106 const Operand& x,
1102 Condition cond) { 1107 Condition cond) {
1103 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL);
1104 uint32_t imm32 = static_cast<uint32_t>(x.imm32_); 1108 uint32_t imm32 = static_cast<uint32_t>(x.imm32_);
1105 if (x.must_output_reloc_info(this)) { 1109 if (x.must_output_reloc_info(this)) {
1106 RecordRelocInfo(rinfo); 1110 RecordRelocInfo(x.rmode_);
1107 } 1111 }
1108 1112
1109 if (use_mov_immediate_load(x, this)) { 1113 if (use_mov_immediate_load(x, this)) {
1110 Register target = rd.code() == pc.code() ? ip : rd; 1114 Register target = rd.code() == pc.code() ? ip : rd;
1111 if (CpuFeatures::IsSupported(ARMv7)) { 1115 if (CpuFeatures::IsSupported(ARMv7)) {
1112 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { 1116 if (!FLAG_enable_embedded_constant_pool &&
1117 x.must_output_reloc_info(this)) {
1113 // Make sure the movw/movt doesn't get separated. 1118 // Make sure the movw/movt doesn't get separated.
1114 BlockConstPoolFor(2); 1119 BlockConstPoolFor(2);
1115 } 1120 }
1116 movw(target, imm32 & 0xffff, cond); 1121 movw(target, imm32 & 0xffff, cond);
1117 movt(target, imm32 >> 16, cond); 1122 movt(target, imm32 >> 16, cond);
1118 } else { 1123 } else {
1119 DCHECK(FLAG_enable_ool_constant_pool); 1124 DCHECK(FLAG_enable_embedded_constant_pool);
1120 mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond); 1125 mov(target, Operand(imm32 & kImm8Mask), LeaveCC, cond);
1121 orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond); 1126 orr(target, target, Operand(imm32 & (kImm8Mask << 8)), LeaveCC, cond);
1122 orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond); 1127 orr(target, target, Operand(imm32 & (kImm8Mask << 16)), LeaveCC, cond);
1123 orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond); 1128 orr(target, target, Operand(imm32 & (kImm8Mask << 24)), LeaveCC, cond);
1124 } 1129 }
1125 if (target.code() != rd.code()) { 1130 if (target.code() != rd.code()) {
1126 mov(rd, target, LeaveCC, cond); 1131 mov(rd, target, LeaveCC, cond);
1127 } 1132 }
1128 } else { 1133 } else {
1129 DCHECK(!FLAG_enable_ool_constant_pool || is_ool_constant_pool_available()); 1134 DCHECK(!FLAG_enable_embedded_constant_pool || is_constant_pool_available());
1130 ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); 1135 ConstantPoolEntry::Access access =
1131 if (section == ConstantPoolArray::EXTENDED_SECTION) { 1136 ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_);
1132 DCHECK(FLAG_enable_ool_constant_pool); 1137 if (access == ConstantPoolEntry::OVERFLOWED) {
1138 DCHECK(FLAG_enable_embedded_constant_pool);
1133 Register target = rd.code() == pc.code() ? ip : rd; 1139 Register target = rd.code() == pc.code() ? ip : rd;
1134 // Emit instructions to load constant pool offset. 1140 // Emit instructions to load constant pool offset.
1135 if (CpuFeatures::IsSupported(ARMv7)) { 1141 if (CpuFeatures::IsSupported(ARMv7)) {
1136 movw(target, 0, cond); 1142 movw(target, 0, cond);
1137 movt(target, 0, cond); 1143 movt(target, 0, cond);
1138 } else { 1144 } else {
1139 mov(target, Operand(0), LeaveCC, cond); 1145 mov(target, Operand(0), LeaveCC, cond);
1140 orr(target, target, Operand(0), LeaveCC, cond); 1146 orr(target, target, Operand(0), LeaveCC, cond);
1141 orr(target, target, Operand(0), LeaveCC, cond); 1147 orr(target, target, Operand(0), LeaveCC, cond);
1142 orr(target, target, Operand(0), LeaveCC, cond); 1148 orr(target, target, Operand(0), LeaveCC, cond);
1143 } 1149 }
1144 // Load from constant pool at offset. 1150 // Load from constant pool at offset.
1145 ldr(rd, MemOperand(pp, target), cond); 1151 ldr(rd, MemOperand(pp, target), cond);
1146 } else { 1152 } else {
1147 DCHECK(section == ConstantPoolArray::SMALL_SECTION); 1153 DCHECK(access == ConstantPoolEntry::REGULAR);
1148 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); 1154 ldr(rd, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0),
1155 cond);
1149 } 1156 }
1150 } 1157 }
1151 } 1158 }
1152 1159
1153 1160
1154 void Assembler::addrmod1(Instr instr, 1161 void Assembler::addrmod1(Instr instr,
1155 Register rn, 1162 Register rn,
1156 Register rd, 1163 Register rd,
1157 const Operand& x) { 1164 const Operand& x) {
1158 CheckBuffer(); 1165 CheckBuffer();
(...skipping 1388 matching lines...)
2547 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { 2554 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
2548 // The double can be encoded in the instruction. 2555 // The double can be encoded in the instruction.
2549 // 2556 //
2550 // Dd = immediate 2557 // Dd = immediate
2551 // Instruction details available in ARM DDI 0406C.b, A8-936. 2558 // Instruction details available in ARM DDI 0406C.b, A8-936.
2552 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | 2559 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) |
2553 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) 2560 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0)
2554 int vd, d; 2561 int vd, d;
2555 dst.split_code(&vd, &d); 2562 dst.split_code(&vd, &d);
2556 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); 2563 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
2557 } else if (FLAG_enable_vldr_imm && is_ool_constant_pool_available()) { 2564 } else if (FLAG_enable_vldr_imm && is_constant_pool_available()) {
2558 // TODO(jfb) Temporarily turned off until we have constant blinding or 2565 // TODO(jfb) Temporarily turned off until we have constant blinding or
2559 // some equivalent mitigation: an attacker can otherwise control 2566 // some equivalent mitigation: an attacker can otherwise control
2560 // generated data which also happens to be executable, a Very Bad 2567 // generated data which also happens to be executable, a Very Bad
2561 // Thing indeed. 2568 // Thing indeed.
2562 // Blinding gets tricky because we don't have xor, we probably 2569 // Blinding gets tricky because we don't have xor, we probably
2563 // need to add/subtract without losing precision, which requires a 2570 // need to add/subtract without losing precision, which requires a
2564 // cookie value that Lithium is probably better positioned to 2571 // cookie value that Lithium is probably better positioned to
2565 // choose. 2572 // choose.
2566 // We could also add a few peepholes here like detecting 0.0 and 2573 // We could also add a few peepholes here like detecting 0.0 and
2567 // -0.0 and doing a vmov from the sequestered d14, forcing denorms 2574 // -0.0 and doing a vmov from the sequestered d14, forcing denorms
2568 // to zero (we set flush-to-zero), and normalizing NaN values. 2575 // to zero (we set flush-to-zero), and normalizing NaN values.
2569 // We could also detect redundant values. 2576 // We could also detect redundant values.
2570 // The code could also randomize the order of values, though 2577 // The code could also randomize the order of values, though
2571 // that's tricky because vldr has a limited reach. Furthermore 2578 // that's tricky because vldr has a limited reach. Furthermore
2572 // it breaks load locality. 2579 // it breaks load locality.
2573 RelocInfo rinfo(pc_, imm); 2580 ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm);
2574 ConstantPoolArray::LayoutSection section = ConstantPoolAddEntry(rinfo); 2581 if (access == ConstantPoolEntry::OVERFLOWED) {
2575 if (section == ConstantPoolArray::EXTENDED_SECTION) { 2582 DCHECK(FLAG_enable_embedded_constant_pool);
2576 DCHECK(FLAG_enable_ool_constant_pool);
2577 // Emit instructions to load constant pool offset. 2583 // Emit instructions to load constant pool offset.
2578 movw(ip, 0); 2584 movw(ip, 0);
2579 movt(ip, 0); 2585 movt(ip, 0);
2580 // Load from constant pool at offset. 2586 // Load from constant pool at offset.
2581 vldr(dst, MemOperand(pp, ip)); 2587 vldr(dst, MemOperand(pp, ip));
2582 } else { 2588 } else {
2583 DCHECK(section == ConstantPoolArray::SMALL_SECTION); 2589 DCHECK(access == ConstantPoolEntry::REGULAR);
2584 vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0)); 2590 vldr(dst, MemOperand(FLAG_enable_embedded_constant_pool ? pp : pc, 0));
2585 } 2591 }
2586 } else { 2592 } else {
2587 // Synthesise the double from ARM immediates. 2593 // Synthesise the double from ARM immediates.
2588 uint32_t lo, hi; 2594 uint32_t lo, hi;
2589 DoubleAsTwoUInt32(imm, &lo, &hi); 2595 DoubleAsTwoUInt32(imm, &lo, &hi);
2590 2596
2591 if (lo == hi) { 2597 if (lo == hi) {
2592 // Move the low and high parts of the double to a D register in one 2598 // Move the low and high parts of the double to a D register in one
2593 // instruction. 2599 // instruction.
2594 mov(ip, Operand(lo)); 2600 mov(ip, Operand(lo));
(...skipping 953 matching lines...)
3548 DeleteArray(buffer_); 3554 DeleteArray(buffer_);
3549 buffer_ = desc.buffer; 3555 buffer_ = desc.buffer;
3550 buffer_size_ = desc.buffer_size; 3556 buffer_size_ = desc.buffer_size;
3551 pc_ += pc_delta; 3557 pc_ += pc_delta;
3552 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, 3558 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
3553 reloc_info_writer.last_pc() + pc_delta); 3559 reloc_info_writer.last_pc() + pc_delta);
3554 3560
3555 // None of our relocation types are pc relative pointing outside the code 3561 // None of our relocation types are pc relative pointing outside the code
3556 // buffer nor pc absolute pointing inside the code buffer, so there is no need 3562 // buffer nor pc absolute pointing inside the code buffer, so there is no need
3557 // to relocate any emitted relocation entries. 3563 // to relocate any emitted relocation entries.
3558
3559 // Relocate pending relocation entries.
3560 for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) {
3561 RelocInfo& rinfo = pending_32_bit_reloc_info_[i];
3562 DCHECK(rinfo.rmode() != RelocInfo::COMMENT &&
3563 rinfo.rmode() != RelocInfo::POSITION);
3564 if (rinfo.rmode() != RelocInfo::JS_RETURN) {
3565 rinfo.set_pc(rinfo.pc() + pc_delta);
3566 }
3567 }
3568 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
3569 RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
3570 DCHECK(rinfo.rmode() == RelocInfo::NONE64);
3571 rinfo.set_pc(rinfo.pc() + pc_delta);
3572 }
3573 constant_pool_builder_.Relocate(pc_delta);
3574 } 3564 }
3575 3565
3576 3566
3577 void Assembler::db(uint8_t data) { 3567 void Assembler::db(uint8_t data) {
3578 // No relocation info should be pending while using db. db is used 3568 // No relocation info should be pending while using db. db is used
3579 // to write pure data with no pointers and the constant pool should 3569 // to write pure data with no pointers and the constant pool should
3580 // be emitted before using db. 3570 // be emitted before using db.
3581 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3571 DCHECK(num_pending_32_bit_constants_ == 0);
3582 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3572 DCHECK(num_pending_64_bit_constants_ == 0);
3583 CheckBuffer(); 3573 CheckBuffer();
3584 *reinterpret_cast<uint8_t*>(pc_) = data; 3574 *reinterpret_cast<uint8_t*>(pc_) = data;
3585 pc_ += sizeof(uint8_t); 3575 pc_ += sizeof(uint8_t);
3586 } 3576 }
3587 3577
3588 3578
3589 void Assembler::dd(uint32_t data) { 3579 void Assembler::dd(uint32_t data) {
3590 // No relocation info should be pending while using dd. dd is used 3580 // No relocation info should be pending while using dd. dd is used
3591 // to write pure data with no pointers and the constant pool should 3581 // to write pure data with no pointers and the constant pool should
3592 // be emitted before using dd. 3582 // be emitted before using dd.
3593 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3583 DCHECK(num_pending_32_bit_constants_ == 0);
3594 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3584 DCHECK(num_pending_64_bit_constants_ == 0);
3595 CheckBuffer(); 3585 CheckBuffer();
3596 *reinterpret_cast<uint32_t*>(pc_) = data; 3586 *reinterpret_cast<uint32_t*>(pc_) = data;
3597 pc_ += sizeof(uint32_t); 3587 pc_ += sizeof(uint32_t);
3598 } 3588 }
3599 3589
3600 3590
3591 void Assembler::dq(uint64_t value) {
3592 CheckBuffer();
rmcilroy 2015/05/20 14:32:10 Please add the num_pending_xx_bit_constants_ == 0
MTBrandyberry 2015/05/20 22:28:21 Done.
3593 *reinterpret_cast<uint64_t*>(pc_) = value;
3594 pc_ += sizeof(uint64_t);
3595 }
3596
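Following the request in the comment thread above, dq() picks up the same pending-constant DCHECKs already used by db() and dd(); a sketch of the adjusted function (the actual change lands in a later patch set):

void Assembler::dq(uint64_t value) {
  // No relocation info should be pending while using dq, for the same
  // reason as db and dd above.
  DCHECK(num_pending_32_bit_constants_ == 0);
  DCHECK(num_pending_64_bit_constants_ == 0);
  CheckBuffer();
  *reinterpret_cast<uint64_t*>(pc_) = value;
  pc_ += sizeof(uint64_t);
}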
3597
3601 void Assembler::emit_code_stub_address(Code* stub) { 3598 void Assembler::emit_code_stub_address(Code* stub) {
3602 CheckBuffer(); 3599 CheckBuffer();
3603 *reinterpret_cast<uint32_t*>(pc_) = 3600 *reinterpret_cast<uint32_t*>(pc_) =
3604 reinterpret_cast<uint32_t>(stub->instruction_start()); 3601 reinterpret_cast<uint32_t>(stub->instruction_start());
3605 pc_ += sizeof(uint32_t); 3602 pc_ += sizeof(uint32_t);
3606 } 3603 }
3607 3604
3608 3605
3609 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { 3606 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
3607 if (RelocInfo::IsNone(rmode) ||
3608 // Don't record external references unless the heap will be serialized.
3609 (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() &&
3610 !emit_debug_code())) {
3611 return;
3612 }
3613 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here
3614 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
3615 data = RecordedAstId().ToInt();
3616 ClearRecordedAstId();
3617 }
3610 RelocInfo rinfo(pc_, rmode, data, NULL); 3618 RelocInfo rinfo(pc_, rmode, data, NULL);
3611 RecordRelocInfo(rinfo); 3619 reloc_info_writer.Write(&rinfo);
3612 } 3620 }
3613 3621
3614 3622
3615 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { 3623 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
3616 if (!RelocInfo::IsNone(rinfo.rmode())) { 3624 RelocInfo::Mode rmode,
3617 // Don't record external references unless the heap will be serialized. 3625 intptr_t value) {
3618 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE && 3626 DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::POSITION &&
3619 !serializer_enabled() && !emit_debug_code()) { 3627 rmode != RelocInfo::STATEMENT_POSITION &&
3620 return; 3628 rmode != RelocInfo::CONST_POOL && rmode != RelocInfo::NONE64);
3629 bool sharing_ok = RelocInfo::IsNone(rmode) ||
3630 !(serializer_enabled() || rmode < RelocInfo::CELL);
3631 if (FLAG_enable_embedded_constant_pool) {
3632 return constant_pool_builder_.AddEntry(position, value, sharing_ok);
3633 } else {
3634 DCHECK(num_pending_32_bit_constants_ < kMaxNumPending32Constants);
3635 if (num_pending_32_bit_constants_ == 0) {
3636 first_const_pool_32_use_ = position;
3621 } 3637 }
3622 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here 3638 ConstantPoolEntry entry(position, value, sharing_ok);
3623 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { 3639 pending_32_bit_constants_[num_pending_32_bit_constants_++] = entry;
3624 RelocInfo reloc_info_with_ast_id(rinfo.pc(), 3640
3625 rinfo.rmode(), 3641 // Make sure the constant pool is not emitted in place of the next
3626 RecordedAstId().ToInt(), 3642 // instruction for which we just recorded relocation info.
3627 NULL); 3643 BlockConstPoolFor(1);
3628 ClearRecordedAstId(); 3644 return ConstantPoolEntry::REGULAR;
3629 reloc_info_writer.Write(&reloc_info_with_ast_id);
3630 } else {
3631 reloc_info_writer.Write(&rinfo);
3632 }
3633 } 3645 }
3634 } 3646 }
3635 3647
3636 3648
3637 ConstantPoolArray::LayoutSection Assembler::ConstantPoolAddEntry( 3649 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position,
3638 const RelocInfo& rinfo) { 3650 double value) {
3639 if (FLAG_enable_ool_constant_pool) { 3651 if (FLAG_enable_embedded_constant_pool) {
3640 return constant_pool_builder_.AddEntry(this, rinfo); 3652 return constant_pool_builder_.AddEntry(position, value);
3641 } else { 3653 } else {
3642 if (rinfo.rmode() == RelocInfo::NONE64) { 3654 DCHECK(num_pending_64_bit_constants_ < kMaxNumPending64Constants);
3643 DCHECK(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); 3655 if (num_pending_64_bit_constants_ == 0) {
3644 if (num_pending_64_bit_reloc_info_ == 0) { 3656 first_const_pool_64_use_ = position;
3645 first_const_pool_64_use_ = pc_offset();
3646 }
3647 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
3648 } else {
3649 DCHECK(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
3650 if (num_pending_32_bit_reloc_info_ == 0) {
3651 first_const_pool_32_use_ = pc_offset();
3652 }
3653 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
3654 } 3657 }
3658 ConstantPoolEntry entry(position, value);
3659 pending_64_bit_constants_[num_pending_64_bit_constants_++] = entry;
3660
3655 // Make sure the constant pool is not emitted in place of the next 3661 // Make sure the constant pool is not emitted in place of the next
3656 // instruction for which we just recorded relocation info. 3662 // instruction for which we just recorded relocation info.
3657 BlockConstPoolFor(1); 3663 BlockConstPoolFor(1);
3658 return ConstantPoolArray::SMALL_SECTION; 3664 return ConstantPoolEntry::REGULAR;
3659 } 3665 }
3660 } 3666 }
3661 3667
3662 3668
3663 void Assembler::BlockConstPoolFor(int instructions) { 3669 void Assembler::BlockConstPoolFor(int instructions) {
3664 if (FLAG_enable_ool_constant_pool) { 3670 if (FLAG_enable_embedded_constant_pool) {
3665 // Should be a no-op if using an out-of-line constant pool. 3671 // Should be a no-op if using an embedded constant pool.
3666 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3672 DCHECK(num_pending_32_bit_constants_ == 0);
3667 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3673 DCHECK(num_pending_64_bit_constants_ == 0);
3668 return; 3674 return;
3669 } 3675 }
3670 3676
3671 int pc_limit = pc_offset() + instructions * kInstrSize; 3677 int pc_limit = pc_offset() + instructions * kInstrSize;
3672 if (no_const_pool_before_ < pc_limit) { 3678 if (no_const_pool_before_ < pc_limit) {
3673 // Max pool start (if we need a jump and an alignment). 3679 // Max pool start (if we need a jump and an alignment).
3674 #ifdef DEBUG 3680 #ifdef DEBUG
3675 int start = pc_limit + kInstrSize + 2 * kPointerSize; 3681 int start = pc_limit + kInstrSize + 2 * kPointerSize;
3676 DCHECK((num_pending_32_bit_reloc_info_ == 0) || 3682 DCHECK((num_pending_32_bit_constants_ == 0) ||
3677 (start - first_const_pool_32_use_ + 3683 (start - first_const_pool_32_use_ +
3678 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); 3684 num_pending_64_bit_constants_ * kDoubleSize <
3679 DCHECK((num_pending_64_bit_reloc_info_ == 0) || 3685 kMaxDistToIntPool));
3686 DCHECK((num_pending_64_bit_constants_ == 0) ||
3680 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); 3687 (start - first_const_pool_64_use_ < kMaxDistToFPPool));
3681 #endif 3688 #endif
3682 no_const_pool_before_ = pc_limit; 3689 no_const_pool_before_ = pc_limit;
3683 } 3690 }
3684 3691
3685 if (next_buffer_check_ < no_const_pool_before_) { 3692 if (next_buffer_check_ < no_const_pool_before_) {
3686 next_buffer_check_ = no_const_pool_before_; 3693 next_buffer_check_ = no_const_pool_before_;
3687 } 3694 }
3688 } 3695 }
3689 3696
3690 3697
3691 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { 3698 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
3692 if (FLAG_enable_ool_constant_pool) { 3699 if (FLAG_enable_embedded_constant_pool) {
3693 // Should be a no-op if using an out-of-line constant pool. 3700 // Should be a no-op if using an embedded constant pool.
3694 DCHECK(num_pending_32_bit_reloc_info_ == 0); 3701 DCHECK(num_pending_32_bit_constants_ == 0);
3695 DCHECK(num_pending_64_bit_reloc_info_ == 0); 3702 DCHECK(num_pending_64_bit_constants_ == 0);
3696 return; 3703 return;
3697 } 3704 }
3698 3705
3699 // Some short sequence of instruction mustn't be broken up by constant pool 3706 // Some short sequence of instruction mustn't be broken up by constant pool
3700 // emission, such sequences are protected by calls to BlockConstPoolFor and 3707 // emission, such sequences are protected by calls to BlockConstPoolFor and
3701 // BlockConstPoolScope. 3708 // BlockConstPoolScope.
3702 if (is_const_pool_blocked()) { 3709 if (is_const_pool_blocked()) {
3703 // Something is wrong if emission is forced and blocked at the same time. 3710 // Something is wrong if emission is forced and blocked at the same time.
3704 DCHECK(!force_emit); 3711 DCHECK(!force_emit);
3705 return; 3712 return;
3706 } 3713 }
3707 3714
3708 // There is nothing to do if there are no pending constant pool entries. 3715 // There is nothing to do if there are no pending constant pool entries.
3709 if ((num_pending_32_bit_reloc_info_ == 0) && 3716 if ((num_pending_32_bit_constants_ == 0) &&
3710 (num_pending_64_bit_reloc_info_ == 0)) { 3717 (num_pending_64_bit_constants_ == 0)) {
3711 // Calculate the offset of the next check. 3718 // Calculate the offset of the next check.
3712 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3719 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3713 return; 3720 return;
3714 } 3721 }
3715 3722
3716 // Check that the code buffer is large enough before emitting the constant 3723 // Check that the code buffer is large enough before emitting the constant
3717 // pool (include the jump over the pool and the constant pool marker and 3724 // pool (include the jump over the pool and the constant pool marker and
3718 // the gap to the relocation information). 3725 // the gap to the relocation information).
3719 int jump_instr = require_jump ? kInstrSize : 0; 3726 int jump_instr = require_jump ? kInstrSize : 0;
3720 int size_up_to_marker = jump_instr + kInstrSize; 3727 int size_up_to_marker = jump_instr + kInstrSize;
3721 int size_after_marker = num_pending_32_bit_reloc_info_ * kPointerSize; 3728 int size_after_marker = num_pending_32_bit_constants_ * kPointerSize;
3722 bool has_fp_values = (num_pending_64_bit_reloc_info_ > 0); 3729 bool has_fp_values = (num_pending_64_bit_constants_ > 0);
3723 bool require_64_bit_align = false; 3730 bool require_64_bit_align = false;
3724 if (has_fp_values) { 3731 if (has_fp_values) {
3725 require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7); 3732 require_64_bit_align = (((uintptr_t)pc_ + size_up_to_marker) & 0x7);
3726 if (require_64_bit_align) { 3733 if (require_64_bit_align) {
3727 size_after_marker += kInstrSize; 3734 size_after_marker += kInstrSize;
3728 } 3735 }
3729 size_after_marker += num_pending_64_bit_reloc_info_ * kDoubleSize; 3736 size_after_marker += num_pending_64_bit_constants_ * kDoubleSize;
3730 } 3737 }
3731 3738
3732 int size = size_up_to_marker + size_after_marker; 3739 int size = size_up_to_marker + size_after_marker;
3733 3740
3734 // We emit a constant pool when: 3741 // We emit a constant pool when:
3735 // * requested to do so by parameter force_emit (e.g. after each function). 3742 // * requested to do so by parameter force_emit (e.g. after each function).
3736 // * the distance from the first instruction accessing the constant pool to 3743 // * the distance from the first instruction accessing the constant pool to
3737 // any of the constant pool entries will exceed its limit the next 3744 // any of the constant pool entries will exceed its limit the next
3738 // time the pool is checked. This is overly restrictive, but we don't emit 3745 // time the pool is checked. This is overly restrictive, but we don't emit
3739 // constant pool entries in-order so it's conservatively correct. 3746 // constant pool entries in-order so it's conservatively correct.
3740 // * the instruction doesn't require a jump after itself to jump over the 3747 // * the instruction doesn't require a jump after itself to jump over the
3741 // constant pool, and we're getting close to running out of range. 3748 // constant pool, and we're getting close to running out of range.
3742 if (!force_emit) { 3749 if (!force_emit) {
3743 DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0)); 3750 DCHECK((first_const_pool_32_use_ >= 0) || (first_const_pool_64_use_ >= 0));
3744 bool need_emit = false; 3751 bool need_emit = false;
3745 if (has_fp_values) { 3752 if (has_fp_values) {
3746 int dist64 = pc_offset() + 3753 int dist64 = pc_offset() + size -
3747 size - 3754 num_pending_32_bit_constants_ * kPointerSize -
3748 num_pending_32_bit_reloc_info_ * kPointerSize -
3749 first_const_pool_64_use_; 3755 first_const_pool_64_use_;
3750 if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) || 3756 if ((dist64 >= kMaxDistToFPPool - kCheckPoolInterval) ||
3751 (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) { 3757 (!require_jump && (dist64 >= kMaxDistToFPPool / 2))) {
3752 need_emit = true; 3758 need_emit = true;
3753 } 3759 }
3754 } 3760 }
3755 int dist32 = 3761 int dist32 =
3756 pc_offset() + size - first_const_pool_32_use_; 3762 pc_offset() + size - first_const_pool_32_use_;
3757 if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) || 3763 if ((dist32 >= kMaxDistToIntPool - kCheckPoolInterval) ||
3758 (!require_jump && (dist32 >= kMaxDistToIntPool / 2))) { 3764 (!require_jump && (dist32 >= kMaxDistToIntPool / 2))) {
(...skipping 21 matching lines...)
3780 // The data size helps disassembly know what to print. 3786 // The data size helps disassembly know what to print.
3781 emit(kConstantPoolMarker | 3787 emit(kConstantPoolMarker |
3782 EncodeConstantPoolLength(size_after_marker / kPointerSize)); 3788 EncodeConstantPoolLength(size_after_marker / kPointerSize));
3783 3789
3784 if (require_64_bit_align) { 3790 if (require_64_bit_align) {
3785 emit(kConstantPoolMarker); 3791 emit(kConstantPoolMarker);
3786 } 3792 }
3787 3793
3788 // Emit 64-bit constant pool entries first: their range is smaller than 3794 // Emit 64-bit constant pool entries first: their range is smaller than
3789 // 32-bit entries. 3795 // 32-bit entries.
3790 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { 3796 for (int i = 0; i < num_pending_64_bit_constants_; i++) {
3791 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; 3797 ConstantPoolEntry& entry = pending_64_bit_constants_[i];
3792 3798
3793 DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment. 3799 DCHECK(!((uintptr_t)pc_ & 0x7)); // Check 64-bit alignment.
3794 3800
3795 Instr instr = instr_at(rinfo.pc()); 3801 Instr instr = instr_at(entry.position());
3796 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0. 3802 // Instruction to patch must be 'vldr rd, [pc, #offset]' with offset == 0.
3797 DCHECK((IsVldrDPcImmediateOffset(instr) && 3803 DCHECK((IsVldrDPcImmediateOffset(instr) &&
3798 GetVldrDRegisterImmediateOffset(instr) == 0)); 3804 GetVldrDRegisterImmediateOffset(instr) == 0));
3799 3805
3800 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3806 int delta = pc_offset() - entry.position() - kPcLoadDelta;
3801 DCHECK(is_uint10(delta)); 3807 DCHECK(is_uint10(delta));
3802 3808
3803 bool found = false; 3809 bool found = false;
3804 uint64_t value = rinfo.raw_data64(); 3810 uint64_t value = entry.value64();
3805 for (int j = 0; j < i; j++) { 3811 for (int j = 0; j < i; j++) {
3806 RelocInfo& rinfo2 = pending_64_bit_reloc_info_[j]; 3812 ConstantPoolEntry& entry2 = pending_64_bit_constants_[j];
3807 if (value == rinfo2.raw_data64()) { 3813 if (value == entry2.value64()) {
3808 found = true; 3814 found = true;
3809 DCHECK(rinfo2.rmode() == RelocInfo::NONE64); 3815 Instr instr2 = instr_at(entry2.position());
3810 Instr instr2 = instr_at(rinfo2.pc());
3811 DCHECK(IsVldrDPcImmediateOffset(instr2)); 3816 DCHECK(IsVldrDPcImmediateOffset(instr2));
3812 delta = GetVldrDRegisterImmediateOffset(instr2); 3817 delta = GetVldrDRegisterImmediateOffset(instr2);
3813 delta += rinfo2.pc() - rinfo.pc(); 3818 delta += entry2.position() - entry.position();
3814 break; 3819 break;
3815 } 3820 }
3816 } 3821 }
3817 3822
3818 instr_at_put(rinfo.pc(), SetVldrDRegisterImmediateOffset(instr, delta)); 3823 instr_at_put(entry.position(),
3824 SetVldrDRegisterImmediateOffset(instr, delta));
3819 3825
3820 if (!found) { 3826 if (!found) {
3821 uint64_t uint_data = rinfo.raw_data64(); 3827 dq(entry.value64());
3822 emit(uint_data & 0xFFFFFFFF);
3823 emit(uint_data >> 32);
3824 } 3828 }
3825 } 3829 }
3826 3830
3827 // Emit 32-bit constant pool entries. 3831 // Emit 32-bit constant pool entries.
3828 for (int i = 0; i < num_pending_32_bit_reloc_info_; i++) { 3832 for (int i = 0; i < num_pending_32_bit_constants_; i++) {
3829 RelocInfo& rinfo = pending_32_bit_reloc_info_[i]; 3833 ConstantPoolEntry& entry = pending_32_bit_constants_[i];
3830 DCHECK(rinfo.rmode() != RelocInfo::COMMENT && 3834 Instr instr = instr_at(entry.position());
3831 rinfo.rmode() != RelocInfo::POSITION &&
3832 rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
3833 rinfo.rmode() != RelocInfo::CONST_POOL &&
3834 rinfo.rmode() != RelocInfo::NONE64);
3835
3836 Instr instr = instr_at(rinfo.pc());
3837 3835
3838 // 64-bit loads shouldn't get here. 3836 // 64-bit loads shouldn't get here.
3839 DCHECK(!IsVldrDPcImmediateOffset(instr)); 3837 DCHECK(!IsVldrDPcImmediateOffset(instr));
3840 3838
3841 if (IsLdrPcImmediateOffset(instr) && 3839 if (IsLdrPcImmediateOffset(instr) &&
3842 GetLdrRegisterImmediateOffset(instr) == 0) { 3840 GetLdrRegisterImmediateOffset(instr) == 0) {
3843 int delta = pc_ - rinfo.pc() - kPcLoadDelta; 3841 int delta = pc_offset() - entry.position() - kPcLoadDelta;
3844 DCHECK(is_uint12(delta)); 3842 DCHECK(is_uint12(delta));
3845 // 0 is the smallest delta: 3843 // 0 is the smallest delta:
3846 // ldr rd, [pc, #0] 3844 // ldr rd, [pc, #0]
3847 // constant pool marker 3845 // constant pool marker
3848 // data 3846 // data
3849 3847
3850 bool found = false; 3848 bool found = false;
3851 if (!serializer_enabled() && rinfo.rmode() >= RelocInfo::CELL) { 3849 if (entry.sharing_ok()) {
3852 for (int j = 0; j < i; j++) { 3850 for (int j = 0; j < i; j++) {
3853 RelocInfo& rinfo2 = pending_32_bit_reloc_info_[j]; 3851 ConstantPoolEntry& entry2 = pending_32_bit_constants_[j];
3854 3852
3855 if ((rinfo2.data() == rinfo.data()) && 3853 if (entry2.value() == entry.value()) {
3856 (rinfo2.rmode() == rinfo.rmode())) { 3854 Instr instr2 = instr_at(entry2.position());
3857 Instr instr2 = instr_at(rinfo2.pc());
3858 if (IsLdrPcImmediateOffset(instr2)) { 3855 if (IsLdrPcImmediateOffset(instr2)) {
3859 delta = GetLdrRegisterImmediateOffset(instr2); 3856 delta = GetLdrRegisterImmediateOffset(instr2);
3860 delta += rinfo2.pc() - rinfo.pc(); 3857 delta += entry2.position() - entry.position();
3861 found = true; 3858 found = true;
3862 break; 3859 break;
3863 } 3860 }
3864 } 3861 }
3865 } 3862 }
3866 } 3863 }
3867 3864
3868 instr_at_put(rinfo.pc(), SetLdrRegisterImmediateOffset(instr, delta)); 3865 instr_at_put(entry.position(),
3866 SetLdrRegisterImmediateOffset(instr, delta));
3869 3867
3870 if (!found) { 3868 if (!found) {
3871 emit(rinfo.data()); 3869 dp(entry.value());
3872 } 3870 }
3873 } else { 3871 } else {
3874 DCHECK(IsMovW(instr)); 3872 DCHECK(IsMovW(instr));
3875 } 3873 }
3876 } 3874 }
3877 3875
3878 num_pending_32_bit_reloc_info_ = 0; 3876 num_pending_32_bit_constants_ = 0;
3879 num_pending_64_bit_reloc_info_ = 0; 3877 num_pending_64_bit_constants_ = 0;
3880 first_const_pool_32_use_ = -1; 3878 first_const_pool_32_use_ = -1;
3881 first_const_pool_64_use_ = -1; 3879 first_const_pool_64_use_ = -1;
3882 3880
3883 RecordComment("]"); 3881 RecordComment("]");
3884 3882
3885 if (after_pool.is_linked()) { 3883 if (after_pool.is_linked()) {
3886 bind(&after_pool); 3884 bind(&after_pool);
3887 } 3885 }
3888 } 3886 }
3889 3887
3890 // Since a constant pool was just emitted, move the check offset forward by 3888 // Since a constant pool was just emitted, move the check offset forward by
3891 // the standard interval. 3889 // the standard interval.
3892 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3890 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3893 } 3891 }
3894 3892
3895 3893
3896 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { 3894 void Assembler::SetConstantPoolOffset(int pos, int offset,
rmcilroy 2015/05/20 14:32:10 I think this would be clearer as something like Pa
MTBrandyberry 2015/05/20 22:28:21 Done.
3897 if (!FLAG_enable_ool_constant_pool) { 3895 ConstantPoolEntry::Access access,
3898 return isolate->factory()->empty_constant_pool_array(); 3896 ConstantPoolEntry::Type type) {
3899 } 3897 DCHECK(FLAG_enable_embedded_constant_pool);
3900 return constant_pool_builder_.New(isolate); 3898 Address pc = buffer_ + pos;
3901 }
3902 3899
3903 3900 // Patch vldr/ldr instruction with correct offset.
3904 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { 3901 Instr instr = instr_at(pc);
3905 constant_pool_builder_.Populate(this, constant_pool); 3902 if (access == ConstantPoolEntry::OVERFLOWED) {
3906 } 3903 if (CpuFeatures::IsSupported(ARMv7)) {
3907 3904 // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
3908 3905 Instr next_instr = instr_at(pc + kInstrSize);
3909 ConstantPoolBuilder::ConstantPoolBuilder() 3906 DCHECK((IsMovW(instr) && Instruction::ImmedMovwMovtValue(instr) == 0));
3910 : entries_(), current_section_(ConstantPoolArray::SMALL_SECTION) {} 3907 DCHECK((IsMovT(next_instr) &&
3911 3908 Instruction::ImmedMovwMovtValue(next_instr) == 0));
3912 3909 instr_at_put(pc, PatchMovwImmediate(instr, offset & 0xffff));
3913 bool ConstantPoolBuilder::IsEmpty() { 3910 instr_at_put(pc + kInstrSize,
3914 return entries_.size() == 0; 3911 PatchMovwImmediate(next_instr, offset >> 16));
3915 } 3912 } else {
3916 3913 // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
3917 3914 Instr instr_2 = instr_at(pc + kInstrSize);
3918 ConstantPoolArray::Type ConstantPoolBuilder::GetConstantPoolType( 3915 Instr instr_3 = instr_at(pc + 2 * kInstrSize);
3919 RelocInfo::Mode rmode) { 3916 Instr instr_4 = instr_at(pc + 3 * kInstrSize);
3920 if (rmode == RelocInfo::NONE64) { 3917 DCHECK((IsMovImmed(instr) && Instruction::Immed8Value(instr) == 0));
3921 return ConstantPoolArray::INT64; 3918 DCHECK((IsOrrImmed(instr_2) && Instruction::Immed8Value(instr_2) == 0) &&
3922 } else if (!RelocInfo::IsGCRelocMode(rmode)) { 3919 GetRn(instr_2).is(GetRd(instr_2)));
3923 return ConstantPoolArray::INT32; 3920 DCHECK((IsOrrImmed(instr_3) && Instruction::Immed8Value(instr_3) == 0) &&
3924 } else if (RelocInfo::IsCodeTarget(rmode)) { 3921 GetRn(instr_3).is(GetRd(instr_3)));
3925 return ConstantPoolArray::CODE_PTR; 3922 DCHECK((IsOrrImmed(instr_4) && Instruction::Immed8Value(instr_4) == 0) &&
3923 GetRn(instr_4).is(GetRd(instr_4)));
3924 instr_at_put(pc, PatchShiftImm(instr, (offset & kImm8Mask)));
3925 instr_at_put(pc + kInstrSize,
3926 PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
3927 instr_at_put(pc + 2 * kInstrSize,
3928 PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
3929 instr_at_put(pc + 3 * kInstrSize,
3930 PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
3931 }
3932 } else if (type == ConstantPoolEntry::DOUBLE) {
3933 // Instruction to patch must be 'vldr rd, [pp, #0]'.
3934 DCHECK((IsVldrDPpImmediateOffset(instr) &&
3935 GetVldrDRegisterImmediateOffset(instr) == 0));
3936 DCHECK(is_uint10(offset));
3937 instr_at_put(pc, SetVldrDRegisterImmediateOffset(instr, offset));
3926 } else { 3938 } else {
3927 DCHECK(RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode)); 3939 // Instruction to patch must be 'ldr rd, [pp, #0]'.
3928 return ConstantPoolArray::HEAP_PTR; 3940 DCHECK((IsLdrPpImmediateOffset(instr) &&
3941 GetLdrRegisterImmediateOffset(instr) == 0));
3942 DCHECK(is_uint12(offset));
3943 instr_at_put(pc, SetLdrRegisterImmediateOffset(instr, offset));
3929 } 3944 }
3930 } 3945 }
3931 3946
3932
3933 ConstantPoolArray::LayoutSection ConstantPoolBuilder::AddEntry(
3934 Assembler* assm, const RelocInfo& rinfo) {
3935 RelocInfo::Mode rmode = rinfo.rmode();
3936 DCHECK(rmode != RelocInfo::COMMENT &&
3937 rmode != RelocInfo::POSITION &&
3938 rmode != RelocInfo::STATEMENT_POSITION &&
3939 rmode != RelocInfo::CONST_POOL);
3940
3941 // Try to merge entries which won't be patched.
3942 int merged_index = -1;
3943 ConstantPoolArray::LayoutSection entry_section = current_section_;
3944 if (RelocInfo::IsNone(rmode) ||
3945 (!assm->serializer_enabled() && (rmode >= RelocInfo::CELL))) {
3946 size_t i;
3947 std::vector<ConstantPoolEntry>::const_iterator it;
3948 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
3949 if (RelocInfo::IsEqual(rinfo, it->rinfo_)) {
3950 // Merge with found entry.
3951 merged_index = i;
3952 entry_section = entries_[i].section_;
3953 break;
3954 }
3955 }
3956 }
3957 DCHECK(entry_section <= current_section_);
3958 entries_.push_back(ConstantPoolEntry(rinfo, entry_section, merged_index));
3959
3960 if (merged_index == -1) {
3961 // Not merged, so update the appropriate count.
3962 number_of_entries_[entry_section].increment(GetConstantPoolType(rmode));
3963 }
3964
3965 // Check if we still have room for another entry in the small section
3966 // given Arm's ldr and vldr immediate offset range.
3967 if (current_section_ == ConstantPoolArray::SMALL_SECTION &&
3968 !(is_uint12(ConstantPoolArray::SizeFor(*small_entries())) &&
3969 is_uint10(ConstantPoolArray::MaxInt64Offset(
3970 small_entries()->count_of(ConstantPoolArray::INT64))))) {
3971 current_section_ = ConstantPoolArray::EXTENDED_SECTION;
3972 }
3973 return entry_section;
3974 }
3975
3976
3977 void ConstantPoolBuilder::Relocate(int pc_delta) {
3978 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
3979 entry != entries_.end(); entry++) {
3980 DCHECK(entry->rinfo_.rmode() != RelocInfo::JS_RETURN);
3981 entry->rinfo_.set_pc(entry->rinfo_.pc() + pc_delta);
3982 }
3983 }
3984
3985
3986 Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) {
3987 if (IsEmpty()) {
3988 return isolate->factory()->empty_constant_pool_array();
3989 } else if (extended_entries()->is_empty()) {
3990 return isolate->factory()->NewConstantPoolArray(*small_entries());
3991 } else {
3992 DCHECK(current_section_ == ConstantPoolArray::EXTENDED_SECTION);
3993 return isolate->factory()->NewExtendedConstantPoolArray(
3994 *small_entries(), *extended_entries());
3995 }
3996 }
3997
3998
3999 void ConstantPoolBuilder::Populate(Assembler* assm,
4000 ConstantPoolArray* constant_pool) {
4001 DCHECK_EQ(extended_entries()->is_empty(),
4002 !constant_pool->is_extended_layout());
4003 DCHECK(small_entries()->equals(ConstantPoolArray::NumberOfEntries(
4004 constant_pool, ConstantPoolArray::SMALL_SECTION)));
4005 if (constant_pool->is_extended_layout()) {
4006 DCHECK(extended_entries()->equals(ConstantPoolArray::NumberOfEntries(
4007 constant_pool, ConstantPoolArray::EXTENDED_SECTION)));
4008 }
4009
4010 // Set up initial offsets.
4011 int offsets[ConstantPoolArray::NUMBER_OF_LAYOUT_SECTIONS]
4012 [ConstantPoolArray::NUMBER_OF_TYPES];
4013 for (int section = 0; section <= constant_pool->final_section(); section++) {
4014 int section_start = (section == ConstantPoolArray::EXTENDED_SECTION)
4015 ? small_entries()->total_count()
4016 : 0;
4017 for (int i = 0; i < ConstantPoolArray::NUMBER_OF_TYPES; i++) {
4018 ConstantPoolArray::Type type = static_cast<ConstantPoolArray::Type>(i);
4019 if (number_of_entries_[section].count_of(type) != 0) {
4020 offsets[section][type] = constant_pool->OffsetOfElementAt(
4021 number_of_entries_[section].base_of(type) + section_start);
4022 }
4023 }
4024 }
4025
4026 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
4027 entry != entries_.end(); entry++) {
4028 RelocInfo rinfo = entry->rinfo_;
4029 RelocInfo::Mode rmode = entry->rinfo_.rmode();
4030 ConstantPoolArray::Type type = GetConstantPoolType(rmode);
4031
4032 // Update constant pool if necessary and get the entry's offset.
4033 int offset;
4034 if (entry->merged_index_ == -1) {
4035 offset = offsets[entry->section_][type];
4036 offsets[entry->section_][type] += ConstantPoolArray::entry_size(type);
4037 if (type == ConstantPoolArray::INT64) {
4038 constant_pool->set_at_offset(offset, rinfo.data64());
4039 } else if (type == ConstantPoolArray::INT32) {
4040 constant_pool->set_at_offset(offset,
4041 static_cast<int32_t>(rinfo.data()));
4042 } else if (type == ConstantPoolArray::CODE_PTR) {
4043 constant_pool->set_at_offset(offset,
4044 reinterpret_cast<Address>(rinfo.data()));
4045 } else {
4046 DCHECK(type == ConstantPoolArray::HEAP_PTR);
4047 constant_pool->set_at_offset(offset,
4048 reinterpret_cast<Object*>(rinfo.data()));
4049 }
4050 offset -= kHeapObjectTag;
4051 entry->merged_index_ = offset; // Stash offset for merged entries.
4052 } else {
4053 DCHECK(entry->merged_index_ < (entry - entries_.begin()));
4054 offset = entries_[entry->merged_index_].merged_index_;
4055 }
4056
4057 // Patch vldr/ldr instruction with correct offset.
4058 Instr instr = assm->instr_at(rinfo.pc());
4059 if (entry->section_ == ConstantPoolArray::EXTENDED_SECTION) {
4060 if (CpuFeatures::IsSupported(ARMv7)) {
4061 // Instructions to patch must be 'movw rd, [#0]' and 'movt rd, [#0].
4062 Instr next_instr = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
4063 DCHECK((Assembler::IsMovW(instr) &&
4064 Instruction::ImmedMovwMovtValue(instr) == 0));
4065 DCHECK((Assembler::IsMovT(next_instr) &&
4066 Instruction::ImmedMovwMovtValue(next_instr) == 0));
4067 assm->instr_at_put(
4068 rinfo.pc(), Assembler::PatchMovwImmediate(instr, offset & 0xffff));
4069 assm->instr_at_put(
4070 rinfo.pc() + Assembler::kInstrSize,
4071 Assembler::PatchMovwImmediate(next_instr, offset >> 16));
4072 } else {
4073 // Instructions to patch must be 'mov rd, [#0]' and 'orr rd, rd, [#0].
4074 Instr instr_2 = assm->instr_at(rinfo.pc() + Assembler::kInstrSize);
4075 Instr instr_3 = assm->instr_at(rinfo.pc() + 2 * Assembler::kInstrSize);
4076 Instr instr_4 = assm->instr_at(rinfo.pc() + 3 * Assembler::kInstrSize);
4077 DCHECK((Assembler::IsMovImmed(instr) &&
4078 Instruction::Immed8Value(instr) == 0));
4079 DCHECK((Assembler::IsOrrImmed(instr_2) &&
4080 Instruction::Immed8Value(instr_2) == 0) &&
4081 Assembler::GetRn(instr_2).is(Assembler::GetRd(instr_2)));
4082 DCHECK((Assembler::IsOrrImmed(instr_3) &&
4083 Instruction::Immed8Value(instr_3) == 0) &&
4084 Assembler::GetRn(instr_3).is(Assembler::GetRd(instr_3)));
4085 DCHECK((Assembler::IsOrrImmed(instr_4) &&
4086 Instruction::Immed8Value(instr_4) == 0) &&
4087 Assembler::GetRn(instr_4).is(Assembler::GetRd(instr_4)));
4088 assm->instr_at_put(
4089 rinfo.pc(), Assembler::PatchShiftImm(instr, (offset & kImm8Mask)));
4090 assm->instr_at_put(
4091 rinfo.pc() + Assembler::kInstrSize,
4092 Assembler::PatchShiftImm(instr_2, (offset & (kImm8Mask << 8))));
4093 assm->instr_at_put(
4094 rinfo.pc() + 2 * Assembler::kInstrSize,
4095 Assembler::PatchShiftImm(instr_3, (offset & (kImm8Mask << 16))));
4096 assm->instr_at_put(
4097 rinfo.pc() + 3 * Assembler::kInstrSize,
4098 Assembler::PatchShiftImm(instr_4, (offset & (kImm8Mask << 24))));
4099 }
4100 } else if (type == ConstantPoolArray::INT64) {
4101 // Instruction to patch must be 'vldr rd, [pp, #0]'.
4102 DCHECK((Assembler::IsVldrDPpImmediateOffset(instr) &&
4103 Assembler::GetVldrDRegisterImmediateOffset(instr) == 0));
4104 DCHECK(is_uint10(offset));
4105 assm->instr_at_put(rinfo.pc(), Assembler::SetVldrDRegisterImmediateOffset(
4106 instr, offset));
4107 } else {
4108 // Instruction to patch must be 'ldr rd, [pp, #0]'.
4109 DCHECK((Assembler::IsLdrPpImmediateOffset(instr) &&
4110 Assembler::GetLdrRegisterImmediateOffset(instr) == 0));
4111 DCHECK(is_uint12(offset));
4112 assm->instr_at_put(
4113 rinfo.pc(), Assembler::SetLdrRegisterImmediateOffset(instr, offset));
4114 }
4115 }
4116 }
4117
4118 3947
4119 } } // namespace v8::internal 3948 } } // namespace v8::internal
4120 3949
4121 #endif // V8_TARGET_ARCH_ARM 3950 #endif // V8_TARGET_ARCH_ARM
