Chromium Code Reviews

Unified Diff: src/arm/assembler-arm.cc

Issue 204403002: Revert "Add out-of-line constant pool support to Arm." (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 9 months ago
 // Copyright (c) 1994-2006 Sun Microsystems Inc.
 // All Rights Reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions
 // are met:
 //
 // - Redistributions of source code must retain the above copyright notice,
 // this list of conditions and the following disclaimer.
 //
(...skipping 275 matching lines...)
 }


 // -----------------------------------------------------------------------------
 // Implementation of RelocInfo

 const int RelocInfo::kApplyMask = 0;


 bool RelocInfo::IsCodedSpecially() {
-  // The deserializer needs to know whether a pointer is specially coded.  Being
-  // specially coded on ARM means that it is a movw/movt instruction, or is an
-  // out of line constant pool entry.  These only occur if
-  // FLAG_enable_ool_constant_pool is true.
-  return FLAG_enable_ool_constant_pool;
+  // The deserializer needs to know whether a pointer is specially coded. Being
+  // specially coded on ARM means that it is a movw/movt instruction. We don't
+  // generate those yet.
+  return false;
 }


 bool RelocInfo::IsInConstantPool() {
-  if (FLAG_enable_ool_constant_pool) {
-    return Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc_));
-  } else {
-    return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_));
-  }
+  return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_));
 }
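
For context: with the out-of-line pool reverted, a constant-pool load is recognized purely by masking the instruction word against kLdrPCMask and comparing the result with kLdrPCPattern (both defined further down in this file). A minimal standalone sketch of that test, not part of this CL; the Bn helpers are assumed to be 1 << n as in V8, and the instruction word is hand-encoded:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Bit helpers mirroring V8's B16/B20/B24 constants (Bn == 1 << n).
      const uint32_t B16 = 1u << 16, B20 = 1u << 20, B24 = 1u << 24;
      const uint32_t kRegister_pc_Code = 15;  // pc is r15 on ARM

      // Same construction as kLdrPCMask / kLdrPCPattern below
      // (L is the load bit, i.e. B20).
      const uint32_t kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;  // 0x0F7F0000
      const uint32_t kLdrPCPattern =
          5 * B24 | B20 | kRegister_pc_Code * B16;                // 0x051F0000

      const uint32_t instr = 0xE59F0008;  // ldr r0, [pc, #8]
      // The mask keeps the addressing-mode bits and the base register (Rn);
      // the condition, Rd and the 12-bit offset are ignored.
      printf("ldr rd, [pc, #off]? %s\n",
             (instr & kLdrPCMask) == kLdrPCPattern ? "yes" : "no");  // yes
      return 0;
    }
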


 void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
   // Patch the code at the current address with the supplied instructions.
   Instr* pc = reinterpret_cast<Instr*>(pc_);
   Instr* instr = reinterpret_cast<Instr*>(instructions);
   for (int i = 0; i < instruction_count; i++) {
     *(pc + i) = *(instr + i);
   }
(...skipping 158 matching lines...)
     al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16;
 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
 // register r is not encoded.
 const Instr kPopRegPattern =
     al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16;
 // mov lr, pc
 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12;
 // ldr rd, [pc, #offset]
 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;
 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16;
-// ldr rd, [pp, #offset]
-const Instr kLdrPpMask = 15 * B24 | 7 * B20 | 15 * B16;
-const Instr kLdrPpPattern = 5 * B24 | L | kRegister_r8_Code * B16;
 // vldr dd, [pc, #offset]
 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8;
-// vldr dd, [pp, #offset]
-const Instr kVldrDPpMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
-const Instr kVldrDPpPattern = 13 * B24 | L | kRegister_r8_Code * B16 | 11 * B8;
 // blxcc rm
 const Instr kBlxRegMask =
     15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
 const Instr kBlxRegPattern =
     B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX;
 const Instr kBlxIp = al | kBlxRegPattern | ip.code();
 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16;
 const Instr kMovMvnPattern = 0xd * B21;
 const Instr kMovMvnFlip = B22;
 const Instr kMovLeaveCCMask = 0xdff * B16;
(...skipping 17 matching lines...)
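
For context: kMovMvnFlip is B22 because the MOV (0b1101) and MVN (0b1111) opcodes differ only in that bit, so one encoding can be toggled into the other, which the assembler exploits when an immediate is not encodable but its bitwise complement is. A standalone sketch with hand-encoded instruction words, not from the CL:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kMovMvnFlip = 1u << 22;  // same as B22 above

      // mov r0, #1 -> cond=al, I=1, opcode=1101 (MOV), S=0, Rd=r0, imm=1
      const uint32_t mov_r0_1 = 0xE3A00001;
      // Flipping bit 22 turns opcode 1101 (MOV) into 1111 (MVN).
      const uint32_t mvn_r0_1 = mov_r0_1 ^ kMovMvnFlip;
      printf("0x%08X\n", mvn_r0_1);  // 0xE3E00001 == mvn r0, #1

      // The assembler uses this when an immediate does not fit but its
      // bitwise complement does: mvn rd, #~imm yields the same rd value.
      return 0;
    }
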
 const Instr kStrRegFpNegOffsetPattern =
     al | B26 | NegOffset | kRegister_fp_Code * B16;
 const Instr kLdrStrInstrTypeMask = 0xffff0000;
 const Instr kLdrStrInstrArgumentMask = 0x0000ffff;
 const Instr kLdrStrOffsetMask = 0x00000fff;


 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
     : AssemblerBase(isolate, buffer, buffer_size),
       recorded_ast_id_(TypeFeedbackId::None()),
-      constant_pool_builder_(),
       positions_recorder_(this) {
   reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
   num_pending_32_bit_reloc_info_ = 0;
   num_pending_64_bit_reloc_info_ = 0;
   next_buffer_check_ = 0;
   const_pool_blocked_nesting_ = 0;
   no_const_pool_before_ = 0;
   first_const_pool_32_use_ = -1;
   first_const_pool_64_use_ = -1;
   last_bound_pos_ = 0;
   constant_pool_available_ = !FLAG_enable_ool_constant_pool;
   constant_pool_full_ = false;
   ClearRecordedAstId();
 }


 Assembler::~Assembler() {
   ASSERT(const_pool_blocked_nesting_ == 0);
 }


 void Assembler::GetCode(CodeDesc* desc) {
-  if (!FLAG_enable_ool_constant_pool) {
-    // Emit constant pool if necessary.
-    CheckConstPool(true, false);
-    ASSERT(num_pending_32_bit_reloc_info_ == 0);
-    ASSERT(num_pending_64_bit_reloc_info_ == 0);
-  }
+  // Emit constant pool if necessary.
+  CheckConstPool(true, false);
+  ASSERT(num_pending_32_bit_reloc_info_ == 0);
+  ASSERT(num_pending_64_bit_reloc_info_ == 0);
+
   // Set up code descriptor.
   desc->buffer = buffer_;
   desc->buffer_size = buffer_size_;
   desc->instr_size = pc_offset();
   desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
   desc->origin = this;
 }
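
For context: GetCode measures the two ends of a single buffer. Instructions grow forward from buffer_ (so instr_size is pc_offset()), while the reloc-info writer, repositioned to buffer_ + buffer_size_ in the constructor above, writes relocation records backwards from the end (so reloc_size is the distance from its position to the end). A standalone sketch with invented offsets, not V8 code:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int buffer_size = 4096;
      uint8_t buffer[4096];

      uint8_t* pc = buffer + 120;                      // next instruction here
      uint8_t* reloc_pos = buffer + buffer_size - 32;  // reloc writer position

      // Mirrors desc->instr_size and desc->reloc_size above.
      int instr_size = static_cast<int>(pc - buffer);                       // 120
      int reloc_size = static_cast<int>((buffer + buffer_size) - reloc_pos);  // 32
      printf("instr_size=%d reloc_size=%d free=%d\n",
             instr_size, reloc_size, buffer_size - instr_size - reloc_size);
      return 0;
    }
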


 void Assembler::Align(int m) {
(...skipping 162 matching lines...)
 }


 bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
   // Check the instruction is indeed a
   // ldr<cond> <Rd>, [pc +/- offset_12].
   return (instr & kLdrPCMask) == kLdrPCPattern;
 }


-bool Assembler::IsLdrPpImmediateOffset(Instr instr) {
-  // Check the instruction is indeed a
-  // ldr<cond> <Rd>, [pp +/- offset_12].
-  return (instr & kLdrPpMask) == kLdrPpPattern;
-}
-
-
 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
   // Check the instruction is indeed a
   // vldr<cond> <Dd>, [pc +/- offset_10].
   return (instr & kVldrDPCMask) == kVldrDPCPattern;
 }


-bool Assembler::IsVldrDPpImmediateOffset(Instr instr) {
-  // Check the instruction is indeed a
-  // vldr<cond> <Dd>, [pp +/- offset_10].
-  return (instr & kVldrDPpMask) == kVldrDPpPattern;
-}
-
-
 bool Assembler::IsTstImmediate(Instr instr) {
   return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) ==
       (I | TST | S);
 }


 bool Assembler::IsCmpRegister(Instr instr) {
   return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) ==
       (CMP | S);
 }
(...skipping 307 matching lines...)
     return Serializer::enabled();
   } else if (RelocInfo::IsNone(rmode_)) {
     return false;
   }
   return true;
 }


 static bool use_mov_immediate_load(const Operand& x,
                                    const Assembler* assembler) {
-  if (assembler != NULL && !assembler->can_use_constant_pool()) {
-    // If there is no constant pool available, we must use an mov immediate.
-    // TODO(rmcilroy): enable ARMv6 support.
-    ASSERT(CpuFeatures::IsSupported(ARMv7));
-    return true;
-  } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
-             (assembler == NULL || !assembler->predictable_code_size())) {
+  if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
+      (assembler == NULL || !assembler->predictable_code_size())) {
     // Prefer movw / movt to constant pool if it is more efficient on the CPU.
     return true;
   } else if (x.must_output_reloc_info(assembler)) {
     // Prefer constant pool if data is likely to be patched.
     return false;
   } else {
     // Otherwise, use immediate load if movw / movt is available.
     return CpuFeatures::IsSupported(ARMv7);
   }
(...skipping 22 matching lines...)
     // No use of constant pool and the immediate operand can be encoded as a
     // shifter operand.
     return true;
   }
 }


 void Assembler::move_32_bit_immediate(Register rd,
                                       const Operand& x,
                                       Condition cond) {
-  RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL);
-  if (x.must_output_reloc_info(this)) {
-    RecordRelocInfo(rinfo);
-  }
-
-  if (use_mov_immediate_load(x, this)) {
-    Register target = rd.code() == pc.code() ? ip : rd;
-    // TODO(rmcilroy): add ARMv6 support for immediate loads.
-    ASSERT(CpuFeatures::IsSupported(ARMv7));
-    if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) {
-      // Make sure the movw/movt doesn't get separated.
-      BlockConstPoolFor(2);
-    }
-    emit(cond | 0x30*B20 | target.code()*B12 |
-         EncodeMovwImmediate(x.imm32_ & 0xffff));
-    movt(target, static_cast<uint32_t>(x.imm32_) >> 16, cond);
-    if (target.code() != rd.code()) {
-      mov(rd, target, LeaveCC, cond);
-    }
-  } else {
-    ASSERT(can_use_constant_pool());
-    ConstantPoolAddEntry(rinfo);
-    ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond);
-  }
+  if (rd.code() != pc.code()) {
+    if (use_mov_immediate_load(x, this)) {
+      if (x.must_output_reloc_info(this)) {
+        RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL);
+        // Make sure the movw/movt doesn't get separated.
+        BlockConstPoolFor(2);
+      }
+      emit(cond | 0x30*B20 | rd.code()*B12 |
+           EncodeMovwImmediate(x.imm32_ & 0xffff));
+      movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond);
+      return;
+    }
+  }
+
+  RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL);
+  ldr(rd, MemOperand(pc, 0), cond);
 }
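
For context: on the movw/movt path a 32-bit immediate is loaded in two halves, movw setting the low 16 bits and movt the high 16, which is also why the pair is protected with BlockConstPoolFor(2) so no pool lands between the two instructions. A standalone sketch of the split, plus the imm4:imm12 field packing that EncodeMovwImmediate is assumed to perform, i.e. ((imm & 0xf000) << 4) | (imm & 0xfff):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t imm = 0xDEADBEEF;   // arbitrary 32-bit immediate
      const uint32_t lo = imm & 0xFFFF;  // 0xBEEF -> movw rd, #lo
      const uint32_t hi = imm >> 16;     // 0xDEAD -> movt rd, #hi

      // movw/movt place the 16-bit value in imm4:imm12 (bits 19-16 and 11-0);
      // this mirrors EncodeMovwImmediate(x.imm32_ & 0xffff) above.
      const uint32_t field = ((lo & 0xF000) << 4) | (lo & 0x0FFF);

      printf("movw #0x%04X, movt #0x%04X, imm4:imm12 field 0x%05X\n",
             lo, hi, field);
      return 0;
    }
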


 void Assembler::addrmod1(Instr instr,
                          Register rn,
                          Register rd,
                          const Operand& x) {
   CheckBuffer();
   ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0);
   if (!x.rm_.is_valid()) {
(...skipping 1279 matching lines...)
   if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) {
     // The double can be encoded in the instruction.
     //
     // Dd = immediate
     // Instruction details available in ARM DDI 0406C.b, A8-936.
     // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) |
     // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0)
     int vd, d;
     dst.split_code(&vd, &d);
     emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc);
-  } else if (FLAG_enable_vldr_imm && can_use_constant_pool()) {
+  } else if (FLAG_enable_vldr_imm) {
     // TODO(jfb) Temporarily turned off until we have constant blinding or
     // some equivalent mitigation: an attacker can otherwise control
     // generated data which also happens to be executable, a Very Bad
     // Thing indeed.
     // Blinding gets tricky because we don't have xor, we probably
     // need to add/subtract without losing precision, which requires a
     // cookie value that Lithium is probably better positioned to
     // choose.
     // We could also add a few peepholes here like detecting 0.0 and
     // -0.0 and doing a vmov from the sequestered d14, forcing denorms
     // to zero (we set flush-to-zero), and normalizing NaN values.
     // We could also detect redundant values.
     // The code could also randomize the order of values, though
     // that's tricky because vldr has a limited reach. Furthermore
     // it breaks load locality.
-    RelocInfo rinfo(pc_, imm);
-    ConstantPoolAddEntry(rinfo);
-    vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0));
+    RecordRelocInfo(imm);
+    vldr(dst, MemOperand(pc, 0));
   } else {
     // Synthesise the double from ARM immediates.
     uint32_t lo, hi;
     DoubleAsTwoUInt32(imm, &lo, &hi);

     if (scratch.is(no_reg)) {
       if (dst.code() < 16) {
         const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
         // Move the low part of the double into the lower of the corresponding S
         // registers of D register dst.
(...skipping 709 matching lines...)
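
For context: the synthesis fallback shown above views the double's IEEE-754 bit image as two 32-bit words and materializes each with integer moves. A standalone sketch of what DoubleAsTwoUInt32 is assumed to do (a memcpy-based type pun on the 64-bit image; splitting via shifts keeps it independent of byte order):

    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Sketch: reinterpret the 64-bit IEEE-754 image of a double as a low
    // and a high 32-bit word.
    static void DoubleAsTwoUInt32(double d, uint32_t* lo, uint32_t* hi) {
      uint64_t bits;
      memcpy(&bits, &d, sizeof(bits));          // defined-behavior type pun
      *lo = static_cast<uint32_t>(bits);        // mantissa low word
      *hi = static_cast<uint32_t>(bits >> 32);  // sign, exponent, mantissa high
    }

    int main() {
      uint32_t lo, hi;
      DoubleAsTwoUInt32(1.0, &lo, &hi);
      printf("1.0 -> hi=0x%08X lo=0x%08X\n", hi, lo);  // hi=0x3FF00000 lo=0
      return 0;
    }
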
            rinfo.rmode() != RelocInfo::POSITION);
     if (rinfo.rmode() != RelocInfo::JS_RETURN) {
       rinfo.set_pc(rinfo.pc() + pc_delta);
     }
   }
   for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) {
     RelocInfo& rinfo = pending_64_bit_reloc_info_[i];
     ASSERT(rinfo.rmode() == RelocInfo::NONE64);
     rinfo.set_pc(rinfo.pc() + pc_delta);
   }
-  constant_pool_builder_.Relocate(pc_delta);
 }


 void Assembler::db(uint8_t data) {
   // No relocation info should be pending while using db. db is used
   // to write pure data with no pointers and the constant pool should
   // be emitted before using db.
   ASSERT(num_pending_32_bit_reloc_info_ == 0);
   ASSERT(num_pending_64_bit_reloc_info_ == 0);
   CheckBuffer();
(...skipping 15 matching lines...)


 void Assembler::emit_code_stub_address(Code* stub) {
   CheckBuffer();
   *reinterpret_cast<uint32_t*>(pc_) =
       reinterpret_cast<uint32_t>(stub->instruction_start());
   pc_ += sizeof(uint32_t);
 }


-void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
+void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data,
+                                UseConstantPoolMode mode) {
+  // We do not try to reuse pool constants.
   RelocInfo rinfo(pc_, rmode, data, NULL);
-  RecordRelocInfo(rinfo);
-}
-
-
-void Assembler::RecordRelocInfo(const RelocInfo& rinfo) {
+  if (((rmode >= RelocInfo::JS_RETURN) &&
+       (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
+      (rmode == RelocInfo::CONST_POOL) ||
+      mode == DONT_USE_CONSTANT_POOL) {
+    // Adjust code for new modes.
+    ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
+           || RelocInfo::IsJSReturn(rmode)
+           || RelocInfo::IsComment(rmode)
+           || RelocInfo::IsPosition(rmode)
+           || RelocInfo::IsConstPool(rmode)
+           || mode == DONT_USE_CONSTANT_POOL);
+    // These modes do not need an entry in the constant pool.
+  } else {
+    RecordRelocInfoConstantPoolEntryHelper(rinfo);
+  }
   if (!RelocInfo::IsNone(rinfo.rmode())) {
     // Don't record external references unless the heap will be serialized.
-    if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) {
+    if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
 #ifdef DEBUG
       if (!Serializer::enabled()) {
         Serializer::TooLateToEnableNow();
       }
 #endif
       if (!Serializer::enabled() && !emit_debug_code()) {
         return;
       }
     }
     ASSERT(buffer_space() >= kMaxRelocSize);  // too late to grow buffer here
-    if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) {
-      RelocInfo reloc_info_with_ast_id(rinfo.pc(),
-                                       rinfo.rmode(),
+    if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
+      RelocInfo reloc_info_with_ast_id(pc_,
+                                       rmode,
                                        RecordedAstId().ToInt(),
                                        NULL);
       ClearRecordedAstId();
       reloc_info_writer.Write(&reloc_info_with_ast_id);
     } else {
       reloc_info_writer.Write(&rinfo);
     }
   }
 }


-void Assembler::ConstantPoolAddEntry(const RelocInfo& rinfo) {
-  if (FLAG_enable_ool_constant_pool) {
-    constant_pool_builder_.AddEntry(this, rinfo);
-  } else {
-    if (rinfo.rmode() == RelocInfo::NONE64) {
-      ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo);
-      if (num_pending_64_bit_reloc_info_ == 0) {
-        first_const_pool_64_use_ = pc_offset();
-      }
-      pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
-    } else {
-      ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
-      if (num_pending_32_bit_reloc_info_ == 0) {
-        first_const_pool_32_use_ = pc_offset();
-      }
-      pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
-    }
-    // Make sure the constant pool is not emitted in place of the next
-    // instruction for which we just recorded relocation info.
-    BlockConstPoolFor(1);
-  }
+void Assembler::RecordRelocInfo(double data) {
+  // We do not try to reuse pool constants.
+  RelocInfo rinfo(pc_, data);
+  RecordRelocInfoConstantPoolEntryHelper(rinfo);
+}
+
+
+void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) {
+  if (rinfo.rmode() == RelocInfo::NONE64) {
+    ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo);
+    if (num_pending_64_bit_reloc_info_ == 0) {
+      first_const_pool_64_use_ = pc_offset();
+    }
+    pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo;
+  } else {
+    ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo);
+    if (num_pending_32_bit_reloc_info_ == 0) {
+      first_const_pool_32_use_ = pc_offset();
+    }
+    pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo;
+  }
+  // Make sure the constant pool is not emitted in place of the next
+  // instruction for which we just recorded relocation info.
+  BlockConstPoolFor(1);
 }
3304 3277
3305 3278
3306 void Assembler::BlockConstPoolFor(int instructions) { 3279 void Assembler::BlockConstPoolFor(int instructions) {
3307 if (FLAG_enable_ool_constant_pool) {
3308 // Should be a no-op if using an out-of-line constant pool.
3309 ASSERT(num_pending_32_bit_reloc_info_ == 0);
3310 ASSERT(num_pending_64_bit_reloc_info_ == 0);
3311 return;
3312 }
3313
3314 int pc_limit = pc_offset() + instructions * kInstrSize; 3280 int pc_limit = pc_offset() + instructions * kInstrSize;
3315 if (no_const_pool_before_ < pc_limit) { 3281 if (no_const_pool_before_ < pc_limit) {
3316 // Max pool start (if we need a jump and an alignment). 3282 // Max pool start (if we need a jump and an alignment).
3317 #ifdef DEBUG 3283 #ifdef DEBUG
3318 int start = pc_limit + kInstrSize + 2 * kPointerSize; 3284 int start = pc_limit + kInstrSize + 2 * kPointerSize;
3319 ASSERT((num_pending_32_bit_reloc_info_ == 0) || 3285 ASSERT((num_pending_32_bit_reloc_info_ == 0) ||
3320 (start - first_const_pool_32_use_ + 3286 (start - first_const_pool_32_use_ +
3321 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); 3287 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool));
3322 ASSERT((num_pending_64_bit_reloc_info_ == 0) || 3288 ASSERT((num_pending_64_bit_reloc_info_ == 0) ||
3323 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); 3289 (start - first_const_pool_64_use_ < kMaxDistToFPPool));
3324 #endif 3290 #endif
3325 no_const_pool_before_ = pc_limit; 3291 no_const_pool_before_ = pc_limit;
3326 } 3292 }
3327 3293
3328 if (next_buffer_check_ < no_const_pool_before_) { 3294 if (next_buffer_check_ < no_const_pool_before_) {
3329 next_buffer_check_ = no_const_pool_before_; 3295 next_buffer_check_ = no_const_pool_before_;
3330 } 3296 }
3331 } 3297 }
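
For context: BlockConstPoolFor(n) simply pushes no_const_pool_before_ past the next n instructions, so CheckConstPool cannot dump the pool inside a protected sequence (such as the movw/movt pair recorded earlier). A standalone sketch of that arithmetic with invented offsets; kInstrSize is 4 bytes on ARM:

    #include <cstdio>

    int main() {
      const int kInstrSize = 4;
      int pc_offset = 1000;           // current emission offset (made up)
      int no_const_pool_before = 0;   // pool may be emitted at/after this

      // BlockConstPoolFor(2): protect the next two instructions (movw + movt).
      int instructions = 2;
      int pc_limit = pc_offset + instructions * kInstrSize;  // 1008
      if (no_const_pool_before < pc_limit) no_const_pool_before = pc_limit;

      printf("pool blocked until offset %d\n", no_const_pool_before);  // 1008
      return 0;
    }
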


 void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
-  if (FLAG_enable_ool_constant_pool) {
-    // Should be a no-op if using an out-of-line constant pool.
-    ASSERT(num_pending_32_bit_reloc_info_ == 0);
-    ASSERT(num_pending_64_bit_reloc_info_ == 0);
-    return;
-  }
-
   // Some short sequences of instructions mustn't be broken up by constant pool
   // emission; such sequences are protected by calls to BlockConstPoolFor and
   // BlockConstPoolScope.
   if (is_const_pool_blocked()) {
     // Something is wrong if emission is forced and blocked at the same time.
     ASSERT(!force_emit);
     return;
   }

   // There is nothing to do if there are no pending constant pool entries.
(...skipping 177 matching lines...)
       bind(&after_pool);
     }
   }

   // Since a constant pool was just emitted, move the check offset forward by
   // the standard interval.
   next_buffer_check_ = pc_offset() + kCheckPoolInterval;
 }


-MaybeObject* Assembler::AllocateConstantPool(Heap* heap) {
-  ASSERT(FLAG_enable_ool_constant_pool);
-  return constant_pool_builder_.Allocate(heap);
-}
-
-
-void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
-  ASSERT(FLAG_enable_ool_constant_pool);
-  constant_pool_builder_.Populate(this, constant_pool);
-}
-
-
-ConstantPoolBuilder::ConstantPoolBuilder()
-    : entries_(),
-      merged_indexes_(),
-      count_of_64bit_(0),
-      count_of_code_ptr_(0),
-      count_of_heap_ptr_(0),
-      count_of_32bit_(0) { }
-
-
-bool ConstantPoolBuilder::IsEmpty() {
-  return entries_.size() == 0;
-}
-
-
-bool ConstantPoolBuilder::Is64BitEntry(RelocInfo::Mode rmode) {
-  return rmode == RelocInfo::NONE64;
-}
-
-
-bool ConstantPoolBuilder::Is32BitEntry(RelocInfo::Mode rmode) {
-  return !RelocInfo::IsGCRelocMode(rmode) && rmode != RelocInfo::NONE64;
-}
-
-
-bool ConstantPoolBuilder::IsCodePtrEntry(RelocInfo::Mode rmode) {
-  return RelocInfo::IsCodeTarget(rmode);
-}
-
-
-bool ConstantPoolBuilder::IsHeapPtrEntry(RelocInfo::Mode rmode) {
-  return RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode);
-}
-
-
-void ConstantPoolBuilder::AddEntry(Assembler* assm,
-                                   const RelocInfo& rinfo) {
-  RelocInfo::Mode rmode = rinfo.rmode();
-  ASSERT(rmode != RelocInfo::COMMENT &&
-         rmode != RelocInfo::POSITION &&
-         rmode != RelocInfo::STATEMENT_POSITION &&
-         rmode != RelocInfo::CONST_POOL);
-
-
-  // Try to merge entries which won't be patched.
-  int merged_index = -1;
-  if (RelocInfo::IsNone(rmode) ||
-      (!Serializer::enabled() && (rmode >= RelocInfo::CELL))) {
-    size_t i;
-    std::vector<RelocInfo>::const_iterator it;
-    for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
-      if (RelocInfo::IsEqual(rinfo, *it)) {
-        merged_index = i;
-        break;
-      }
-    }
-  }
-
-  entries_.push_back(rinfo);
-  merged_indexes_.push_back(merged_index);
-
-  if (merged_index == -1) {
-    // Not merged, so update the appropriate count.
-    if (Is64BitEntry(rmode)) {
-      count_of_64bit_++;
-    } else if (Is32BitEntry(rmode)) {
-      count_of_32bit_++;
-    } else if (IsCodePtrEntry(rmode)) {
-      count_of_code_ptr_++;
-    } else {
-      ASSERT(IsHeapPtrEntry(rmode));
-      count_of_heap_ptr_++;
-    }
-  }
-
-  // Check if we still have room for another entry given Arm's ldr and vldr
-  // immediate offset range.
-  if (!(is_uint12(ConstantPoolArray::SizeFor(count_of_64bit_,
-                                             count_of_code_ptr_,
-                                             count_of_heap_ptr_,
-                                             count_of_32bit_))) &&
-      is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0))) {
-    assm->set_constant_pool_full();
-  }
-}
-
-
-void ConstantPoolBuilder::Relocate(int pc_delta) {
-  for (std::vector<RelocInfo>::iterator rinfo = entries_.begin();
-       rinfo != entries_.end(); rinfo++) {
-    ASSERT(rinfo->rmode() != RelocInfo::JS_RETURN);
-    rinfo->set_pc(rinfo->pc() + pc_delta);
-  }
-}
-
-
-MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap) {
-  if (IsEmpty()) {
-    return heap->empty_constant_pool_array();
-  } else {
-    return heap->AllocateConstantPoolArray(count_of_64bit_, count_of_code_ptr_,
-                                           count_of_heap_ptr_, count_of_32bit_);
-  }
-}
-
-
-void ConstantPoolBuilder::Populate(Assembler* assm,
-                                   ConstantPoolArray* constant_pool) {
-  ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_);
-  ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_);
-  ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_);
-  ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_);
-  ASSERT(entries_.size() == merged_indexes_.size());
-
-  int index_64bit = 0;
-  int index_code_ptr = count_of_64bit_;
-  int index_heap_ptr = count_of_64bit_ + count_of_code_ptr_;
-  int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_;
-
-  size_t i;
-  std::vector<RelocInfo>::const_iterator rinfo;
-  for (rinfo = entries_.begin(), i = 0; rinfo != entries_.end(); rinfo++, i++) {
-    RelocInfo::Mode rmode = rinfo->rmode();
-
-    // Update constant pool if necessary and get the entry's offset.
-    int offset;
-    if (merged_indexes_[i] == -1) {
-      if (Is64BitEntry(rmode)) {
-        offset = constant_pool->OffsetOfElementAt(index_64bit) - kHeapObjectTag;
-        constant_pool->set(index_64bit++, rinfo->data64());
-      } else if (Is32BitEntry(rmode)) {
-        offset = constant_pool->OffsetOfElementAt(index_32bit) - kHeapObjectTag;
-        constant_pool->set(index_32bit++, static_cast<int32_t>(rinfo->data()));
-      } else if (IsCodePtrEntry(rmode)) {
-        offset = constant_pool->OffsetOfElementAt(index_code_ptr) -
-            kHeapObjectTag;
-        constant_pool->set(index_code_ptr++,
-                           reinterpret_cast<Object *>(rinfo->data()));
-      } else {
-        ASSERT(IsHeapPtrEntry(rmode));
-        offset = constant_pool->OffsetOfElementAt(index_heap_ptr) -
-            kHeapObjectTag;
-        constant_pool->set(index_heap_ptr++,
-                           reinterpret_cast<Object *>(rinfo->data()));
-      }
-      merged_indexes_[i] = offset;  // Stash offset for merged entries.
-    } else {
-      size_t merged_index = static_cast<size_t>(merged_indexes_[i]);
-      ASSERT(merged_index < merged_indexes_.size() && merged_index < i);
-      offset = merged_indexes_[merged_index];
-    }
-
-    // Patch vldr/ldr instruction with correct offset.
-    Instr instr = assm->instr_at(rinfo->pc());
-    if (Is64BitEntry(rmode)) {
-      // Instruction to patch must be 'vldr rd, [pp, #0]'.
-      ASSERT((Assembler::IsVldrDPpImmediateOffset(instr) &&
-              Assembler::GetVldrDRegisterImmediateOffset(instr) == 0));
-      ASSERT(is_uint10(offset));
-      assm->instr_at_put(rinfo->pc(),
-          Assembler::SetVldrDRegisterImmediateOffset(instr, offset));
-    } else {
-      // Instruction to patch must be 'ldr rd, [pp, #0]'.
-      ASSERT((Assembler::IsLdrPpImmediateOffset(instr) &&
-              Assembler::GetLdrRegisterImmediateOffset(instr) == 0));
-      ASSERT(is_uint12(offset));
-      assm->instr_at_put(rinfo->pc(),
-          Assembler::SetLdrRegisterImmediateOffset(instr, offset));
-    }
-  }
-
-  ASSERT((index_64bit == count_of_64bit_) &&
-         (index_code_ptr == (index_64bit + count_of_code_ptr_)) &&
-         (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) &&
-         (index_32bit == (index_heap_ptr + count_of_32bit_)));
-}
-
-
 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM