OLD | NEW |
---|---|
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 275 matching lines...) | |
286 } | 286 } |
287 | 287 |
288 | 288 |
289 // ----------------------------------------------------------------------------- | 289 // ----------------------------------------------------------------------------- |
290 // Implementation of RelocInfo | 290 // Implementation of RelocInfo |
291 | 291 |
292 const int RelocInfo::kApplyMask = 0; | 292 const int RelocInfo::kApplyMask = 0; |
293 | 293 |
294 | 294 |
295 bool RelocInfo::IsCodedSpecially() { | 295 bool RelocInfo::IsCodedSpecially() { |
296 // The deserializer needs to know whether a pointer is specially coded. Being | 296 // The deserializer needs to know whether a pointer is specially coded. Being |
ulan
2014/03/18 12:27:51
"Being"
rmcilroy
2014/03/18 15:14:35
Done.
| |
297 // specially coded on ARM means that it is a movw/movt instruction. We don't | 297 // specially coded on ARM means that it is a movw/movt instruction, or is an |
298 // generate those yet. | 298 // out-of-line constant pool entry. These only occur if |
299 return false; | 299 // FLAG_enable_ool_constant_pool is true. |
300 return FLAG_enable_ool_constant_pool; | |
300 } | 301 } |
301 | 302 |
302 | 303 |
303 bool RelocInfo::IsInConstantPool() { | 304 bool RelocInfo::IsInConstantPool() { |
304 return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)); | 305 if (FLAG_enable_ool_constant_pool) { |
306 return Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc_)); | |
307 } else { | |
308 return Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)); | |
309 } | |
305 } | 310 } |
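For orientation (not part of the CL itself): the dispatch above distinguishes pc-relative loads from pp-relative ones purely by mask/pattern tests on the instruction bits, where pp is the out-of-line constant pool pointer register (r8). The standalone sketch below reproduces the relevant constants with simplified stand-ins for V8's B16/B20/B24 definitions and checks two hand-assembled instructions; it is illustrative only.

```cpp
#include <cstdint>
#include <cstdio>

typedef uint32_t Instr;

// Simplified stand-ins for V8's bit constants: Bn == 1u << n.
const Instr B16 = 1u << 16, B20 = 1u << 20, B24 = 1u << 24;
const Instr L = B20;               // load bit of a single data transfer
const int kRegister_pc_Code = 15;  // pc is r15
const int kRegister_r8_Code = 8;   // pp is r8

// The mask keeps bits 27-24 (instruction class, I and P bits), bits 22-20
// (B, W, L) and the base register Rn (19-16). Condition, Rd, the U bit and
// the 12-bit offset are masked out, so "ldr <Rd>, [base +/- offset_12]"
// matches for any Rd and any offset.
const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16;
const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16;
const Instr kLdrPpPattern = 5 * B24 | L | kRegister_r8_Code * B16;

bool IsLdrPcImmediateOffset(Instr instr) {
  return (instr & kLdrPCMask) == kLdrPCPattern;
}
bool IsLdrPpImmediateOffset(Instr instr) {
  return (instr & kLdrPCMask) == kLdrPpPattern;  // same mask, different base
}

int main() {
  Instr ldr_pc = 0xE59F0008;  // ldr r0, [pc, #8]
  Instr ldr_pp = 0xE5980008;  // ldr r0, [r8, #8]
  printf("%d %d\n", IsLdrPcImmediateOffset(ldr_pc), IsLdrPcImmediateOffset(ldr_pp));  // 1 0
  printf("%d %d\n", IsLdrPpImmediateOffset(ldr_pc), IsLdrPpImmediateOffset(ldr_pp));  // 0 1
  return 0;
}
```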
306 | 311 |
307 | 312 |
308 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { | 313 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { |
309 // Patch the code at the current address with the supplied instructions. | 314 // Patch the code at the current address with the supplied instructions. |
310 Instr* pc = reinterpret_cast<Instr*>(pc_); | 315 Instr* pc = reinterpret_cast<Instr*>(pc_); |
311 Instr* instr = reinterpret_cast<Instr*>(instructions); | 316 Instr* instr = reinterpret_cast<Instr*>(instructions); |
312 for (int i = 0; i < instruction_count; i++) { | 317 for (int i = 0; i < instruction_count; i++) { |
313 *(pc + i) = *(instr + i); | 318 *(pc + i) = *(instr + i); |
314 } | 319 } |
(...skipping 158 matching lines...) | |
473 al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16; | 478 al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16; |
474 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r)) | 479 // ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r)) |
475 // register r is not encoded. | 480 // register r is not encoded. |
476 const Instr kPopRegPattern = | 481 const Instr kPopRegPattern = |
477 al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16; | 482 al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16; |
478 // mov lr, pc | 483 // mov lr, pc |
479 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12; | 484 const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12; |
480 // ldr rd, [pc, #offset] | 485 // ldr rd, [pc, #offset] |
481 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16; | 486 const Instr kLdrPCMask = 15 * B24 | 7 * B20 | 15 * B16; |
482 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16; | 487 const Instr kLdrPCPattern = 5 * B24 | L | kRegister_pc_Code * B16; |
488 // ldr rd, [pp, #offset] | |
489 const Instr kLdrPpMask = 15 * B24 | 7 * B20 | 15 * B16; | |
490 const Instr kLdrPpPattern = 5 * B24 | L | kRegister_r8_Code * B16; | |
483 // vldr dd, [pc, #offset] | 491 // vldr dd, [pc, #offset] |
484 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; | 492 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; |
485 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8; | 493 const Instr kVldrDPCPattern = 13 * B24 | L | kRegister_pc_Code * B16 | 11 * B8; |
494 // vldr dd, [pp, #offset] | |
495 const Instr kVldrDPpMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8; | |
496 const Instr kVldrDPpPattern = 13 * B24 | L | kRegister_r8_Code * B16 | 11 * B8; | |
486 // blxcc rm | 497 // blxcc rm |
487 const Instr kBlxRegMask = | 498 const Instr kBlxRegMask = |
488 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4; | 499 15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4; |
489 const Instr kBlxRegPattern = | 500 const Instr kBlxRegPattern = |
490 B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX; | 501 B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX; |
491 const Instr kBlxIp = al | kBlxRegPattern | ip.code(); | 502 const Instr kBlxIp = al | kBlxRegPattern | ip.code(); |
492 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16; | 503 const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16; |
493 const Instr kMovMvnPattern = 0xd * B21; | 504 const Instr kMovMvnPattern = 0xd * B21; |
494 const Instr kMovMvnFlip = B22; | 505 const Instr kMovMvnFlip = B22; |
495 const Instr kMovLeaveCCMask = 0xdff * B16; | 506 const Instr kMovLeaveCCMask = 0xdff * B16; |
(...skipping 17 matching lines...) | |
513 const Instr kStrRegFpNegOffsetPattern = | 524 const Instr kStrRegFpNegOffsetPattern = |
514 al | B26 | NegOffset | kRegister_fp_Code * B16; | 525 al | B26 | NegOffset | kRegister_fp_Code * B16; |
515 const Instr kLdrStrInstrTypeMask = 0xffff0000; | 526 const Instr kLdrStrInstrTypeMask = 0xffff0000; |
516 const Instr kLdrStrInstrArgumentMask = 0x0000ffff; | 527 const Instr kLdrStrInstrArgumentMask = 0x0000ffff; |
517 const Instr kLdrStrOffsetMask = 0x00000fff; | 528 const Instr kLdrStrOffsetMask = 0x00000fff; |
518 | 529 |
519 | 530 |
520 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) | 531 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) |
521 : AssemblerBase(isolate, buffer, buffer_size), | 532 : AssemblerBase(isolate, buffer, buffer_size), |
522 recorded_ast_id_(TypeFeedbackId::None()), | 533 recorded_ast_id_(TypeFeedbackId::None()), |
534 constant_pool_builder_(), | |
523 positions_recorder_(this) { | 535 positions_recorder_(this) { |
524 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); | 536 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); |
525 num_pending_32_bit_reloc_info_ = 0; | 537 num_pending_32_bit_reloc_info_ = 0; |
526 num_pending_64_bit_reloc_info_ = 0; | 538 num_pending_64_bit_reloc_info_ = 0; |
527 next_buffer_check_ = 0; | 539 next_buffer_check_ = 0; |
528 const_pool_blocked_nesting_ = 0; | 540 const_pool_blocked_nesting_ = 0; |
529 no_const_pool_before_ = 0; | 541 no_const_pool_before_ = 0; |
530 first_const_pool_32_use_ = -1; | 542 first_const_pool_32_use_ = -1; |
531 first_const_pool_64_use_ = -1; | 543 first_const_pool_64_use_ = -1; |
532 last_bound_pos_ = 0; | 544 last_bound_pos_ = 0; |
533 constant_pool_available_ = !FLAG_enable_ool_constant_pool; | 545 constant_pool_available_ = !FLAG_enable_ool_constant_pool; |
534 constant_pool_full_ = false; | 546 constant_pool_full_ = false; |
535 ClearRecordedAstId(); | 547 ClearRecordedAstId(); |
536 } | 548 } |
537 | 549 |
538 | 550 |
539 Assembler::~Assembler() { | 551 Assembler::~Assembler() { |
540 ASSERT(const_pool_blocked_nesting_ == 0); | 552 ASSERT(const_pool_blocked_nesting_ == 0); |
541 } | 553 } |
542 | 554 |
543 | 555 |
544 void Assembler::GetCode(CodeDesc* desc) { | 556 void Assembler::GetCode(CodeDesc* desc) { |
545 // Emit constant pool if necessary. | 557 if (!FLAG_enable_ool_constant_pool) { |
546 CheckConstPool(true, false); | 558 // Emit constant pool if necessary. |
547 ASSERT(num_pending_32_bit_reloc_info_ == 0); | 559 CheckConstPool(true, false); |
548 ASSERT(num_pending_64_bit_reloc_info_ == 0); | 560 ASSERT(num_pending_32_bit_reloc_info_ == 0); |
549 | 561 ASSERT(num_pending_64_bit_reloc_info_ == 0); |
562 } | |
550 // Set up code descriptor. | 563 // Set up code descriptor. |
551 desc->buffer = buffer_; | 564 desc->buffer = buffer_; |
552 desc->buffer_size = buffer_size_; | 565 desc->buffer_size = buffer_size_; |
553 desc->instr_size = pc_offset(); | 566 desc->instr_size = pc_offset(); |
554 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); | 567 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); |
555 desc->origin = this; | 568 desc->origin = this; |
569 if (FLAG_enable_ool_constant_pool) { | |
570 desc->constant_pool_64bit_count = constant_pool_builder_.count_of_64bit(); | |
571 desc->constant_pool_code_ptr_count = | |
572 constant_pool_builder_.count_of_code_ptr(); | |
573 desc->constant_pool_heap_ptr_count = | |
574 constant_pool_builder_.count_of_heap_ptr(); | |
575 desc->constant_pool_32bit_count = constant_pool_builder_.count_of_32bit(); | |
576 } | |
556 } | 577 } |
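The four counts exported here size the ConstantPoolArray that will back the code object's pool. Populate (further down) confirms the layout order: 64-bit entries first, then code pointers, heap pointers and 32-bit values. As a rough worked example of the arithmetic (kHeaderSize is an assumption; V8's real ConstantPoolArray::SizeFor accounts for its actual object header):

```cpp
// Illustrative sizing only -- kHeaderSize is a placeholder, not V8's value.
const int kHeaderSize = 8;

int PoolSizeFor(int n64, int ncode, int nheap, int n32) {
  // 64-bit entries are 8 bytes; pointers and 32-bit values are 4 bytes
  // each on ARM32. Putting the 8-byte entries first keeps them aligned.
  return kHeaderSize + 8 * n64 + 4 * (ncode + nheap + n32);
}
```

ConstantPoolBuilder::AddEntry later tests such a size against ldr's 4KB (is_uint12) and vldr's 1KB (is_uint10) reach to decide when the pool must be declared full.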
557 | 578 |
558 | 579 |
559 void Assembler::Align(int m) { | 580 void Assembler::Align(int m) { |
560 ASSERT(m >= 4 && IsPowerOf2(m)); | 581 ASSERT(m >= 4 && IsPowerOf2(m)); |
561 while ((pc_offset() & (m - 1)) != 0) { | 582 while ((pc_offset() & (m - 1)) != 0) { |
562 nop(); | 583 nop(); |
563 } | 584 } |
564 } | 585 } |
565 | 586 |
(...skipping 156 matching lines...) | |
722 } | 743 } |
723 | 744 |
724 | 745 |
725 bool Assembler::IsLdrPcImmediateOffset(Instr instr) { | 746 bool Assembler::IsLdrPcImmediateOffset(Instr instr) { |
726 // Check the instruction is indeed a | 747 // Check the instruction is indeed a |
727 // ldr<cond> <Rd>, [pc +/- offset_12]. | 748 // ldr<cond> <Rd>, [pc +/- offset_12]. |
728 return (instr & kLdrPCMask) == kLdrPCPattern; | 749 return (instr & kLdrPCMask) == kLdrPCPattern; |
729 } | 750 } |
730 | 751 |
731 | 752 |
753 bool Assembler::IsLdrPpImmediateOffset(Instr instr) { | |
754 // Check the instruction is indeed a | |
755 // ldr<cond> <Rd>, [pp +/- offset_12]. | |
756 return (instr & kLdrPpMask) == kLdrPpPattern; | |
757 } | |
758 | |
759 | |
732 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) { | 760 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) { |
733 // Check the instruction is indeed a | 761 // Check the instruction is indeed a |
734 // vldr<cond> <Dd>, [pc +/- offset_10]. | 762 // vldr<cond> <Dd>, [pc +/- offset_10]. |
735 return (instr & kVldrDPCMask) == kVldrDPCPattern; | 763 return (instr & kVldrDPCMask) == kVldrDPCPattern; |
736 } | 764 } |
737 | 765 |
738 | 766 |
767 bool Assembler::IsVldrDPpImmediateOffset(Instr instr) { | |
768 // Check the instruction is indeed a | |
769 // vldr<cond> <Dd>, [pp +/- offset_10]. | |
770 return (instr & kVldrDPpMask) == kVldrDPpPattern; | |
771 } | |
772 | |
773 | |
739 bool Assembler::IsTstImmediate(Instr instr) { | 774 bool Assembler::IsTstImmediate(Instr instr) { |
740 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) == | 775 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask)) == |
741 (I | TST | S); | 776 (I | TST | S); |
742 } | 777 } |
743 | 778 |
744 | 779 |
745 bool Assembler::IsCmpRegister(Instr instr) { | 780 bool Assembler::IsCmpRegister(Instr instr) { |
746 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) == | 781 return (instr & (B27 | B26 | I | kOpCodeMask | S | kRdMask | B4)) == |
747 (CMP | S); | 782 (CMP | S); |
748 } | 783 } |
(...skipping 307 matching lines...) | |
1056 return Serializer::enabled(); | 1091 return Serializer::enabled(); |
1057 } else if (RelocInfo::IsNone(rmode_)) { | 1092 } else if (RelocInfo::IsNone(rmode_)) { |
1058 return false; | 1093 return false; |
1059 } | 1094 } |
1060 return true; | 1095 return true; |
1061 } | 1096 } |
1062 | 1097 |
1063 | 1098 |
1064 static bool use_mov_immediate_load(const Operand& x, | 1099 static bool use_mov_immediate_load(const Operand& x, |
1065 const Assembler* assembler) { | 1100 const Assembler* assembler) { |
1066 if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && | 1101 if (assembler != NULL && !assembler->can_use_constant_pool()) { |
1102 // If there is no constant pool available, we must use a mov immediate. | |
ulan
2014/03/18 12:27:51
Maybe ASSERT(CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS))
rmcilroy
2014/03/18 15:14:35
What we actually want is IsSupported(ARMv7). MOVW
| |
1103 return true; | |
1104 } else if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) && | |
1067 (assembler == NULL || !assembler->predictable_code_size())) { | 1105 (assembler == NULL || !assembler->predictable_code_size())) { |
1068 // Prefer movw / movt to constant pool if it is more efficient on the CPU. | 1106 // Prefer movw / movt to constant pool if it is more efficient on the CPU. |
1069 return true; | 1107 return true; |
1070 } else if (x.must_output_reloc_info(assembler)) { | 1108 } else if (x.must_output_reloc_info(assembler)) { |
1071 // Prefer constant pool if data is likely to be patched. | 1109 // Prefer constant pool if data is likely to be patched. |
1072 return false; | 1110 return false; |
1073 } else { | 1111 } else { |
1074 // Otherwise, use immediate load if movw / movt is available. | 1112 // Otherwise, use immediate load if movw / movt is available. |
1075 return CpuFeatures::IsSupported(ARMv7); | 1113 return CpuFeatures::IsSupported(ARMv7); |
1076 } | 1114 } |
(...skipping 22 matching lines...) | |
1099 // No use of constant pool and the immediate operand can be encoded as a | 1137 // No use of constant pool and the immediate operand can be encoded as a |
1100 // shifter operand. | 1138 // shifter operand. |
1101 return true; | 1139 return true; |
1102 } | 1140 } |
1103 } | 1141 } |
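The new first clause changes the priority order: when no constant pool is available (the out-of-line pool case before pp is usable), a movw/movt load is the only choice, regardless of CPU preference or patchability. A plain-boolean model of the decision, with parameters standing in for the CpuFeatures and Assembler queries (a sketch, not the V8 API):

```cpp
// Mirrors use_mov_immediate_load's branch order with plain booleans.
bool UseMovImmediateLoad(bool can_use_constant_pool,
                         bool movw_movt_is_fast,      // MOVW_MOVT_IMMEDIATE_LOADS
                         bool predictable_code_size,
                         bool needs_reloc_info,       // x.must_output_reloc_info()
                         bool has_armv7) {
  if (!can_use_constant_pool) return true;  // no pool: movw/movt is the only option
  if (movw_movt_is_fast && !predictable_code_size) return true;  // CPU prefers it
  if (needs_reloc_info) return false;       // patchable data prefers the pool
  return has_armv7;                         // otherwise movw/movt if the ISA has it
}
```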
1104 | 1142 |
1105 | 1143 |
1106 void Assembler::move_32_bit_immediate(Register rd, | 1144 void Assembler::move_32_bit_immediate(Register rd, |
1107 const Operand& x, | 1145 const Operand& x, |
1108 Condition cond) { | 1146 Condition cond) { |
1109 if (rd.code() != pc.code()) { | 1147 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); |
1110 if (use_mov_immediate_load(x, this)) { | 1148 if (x.must_output_reloc_info(this)) { |
1111 if (x.must_output_reloc_info(this)) { | 1149 RecordRelocInfo(rinfo); |
1112 RecordRelocInfo(x.rmode_, x.imm32_, DONT_USE_CONSTANT_POOL); | |
1113 // Make sure the movw/movt doesn't get separated. | |
1114 BlockConstPoolFor(2); | |
1115 } | |
1116 emit(cond | 0x30*B20 | rd.code()*B12 | | |
1117 EncodeMovwImmediate(x.imm32_ & 0xffff)); | |
1118 movt(rd, static_cast<uint32_t>(x.imm32_) >> 16, cond); | |
1119 return; | |
1120 } | |
1121 } | 1150 } |
1122 | 1151 |
1123 RecordRelocInfo(x.rmode_, x.imm32_, USE_CONSTANT_POOL); | 1152 if (use_mov_immediate_load(x, this)) { |
1124 ldr(rd, MemOperand(pc, 0), cond); | 1153 Register target = rd.code() == pc.code() ? ip : rd; |
1154 // TODO(rmcilroy): add ARMv6 support for immediate loads. | |
1155 ASSERT(CpuFeatures::IsSupported(ARMv7)); | |
1156 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { | |
1157 // Make sure the movw/movt doesn't get separated. | |
1158 BlockConstPoolFor(2); | |
1159 } | |
1160 emit(cond | 0x30*B20 | target.code()*B12 | | |
1161 EncodeMovwImmediate(x.imm32_ & 0xffff)); | |
1162 movt(target, static_cast<uint32_t>(x.imm32_) >> 16, cond); | |
1163 if (target.code() != rd.code()) { | |
1164 mov(rd, target, LeaveCC, cond); | |
1165 } | |
1166 } else { | |
1167 ASSERT(can_use_constant_pool()); | |
1168 ConstantPoolAddEntry(rinfo); | |
1169 ldr(rd, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0), cond); | |
1170 } | |
1125 } | 1171 } |
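The raw emit in the movw/movt path packs the low half of the immediate into ARM's split imm4:imm12 fields; movt then supplies the high half. The encoding can be reproduced in isolation (hand-rolled constants below; only EncodeMovwImmediate mirrors the V8 helper of the same name):

```cpp
#include <cstdint>
#include <cstdio>

// Splits a 16-bit value into ARM's imm4 (bits 19-16) and imm12 (bits 11-0)
// fields, as V8's EncodeMovwImmediate does.
uint32_t EncodeMovwImmediate(uint32_t imm16) {
  return ((imm16 & 0xF000u) << 4) | (imm16 & 0x0FFFu);
}

int main() {
  const uint32_t al = 0xEu << 28;  // 'always' condition
  const uint32_t rd = 0;           // r0
  uint32_t imm32 = 0x12345678;
  // movw rd, #lo16 ; movt rd, #hi16 -- a full 32-bit load with no memory
  // access, mirroring the two emits in move_32_bit_immediate.
  uint32_t movw = al | (0x30u << 20) | (rd << 12) | EncodeMovwImmediate(imm32 & 0xFFFFu);
  uint32_t movt = al | (0x34u << 20) | (rd << 12) | EncodeMovwImmediate(imm32 >> 16);
  printf("movw: 0x%08X\n", movw);  // 0xE3050678 == movw r0, #0x5678
  printf("movt: 0x%08X\n", movt);  // 0xE3410234 == movt r0, #0x1234
  return 0;
}
```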
1126 | 1172 |
1127 | 1173 |
1128 void Assembler::addrmod1(Instr instr, | 1174 void Assembler::addrmod1(Instr instr, |
1129 Register rn, | 1175 Register rn, |
1130 Register rd, | 1176 Register rd, |
1131 const Operand& x) { | 1177 const Operand& x) { |
1132 CheckBuffer(); | 1178 CheckBuffer(); |
1133 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); | 1179 ASSERT((instr & ~(kCondMask | kOpCodeMask | S)) == 0); |
1134 if (!x.rm_.is_valid()) { | 1180 if (!x.rm_.is_valid()) { |
(...skipping 1279 matching lines...) | |
2414 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { | 2460 if (CpuFeatures::IsSupported(VFP3) && FitsVMOVDoubleImmediate(imm, &enc)) { |
2415 // The double can be encoded in the instruction. | 2461 // The double can be encoded in the instruction. |
2416 // | 2462 // |
2417 // Dd = immediate | 2463 // Dd = immediate |
2418 // Instruction details available in ARM DDI 0406C.b, A8-936. | 2464 // Instruction details available in ARM DDI 0406C.b, A8-936. |
2419 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | | 2465 // cond(31-28) | 11101(27-23) | D(22) | 11(21-20) | imm4H(19-16) | |
2420 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) | 2466 // Vd(15-12) | 101(11-9) | sz=1(8) | imm4L(3-0) |
2421 int vd, d; | 2467 int vd, d; |
2422 dst.split_code(&vd, &d); | 2468 dst.split_code(&vd, &d); |
2423 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); | 2469 emit(al | 0x1D*B23 | d*B22 | 0x3*B20 | vd*B12 | 0x5*B9 | B8 | enc); |
2424 } else if (FLAG_enable_vldr_imm) { | 2470 } else if (FLAG_enable_vldr_imm && can_use_constant_pool()) { |
2425 // TODO(jfb) Temporarily turned off until we have constant blinding or | 2471 // TODO(jfb) Temporarily turned off until we have constant blinding or |
2426 // some equivalent mitigation: an attacker can otherwise control | 2472 // some equivalent mitigation: an attacker can otherwise control |
2427 // generated data which also happens to be executable, a Very Bad | 2473 // generated data which also happens to be executable, a Very Bad |
2428 // Thing indeed. | 2474 // Thing indeed. |
2429 // Blinding gets tricky because we don't have xor, we probably | 2475 // Blinding gets tricky because we don't have xor, we probably |
2430 // need to add/subtract without losing precision, which requires a | 2476 // need to add/subtract without losing precision, which requires a |
2431 // cookie value that Lithium is probably better positioned to | 2477 // cookie value that Lithium is probably better positioned to |
2432 // choose. | 2478 // choose. |
2433 // We could also add a few peepholes here like detecting 0.0 and | 2479 // We could also add a few peepholes here like detecting 0.0 and |
2434 // -0.0 and doing a vmov from the sequestered d14, forcing denorms | 2480 // -0.0 and doing a vmov from the sequestered d14, forcing denorms |
2435 // to zero (we set flush-to-zero), and normalizing NaN values. | 2481 // to zero (we set flush-to-zero), and normalizing NaN values. |
2436 // We could also detect redundant values. | 2482 // We could also detect redundant values. |
2437 // The code could also randomize the order of values, though | 2483 // The code could also randomize the order of values, though |
2438 // that's tricky because vldr has a limited reach. Furthermore | 2484 // that's tricky because vldr has a limited reach. Furthermore |
2439 // it breaks load locality. | 2485 // it breaks load locality. |
2440 RecordRelocInfo(imm); | 2486 RelocInfo rinfo(pc_, imm); |
2441 vldr(dst, MemOperand(pc, 0)); | 2487 ConstantPoolAddEntry(rinfo); |
2488 vldr(dst, MemOperand(FLAG_enable_ool_constant_pool ? pp : pc, 0)); | |
2442 } else { | 2489 } else { |
2443 // Synthesise the double from ARM immediates. | 2490 // Synthesise the double from ARM immediates. |
2444 uint32_t lo, hi; | 2491 uint32_t lo, hi; |
2445 DoubleAsTwoUInt32(imm, &lo, &hi); | 2492 DoubleAsTwoUInt32(imm, &lo, &hi); |
2446 | 2493 |
2447 if (scratch.is(no_reg)) { | 2494 if (scratch.is(no_reg)) { |
2448 if (dst.code() < 16) { | 2495 if (dst.code() < 16) { |
2449 const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code()); | 2496 const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code()); |
2450 // Move the low part of the double into the lower of the corresponsing S | 2497 // Move the low part of the double into the lower of the corresponsing S |
2451 // registers of D register dst. | 2498 // registers of D register dst. |
(...skipping 709 matching lines...) | |
3161 rinfo.rmode() != RelocInfo::POSITION); | 3208 rinfo.rmode() != RelocInfo::POSITION); |
3162 if (rinfo.rmode() != RelocInfo::JS_RETURN) { | 3209 if (rinfo.rmode() != RelocInfo::JS_RETURN) { |
3163 rinfo.set_pc(rinfo.pc() + pc_delta); | 3210 rinfo.set_pc(rinfo.pc() + pc_delta); |
3164 } | 3211 } |
3165 } | 3212 } |
3166 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { | 3213 for (int i = 0; i < num_pending_64_bit_reloc_info_; i++) { |
3167 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; | 3214 RelocInfo& rinfo = pending_64_bit_reloc_info_[i]; |
3168 ASSERT(rinfo.rmode() == RelocInfo::NONE64); | 3215 ASSERT(rinfo.rmode() == RelocInfo::NONE64); |
3169 rinfo.set_pc(rinfo.pc() + pc_delta); | 3216 rinfo.set_pc(rinfo.pc() + pc_delta); |
3170 } | 3217 } |
3218 constant_pool_builder_.Relocate(pc_delta); | |
3171 } | 3219 } |
3172 | 3220 |
3173 | 3221 |
3174 void Assembler::db(uint8_t data) { | 3222 void Assembler::db(uint8_t data) { |
3175 // No relocation info should be pending while using db. db is used | 3223 // No relocation info should be pending while using db. db is used |
3176 // to write pure data with no pointers and the constant pool should | 3224 // to write pure data with no pointers and the constant pool should |
3177 // be emitted before using db. | 3225 // be emitted before using db. |
3178 ASSERT(num_pending_32_bit_reloc_info_ == 0); | 3226 ASSERT(num_pending_32_bit_reloc_info_ == 0); |
3179 ASSERT(num_pending_64_bit_reloc_info_ == 0); | 3227 ASSERT(num_pending_64_bit_reloc_info_ == 0); |
3180 CheckBuffer(); | 3228 CheckBuffer(); |
(...skipping 15 matching lines...) | |
3196 | 3244 |
3197 | 3245 |
3198 void Assembler::emit_code_stub_address(Code* stub) { | 3246 void Assembler::emit_code_stub_address(Code* stub) { |
3199 CheckBuffer(); | 3247 CheckBuffer(); |
3200 *reinterpret_cast<uint32_t*>(pc_) = | 3248 *reinterpret_cast<uint32_t*>(pc_) = |
3201 reinterpret_cast<uint32_t>(stub->instruction_start()); | 3249 reinterpret_cast<uint32_t>(stub->instruction_start()); |
3202 pc_ += sizeof(uint32_t); | 3250 pc_ += sizeof(uint32_t); |
3203 } | 3251 } |
3204 | 3252 |
3205 | 3253 |
3206 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, | 3254 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
3207 UseConstantPoolMode mode) { | |
3208 // We do not try to reuse pool constants. | |
3209 RelocInfo rinfo(pc_, rmode, data, NULL); | 3255 RelocInfo rinfo(pc_, rmode, data, NULL); |
3210 if (((rmode >= RelocInfo::JS_RETURN) && | 3256 RecordRelocInfo(rinfo); |
3211 (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) || | 3257 } |
3212 (rmode == RelocInfo::CONST_POOL) || | 3258 |
3213 mode == DONT_USE_CONSTANT_POOL) { | 3259 |
3214 // Adjust code for new modes. | 3260 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { |
3215 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) | |
3216 || RelocInfo::IsJSReturn(rmode) | |
3217 || RelocInfo::IsComment(rmode) | |
3218 || RelocInfo::IsPosition(rmode) | |
3219 || RelocInfo::IsConstPool(rmode) | |
3220 || mode == DONT_USE_CONSTANT_POOL); | |
3221 // These modes do not need an entry in the constant pool. | |
3222 } else { | |
3223 RecordRelocInfoConstantPoolEntryHelper(rinfo); | |
3224 } | |
3225 if (!RelocInfo::IsNone(rinfo.rmode())) { | 3261 if (!RelocInfo::IsNone(rinfo.rmode())) { |
3226 // Don't record external references unless the heap will be serialized. | 3262 // Don't record external references unless the heap will be serialized. |
3227 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { | 3263 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { |
3228 #ifdef DEBUG | 3264 #ifdef DEBUG |
3229 if (!Serializer::enabled()) { | 3265 if (!Serializer::enabled()) { |
3230 Serializer::TooLateToEnableNow(); | 3266 Serializer::TooLateToEnableNow(); |
3231 } | 3267 } |
3232 #endif | 3268 #endif |
3233 if (!Serializer::enabled() && !emit_debug_code()) { | 3269 if (!Serializer::enabled() && !emit_debug_code()) { |
3234 return; | 3270 return; |
3235 } | 3271 } |
3236 } | 3272 } |
3237 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here | 3273 ASSERT(buffer_space() >= kMaxRelocSize); // too late to grow buffer here |
3238 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { | 3274 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { |
3239 RelocInfo reloc_info_with_ast_id(pc_, | 3275 RelocInfo reloc_info_with_ast_id(rinfo.pc(), |
3240 rmode, | 3276 rinfo.rmode(), |
3241 RecordedAstId().ToInt(), | 3277 RecordedAstId().ToInt(), |
3242 NULL); | 3278 NULL); |
3243 ClearRecordedAstId(); | 3279 ClearRecordedAstId(); |
3244 reloc_info_writer.Write(&reloc_info_with_ast_id); | 3280 reloc_info_writer.Write(&reloc_info_with_ast_id); |
3245 } else { | 3281 } else { |
3246 reloc_info_writer.Write(&rinfo); | 3282 reloc_info_writer.Write(&rinfo); |
3247 } | 3283 } |
3248 } | 3284 } |
3249 } | 3285 } |
3250 | 3286 |
3251 | 3287 |
3252 void Assembler::RecordRelocInfo(double data) { | 3288 void Assembler::ConstantPoolAddEntry(const RelocInfo& rinfo) { |
3253 // We do not try to reuse pool constants. | 3289 if (FLAG_enable_ool_constant_pool) { |
3254 RelocInfo rinfo(pc_, data); | 3290 constant_pool_builder_.AddEntry(this, rinfo); |
3255 RecordRelocInfoConstantPoolEntryHelper(rinfo); | 3291 } else { |
3256 } | 3292 if (rinfo.rmode() == RelocInfo::NONE64) { |
3257 | 3293 ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); |
3258 | 3294 if (num_pending_64_bit_reloc_info_ == 0) { |
3259 void Assembler::RecordRelocInfoConstantPoolEntryHelper(const RelocInfo& rinfo) { | 3295 first_const_pool_64_use_ = pc_offset(); |
3260 if (rinfo.rmode() == RelocInfo::NONE64) { | 3296 } |
3261 ASSERT(num_pending_64_bit_reloc_info_ < kMaxNumPending64RelocInfo); | 3297 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo; |
3262 if (num_pending_64_bit_reloc_info_ == 0) { | 3298 } else { |
3263 first_const_pool_64_use_ = pc_offset(); | 3299 ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo); |
3300 if (num_pending_32_bit_reloc_info_ == 0) { | |
3301 first_const_pool_32_use_ = pc_offset(); | |
3302 } | |
3303 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo; | |
3264 } | 3304 } |
3265 pending_64_bit_reloc_info_[num_pending_64_bit_reloc_info_++] = rinfo; | 3305 // Make sure the constant pool is not emitted in place of the next |
3266 } else { | 3306 // instruction for which we just recorded relocation info. |
3267 ASSERT(num_pending_32_bit_reloc_info_ < kMaxNumPending32RelocInfo); | 3307 BlockConstPoolFor(1); |
3268 if (num_pending_32_bit_reloc_info_ == 0) { | |
3269 first_const_pool_32_use_ = pc_offset(); | |
3270 } | |
3271 pending_32_bit_reloc_info_[num_pending_32_bit_reloc_info_++] = rinfo; | |
3272 } | 3308 } |
3273 // Make sure the constant pool is not emitted in place of the next | |
3274 // instruction for which we just recorded relocation info. | |
3275 BlockConstPoolFor(1); | |
3276 } | 3309 } |
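For the inline-pool branch, the bookkeeping pattern is worth spelling out: entries are buffered per width, and the pc offset of the first pending entry bounds how long emission can be deferred before a pc-relative load would go out of range. A minimal sketch of that bookkeeping (capacities illustrative, not V8's kMaxNumPending values):

```cpp
#include <cassert>

const int kMaxPending32 = 1024;  // illustrative capacity only
const int kMaxPending64 = 256;

struct PendingPools {
  int num32 = 0, num64 = 0;
  int first32_use = -1, first64_use = -1;  // pc offset of first pending entry

  void Add(bool is_64bit, int pc_offset) {
    if (is_64bit) {
      assert(num64 < kMaxPending64);
      if (num64++ == 0) first64_use = pc_offset;  // starts the range clock
    } else {
      assert(num32 < kMaxPending32);
      if (num32++ == 0) first32_use = pc_offset;
    }
  }
};
```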
3277 | 3310 |
3278 | 3311 |
3279 void Assembler::BlockConstPoolFor(int instructions) { | 3312 void Assembler::BlockConstPoolFor(int instructions) { |
3313 if (FLAG_enable_ool_constant_pool) return; | |
ulan
2014/03/18 12:27:51
If FLAG_enable_ool_constant_pool then this function
rmcilroy
2014/03/18 15:14:35
Done.
| |
3314 | |
3280 int pc_limit = pc_offset() + instructions * kInstrSize; | 3315 int pc_limit = pc_offset() + instructions * kInstrSize; |
3281 if (no_const_pool_before_ < pc_limit) { | 3316 if (no_const_pool_before_ < pc_limit) { |
3282 // Max pool start (if we need a jump and an alignment). | 3317 // Max pool start (if we need a jump and an alignment). |
3283 #ifdef DEBUG | 3318 #ifdef DEBUG |
3284 int start = pc_limit + kInstrSize + 2 * kPointerSize; | 3319 int start = pc_limit + kInstrSize + 2 * kPointerSize; |
3285 ASSERT((num_pending_32_bit_reloc_info_ == 0) || | 3320 ASSERT((num_pending_32_bit_reloc_info_ == 0) || |
3286 (start - first_const_pool_32_use_ + | 3321 (start - first_const_pool_32_use_ + |
3287 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); | 3322 num_pending_64_bit_reloc_info_ * kDoubleSize < kMaxDistToIntPool)); |
3288 ASSERT((num_pending_64_bit_reloc_info_ == 0) || | 3323 ASSERT((num_pending_64_bit_reloc_info_ == 0) || |
3289 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); | 3324 (start - first_const_pool_64_use_ < kMaxDistToFPPool)); |
3290 #endif | 3325 #endif |
3291 no_const_pool_before_ = pc_limit; | 3326 no_const_pool_before_ = pc_limit; |
3292 } | 3327 } |
3293 | 3328 |
3294 if (next_buffer_check_ < no_const_pool_before_) { | 3329 if (next_buffer_check_ < no_const_pool_before_) { |
3295 next_buffer_check_ = no_const_pool_before_; | 3330 next_buffer_check_ = no_const_pool_before_; |
3296 } | 3331 } |
3297 } | 3332 } |
3298 | 3333 |
3299 | 3334 |
3300 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { | 3335 void Assembler::CheckConstPool(bool force_emit, bool require_jump) { |
3336 if (FLAG_enable_ool_constant_pool) return; | |
ulan
2014/03/18 12:27:51
Same as above.
rmcilroy
2014/03/18 15:14:35
Done.
| |
3337 | |
3301 // Some short sequences of instructions mustn't be broken up by constant pool | 3338 // Some short sequences of instructions mustn't be broken up by constant pool |
3302 // emission; such sequences are protected by calls to BlockConstPoolFor and | 3339 // emission; such sequences are protected by calls to BlockConstPoolFor and |
3303 // BlockConstPoolScope. | 3340 // BlockConstPoolScope. |
3304 if (is_const_pool_blocked()) { | 3341 if (is_const_pool_blocked()) { |
3305 // Something is wrong if emission is forced and blocked at the same time. | 3342 // Something is wrong if emission is forced and blocked at the same time. |
3306 ASSERT(!force_emit); | 3343 ASSERT(!force_emit); |
3307 return; | 3344 return; |
3308 } | 3345 } |
3309 | 3346 |
3310 // There is nothing to do if there are no pending constant pool entries. | 3347 // There is nothing to do if there are no pending constant pool entries. |
(...skipping 177 matching lines...) | |
3488 bind(&after_pool); | 3525 bind(&after_pool); |
3489 } | 3526 } |
3490 } | 3527 } |
3491 | 3528 |
3492 // Since a constant pool was just emitted, move the check offset forward by | 3529 // Since a constant pool was just emitted, move the check offset forward by |
3493 // the standard interval. | 3530 // the standard interval. |
3494 next_buffer_check_ = pc_offset() + kCheckPoolInterval; | 3531 next_buffer_check_ = pc_offset() + kCheckPoolInterval; |
3495 } | 3532 } |
3496 | 3533 |
3497 | 3534 |
3535 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { | |
3536 ASSERT(FLAG_enable_ool_constant_pool); | |
3537 constant_pool_builder_.Populate(this, constant_pool); | |
3538 } | |
3539 | |
3540 | |
3541 ConstantPoolBuilder::ConstantPoolBuilder() | |
3542 : entries_(new RelocInfo[32]), | |
3543 merged_indexes_(new int[32]), | |
3544 buffer_size_(32), | |
3545 number_of_entries_(0), | |
3546 count_of_64bit_(0), | |
3547 count_of_code_ptr_(0), | |
3548 count_of_heap_ptr_(0), | |
3549 count_of_32bit_(0) { } | |
3550 | |
3551 | |
3552 ConstantPoolBuilder::~ConstantPoolBuilder() { | |
3553 delete[] entries_; | |
3554 delete[] merged_indexes_; | |
3555 } | |
3556 | |
3557 | |
3558 bool ConstantPoolBuilder::IsEmpty() { | |
3559 return number_of_entries_ == 0; | |
3560 } | |
3561 | |
3562 | |
3563 bool ConstantPoolBuilder::Is64BitEntry(RelocInfo::Mode rmode) { | |
3564 return rmode == RelocInfo::NONE64; | |
3565 } | |
3566 | |
3567 | |
3568 bool ConstantPoolBuilder::Is32BitEntry(RelocInfo::Mode rmode) { | |
3569 return !RelocInfo::IsGCRelocMode(rmode) && rmode != RelocInfo::NONE64; | |
3570 } | |
3571 | |
3572 | |
3573 bool ConstantPoolBuilder::IsCodePtrEntry(RelocInfo::Mode rmode) { | |
3574 return RelocInfo::IsCodeTarget(rmode); | |
3575 } | |
3576 | |
3577 | |
3578 bool ConstantPoolBuilder::IsHeapPtrEntry(RelocInfo::Mode rmode) { | |
3579 return RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode); | |
3580 } | |
3581 | |
3582 | |
3583 void ConstantPoolBuilder::AddEntry(Assembler* assm, | |
3584 const RelocInfo& rinfo) { | |
3585 RelocInfo::Mode rmode = rinfo.rmode(); | |
3586 ASSERT(rmode != RelocInfo::COMMENT && | |
3587 rmode != RelocInfo::POSITION && | |
3588 rmode != RelocInfo::STATEMENT_POSITION && | |
3589 rmode != RelocInfo::CONST_POOL); | |
3590 | |
3591 if (number_of_entries_ >= buffer_size_) { | |
3592 GrowBuffer(); | |
3593 } | |
3594 int entry_index = number_of_entries_++; | |
3595 entries_[entry_index] = rinfo; | |
3596 merged_indexes_[entry_index] = -1; | |
3597 | |
3598 // Try to merge entries which won't be patched. | |
3599 if (RelocInfo::IsNone(rmode) || | |
3600 (!Serializer::enabled() && (rmode >= RelocInfo::CELL))) { | |
3601 for (int i = 0; i < entry_index; i++) { | |
3602 if (RelocInfo::IsEqual(rinfo, entries_[i])) { | |
3603 merged_indexes_[entry_index] = i; | |
3604 break; | |
3605 } | |
3606 } | |
3607 } | |
3608 | |
3609 if (merged_indexes_[entry_index] == -1) { | |
3610 // Not merged, so update the appropriate count. | |
3611 if (Is64BitEntry(rmode)) { | |
3612 count_of_64bit_++; | |
3613 } else if (Is32BitEntry(rmode)) { | |
3614 count_of_32bit_++; | |
3615 } else if (IsCodePtrEntry(rmode)) { | |
3616 count_of_code_ptr_++; | |
3617 } else { | |
3618 ASSERT(IsHeapPtrEntry(rmode)); | |
3619 count_of_heap_ptr_++; | |
3620 } | |
3621 } | |
3622 | |
3623 // Check if we still have room for another entry given ARM's ldr and vldr | |
3624 // immediate offset range. | |
3625 if (!(is_uint12(ConstantPoolArray::SizeFor(count_of_64bit_, | |
3626 count_of_code_ptr_, | |
3627 count_of_heap_ptr_, | |
3628 count_of_32bit_))) && | |
3629 is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0))) { | |
3630 assm->set_constant_pool_full(); | |
3631 } | |
3632 } | |
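The merging step above is the interesting part of AddEntry: entries that will never be patched (rmode NONE, or immutable modes when not serializing) may share one pool slot, so only unmerged entries bump the per-type counts. The review suggests std::vector for the backing storage; a condensed sketch of the dedup idea on that basis (simplified types, raw values in place of RelocInfo):

```cpp
#include <cstdint>
#include <vector>

struct PoolEntry {
  uint64_t value;
  bool patchable;    // patched entries must keep their own slot
  int merged_index;  // -1 when the entry emits its own slot
};

class SimplePoolBuilder {
 public:
  void Add(uint64_t value, bool patchable) {
    int index = static_cast<int>(entries_.size());
    int merged = -1;
    if (!patchable) {
      // Only never-patched entries may be folded onto an earlier duplicate.
      for (int i = 0; i < index; i++) {
        if (!entries_[i].patchable && entries_[i].value == value) {
          merged = i;
          break;
        }
      }
    }
    entries_.push_back({value, patchable, merged});
    if (merged == -1) unique_count_++;  // only unique entries consume space
  }
  int unique_count() const { return unique_count_; }

 private:
  std::vector<PoolEntry> entries_;
  int unique_count_ = 0;
};
```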
3633 | |
3634 | |
3635 void ConstantPoolBuilder::GrowBuffer() { | |
3636 int new_buffer_size = buffer_size_ * 2; | |
ulan
2014/03/18 12:27:51
Since we already depend on STL, I'd suggest to use std::vector.
rmcilroy
2014/03/18 15:14:35
Done.
| |
3637 | |
3638 RelocInfo* new_entries = new RelocInfo[new_buffer_size]; | |
3639 OS::MemMove(new_entries, entries_, sizeof(RelocInfo) * buffer_size_); | |
3640 delete[] entries_; | |
3641 entries_ = new_entries; | |
3642 | |
3643 int* new_merged_indexes = new int[new_buffer_size]; | |
3644 OS::MemMove(new_merged_indexes, merged_indexes_, | |
3645 sizeof(merged_indexes_[0]) * buffer_size_); | |
3646 delete[] merged_indexes_; | |
3647 merged_indexes_ = new_merged_indexes; | |
3648 | |
3649 buffer_size_ = new_buffer_size; | |
3650 } | |
3651 | |
3652 | |
3653 void ConstantPoolBuilder::Relocate(int pc_delta) { | |
3654 for (int i = 0; i < number_of_entries_; i++) { | |
3655 RelocInfo& rinfo = entries_[i]; | |
3656 ASSERT(rinfo.rmode() != RelocInfo::JS_RETURN); | |
3657 rinfo.set_pc(rinfo.pc() + pc_delta); | |
3658 } | |
3659 } | |
3660 | |
3661 | |
3662 void ConstantPoolBuilder::Populate(Assembler* assm, | |
3663 ConstantPoolArray* constant_pool) { | |
3664 ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_); | |
3665 ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_); | |
3666 ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_); | |
3667 ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_); | |
3668 | |
3669 int index_64bit = 0; | |
3670 int index_code_ptr = count_of_64bit_; | |
3671 int index_heap_ptr = count_of_64bit_ + count_of_code_ptr_; | |
3672 int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_; | |
3673 | |
3674 for (int i = 0; i < number_of_entries_; i++) { | |
3675 RelocInfo& rinfo = entries_[i]; | |
3676 RelocInfo::Mode rmode = rinfo.rmode(); | |
3677 | |
3678 // Update constant pool if necessary and get the entry's offset. | |
3679 int offset; | |
3680 if (merged_indexes_[i] == -1) { | |
3681 if (Is64BitEntry(rmode)) { | |
3682 offset = constant_pool->OffsetOfElementAt(index_64bit) - kHeapObjectTag; | |
3683 constant_pool->set(index_64bit++, rinfo.data64()); | |
3684 } else if (Is32BitEntry(rmode)) { | |
3685 offset = constant_pool->OffsetOfElementAt(index_32bit) - kHeapObjectTag; | |
3686 constant_pool->set(index_32bit++, static_cast<int32_t>(rinfo.data())); | |
3687 } else if (IsCodePtrEntry(rmode)) { | |
3688 offset = constant_pool->OffsetOfElementAt(index_code_ptr) - | |
3689 kHeapObjectTag; | |
3690 constant_pool->set(index_code_ptr++, | |
3691 reinterpret_cast<Object *>(rinfo.data())); | |
3692 } else { | |
3693 ASSERT(IsHeapPtrEntry(rmode)); | |
3694 offset = constant_pool->OffsetOfElementAt(index_heap_ptr) - | |
3695 kHeapObjectTag; | |
3696 constant_pool->set(index_heap_ptr++, | |
3697 reinterpret_cast<Object *>(rinfo.data())); | |
3698 } | |
3699 merged_indexes_[i] = offset; // Stash offset for merged entries. | |
3700 } else { | |
3701 int merged_index = merged_indexes_[i]; | |
3702 ASSERT(merged_index < number_of_entries_ && merged_index < i); | |
3703 offset = merged_indexes_[merged_index]; | |
3704 } | |
3705 | |
3706 // Patch vldr/ldr instruction with correct offset. | |
3707 Instr instr = assm->instr_at(rinfo.pc()); | |
3708 if (Is64BitEntry(rmode)) { | |
3709 // Instruction to patch must be 'vldr rd, [pp, #0]'. | |
3710 ASSERT((Assembler::IsVldrDPpImmediateOffset(instr) && | |
3711 Assembler::GetVldrDRegisterImmediateOffset(instr) == 0)); | |
3712 ASSERT(is_uint10(offset)); | |
3713 assm->instr_at_put(rinfo.pc(), | |
3714 Assembler::SetVldrDRegisterImmediateOffset(instr, offset)); | |
3715 } else { | |
3716 // Instruction to patch must be 'ldr rd, [pp, #0]'. | |
3717 ASSERT((Assembler::IsLdrPpImmediateOffset(instr) && | |
3718 Assembler::GetLdrRegisterImmediateOffset(instr) == 0)); | |
3719 ASSERT(is_uint12(offset)); | |
3720 assm->instr_at_put(rinfo.pc(), | |
3721 Assembler::SetLdrRegisterImmediateOffset(instr, offset)); | |
3722 } | |
3723 } | |
3724 | |
3725 ASSERT((index_64bit == count_of_64bit_) && | |
3726 (index_code_ptr == (index_64bit + count_of_code_ptr_)) && | |
3727 (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) && | |
3728 (index_32bit == (index_heap_ptr + count_of_32bit_))); | |
3729 } | |
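The patching at the end of Populate is narrow field surgery: the placeholder loads were emitted with offset 0, and only the immediate field is rewritten once the entry's slot is known. ldr carries a 12-bit byte offset (4KB reach, hence is_uint12), while vldr carries an 8-bit word offset scaled by 4 (1KB reach, hence is_uint10 plus word alignment). A simplified sketch of the two setters; the names mirror V8's helpers, but the real ones also handle the U bit for negative offsets:

```cpp
#include <cassert>
#include <cstdint>

typedef uint32_t Instr;

// ldr/str immediate: unsigned 12-bit byte offset in bits 11-0.
Instr SetLdrRegisterImmediateOffset(Instr instr, int offset) {
  assert(offset >= 0 && offset < (1 << 12));
  return (instr & ~0xFFFu) | static_cast<Instr>(offset);
}

// vldr: 8-bit word offset in bits 7-0, scaled by 4 at decode time.
Instr SetVldrDRegisterImmediateOffset(Instr instr, int offset) {
  assert(offset >= 0 && offset < (1 << 10) && (offset & 3) == 0);
  return (instr & ~0xFFu) | static_cast<Instr>(offset >> 2);
}
```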
3730 | |
3731 | |
3498 } } // namespace v8::internal | 3732 } } // namespace v8::internal |
3499 | 3733 |
3500 #endif // V8_TARGET_ARCH_ARM | 3734 #endif // V8_TARGET_ARCH_ARM |