OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 1075 matching lines...)
1086 if (fits_shifter(~imm32, rotate_imm, immed_8, NULL)) { | 1086 if (fits_shifter(~imm32, rotate_imm, immed_8, NULL)) { |
1087 *instr ^= kAndBicFlip; | 1087 *instr ^= kAndBicFlip; |
1088 return true; | 1088 return true; |
1089 } | 1089 } |
1090 } | 1090 } |
1091 } | 1091 } |
1092 } | 1092 } |
1093 return false; | 1093 return false; |
1094 } | 1094 } |
1095 | 1095 |
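For readers of the hunk above: kAndBicFlip rewrites an AND whose immediate cannot be encoded as a rotated 8-bit shifter operand into a BIC with the complemented immediate, which is why fits_shifter is retried on ~imm32. A minimal sketch of the underlying identity, with hypothetical operand values (not taken from the patch):

// Sketch of the complement trick behind kAndBicFlip.
#include <cassert>
#include <cstdint>

int main() {
  uint32_t rn = 0xDEADBEEFu;
  uint32_t imm = 0xFFFFFF00u;      // not encodable as a rotated 8-bit immediate
  uint32_t flipped = ~imm;         // 0x000000FF, which encodes directly
  uint32_t and_result = rn & imm;          // "and rd, rn, #imm"
  uint32_t bic_result = rn & ~flipped;     // "bic rd, rn, #~imm"
  assert(and_result == bic_result);        // identical result, so the opcode can be flipped
  return 0;
}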
| 1096 namespace { |
1096 | 1097 |
1097 // We have to use the temporary register for things that can be relocated even | 1098 // We have to use the temporary register for things that can be relocated even |
1098 // if they can be encoded in the ARM's 12 bits of immediate-offset instruction | 1099 // if they can be encoded in the ARM's 12 bits of immediate-offset instruction |
1099 // space. There is no guarantee that the relocated location can be similarly | 1100 // space. There is no guarantee that the relocated location can be similarly |
1100 // encoded. | 1101 // encoded. |
1101 bool Operand::must_output_reloc_info(const Assembler* assembler) const { | 1102 bool must_output_reloc_info(RelocInfo::Mode rmode, const Assembler* assembler) { |
1102 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { | 1103 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { |
1103 if (assembler != NULL && assembler->predictable_code_size()) return true; | 1104 if (assembler != NULL && assembler->predictable_code_size()) return true; |
1104 return assembler->serializer_enabled(); | 1105 return assembler->serializer_enabled(); |
1105 } else if (RelocInfo::IsNone(rmode_)) { | 1106 } else if (RelocInfo::IsNone(rmode)) { |
1106 return false; | 1107 return false; |
1107 } | 1108 } |
1108 return true; | 1109 return true; |
1109 } | 1110 } |
1110 | 1111 |
1111 | 1112 bool use_mov_immediate_load(const Operand& x, const Assembler* assembler) { |
1112 static bool use_mov_immediate_load(const Operand& x, | |
1113 const Assembler* assembler) { | |
1114 DCHECK(assembler != nullptr); | 1113 DCHECK(assembler != nullptr); |
1115 if (x.must_output_reloc_info(assembler)) { | 1114 if (x.must_output_reloc_info(assembler)) { |
1116 // Prefer constant pool if data is likely to be patched. | 1115 // Prefer constant pool if data is likely to be patched. |
1117 return false; | 1116 return false; |
1118 } else { | 1117 } else { |
1119 // Otherwise, use immediate load if movw / movt is available. | 1118 // Otherwise, use immediate load if movw / movt is available. |
1120 return CpuFeatures::IsSupported(ARMv7); | 1119 return CpuFeatures::IsSupported(ARMv7); |
1121 } | 1120 } |
1122 } | 1121 } |
1123 | 1122 |
| 1123 } // namespace |
| 1124 |
| 1125 bool Operand::must_output_reloc_info(const Assembler* assembler) const { |
| 1126 return v8::internal::must_output_reloc_info(rmode_, assembler); |
| 1127 } |
1124 | 1128 |
1125 int Operand::instructions_required(const Assembler* assembler, | 1129 int Operand::instructions_required(const Assembler* assembler, |
1126 Instr instr) const { | 1130 Instr instr) const { |
1127 DCHECK(assembler != nullptr); | 1131 DCHECK(assembler != nullptr); |
1128 if (rm_.is_valid()) return 1; | 1132 if (rm_.is_valid()) return 1; |
1129 uint32_t dummy1, dummy2; | 1133 uint32_t dummy1, dummy2; |
1130 if (must_output_reloc_info(assembler) || | 1134 if (must_output_reloc_info(assembler) || |
1131 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { | 1135 !fits_shifter(imm32_, &dummy1, &dummy2, &instr)) { |
1132 // The immediate operand cannot be encoded as a shifter operand, or use of | 1136 // The immediate operand cannot be encoded as a shifter operand, or use of |
1133 // constant pool is required. First account for the instructions required | 1137 // constant pool is required. First account for the instructions required |
1134 // for the constant pool or immediate load | 1138 // for the constant pool or immediate load |
1135 int instructions; | 1139 int instructions; |
1136 if (use_mov_immediate_load(*this, assembler)) { | 1140 if (use_mov_immediate_load(*this, assembler)) { |
1137 // A movw / movt or mov / orr immediate load. | 1141 DCHECK(CpuFeatures::IsSupported(ARMv7)); |
1138 instructions = CpuFeatures::IsSupported(ARMv7) ? 2 : 4; | 1142 // A movw / movt immediate load. |
| 1143 instructions = 2; |
1139 } else { | 1144 } else { |
1140 // A small constant pool load. | 1145 // A small constant pool load. |
1141 instructions = 1; | 1146 instructions = 1; |
1142 } | 1147 } |
1143 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set | 1148 if ((instr & ~kCondMask) != 13 * B21) { // mov, S not set |
1144 // For a mov or mvn instruction which doesn't set the condition | 1149 // For a mov or mvn instruction which doesn't set the condition |
1145 // code, the constant pool or immediate load is enough, otherwise we need | 1150 // code, the constant pool or immediate load is enough, otherwise we need |
1146 // to account for the actual instruction being requested. | 1151 // to account for the actual instruction being requested. |
1147 instructions += 1; | 1152 instructions += 1; |
1148 } | 1153 } |
1149 return instructions; | 1154 return instructions; |
1150 } else { | 1155 } else { |
1151 // No use of constant pool and the immediate operand can be encoded as a | 1156 // No use of constant pool and the immediate operand can be encoded as a |
1152 // shifter operand. | 1157 // shifter operand. |
1153 return 1; | 1158 return 1; |
1154 } | 1159 } |
1155 } | 1160 } |
1156 | 1161 |
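A rough stand-alone restatement of the counting in instructions_required, assuming ARMv7 is available and no relocation info is needed (fits_shifter_approx below is a simplified stand-in for the real fits_shifter, which also tries opcode flips):

#include <cstdint>
#include <cstdio>

static bool fits_shifter_approx(uint32_t imm) {
  // True if imm is an 8-bit value rotated right by an even amount.
  for (int rot = 0; rot < 32; rot += 2) {
    uint32_t rotated = rot == 0 ? imm : ((imm << rot) | (imm >> (32 - rot)));
    if (rotated <= 0xff) return true;
  }
  return false;
}

static int instructions_required_approx(uint32_t imm, bool is_plain_mov) {
  if (fits_shifter_approx(imm)) return 1;  // encodes directly as a shifter operand
  int n = 2;                               // movw + movt immediate load
  if (!is_plain_mov) n += 1;               // plus the actual instruction requested
  return n;
}

int main() {
  std::printf("%d\n", instructions_required_approx(0xff, false));        // 1: add r0, r1, #0xff
  std::printf("%d\n", instructions_required_approx(0x12345678, true));   // 2: mov r0, #0x12345678
  std::printf("%d\n", instructions_required_approx(0x12345678, false));  // 3: add r0, r1, #0x12345678
  return 0;
}

When relocation info is required, the movw/movt path is skipped and a single constant pool load (plus the non-mov instruction, if any) is counted instead.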
1157 | 1162 |
1158 void Assembler::move_32_bit_immediate(Register rd, | 1163 void Assembler::move_32_bit_immediate(Register rd, |
1159 const Operand& x, | 1164 const Operand& x, |
1160 Condition cond) { | 1165 Condition cond) { |
1161 uint32_t imm32 = static_cast<uint32_t>(x.imm32_); | |
1162 if (x.must_output_reloc_info(this)) { | |
1163 RecordRelocInfo(x.rmode_); | |
1164 } | |
1165 | |
1166 if (use_mov_immediate_load(x, this)) { | 1166 if (use_mov_immediate_load(x, this)) { |
1167 // use_mov_immediate_load should return false when we need to output | 1167 // use_mov_immediate_load should return false when we need to output |
1168 // relocation info, since we prefer the constant pool for values that | 1168 // relocation info, since we prefer the constant pool for values that |
1169 // can be patched. | 1169 // can be patched. |
1170 DCHECK(!x.must_output_reloc_info(this)); | 1170 DCHECK(!x.must_output_reloc_info(this)); |
1171 Register target = rd.code() == pc.code() ? ip : rd; | 1171 Register target = rd.code() == pc.code() ? ip : rd; |
1172 if (CpuFeatures::IsSupported(ARMv7)) { | 1172 if (CpuFeatures::IsSupported(ARMv7)) { |
| 1173 uint32_t imm32 = static_cast<uint32_t>(x.imm32_); |
1173 CpuFeatureScope scope(this, ARMv7); | 1174 CpuFeatureScope scope(this, ARMv7); |
1174 movw(target, imm32 & 0xffff, cond); | 1175 movw(target, imm32 & 0xffff, cond); |
1175 movt(target, imm32 >> 16, cond); | 1176 movt(target, imm32 >> 16, cond); |
1176 } | 1177 } |
1177 if (target.code() != rd.code()) { | 1178 if (target.code() != rd.code()) { |
1178 mov(rd, target, LeaveCC, cond); | 1179 mov(rd, target, LeaveCC, cond); |
1179 } | 1180 } |
1180 } else { | 1181 } else { |
1181 ConstantPoolEntry::Access access = | 1182 ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_); |
1182 ConstantPoolAddEntry(pc_offset(), x.rmode_, x.imm32_); | |
1183 DCHECK(access == ConstantPoolEntry::REGULAR); | |
1184 USE(access); | |
1185 ldr(rd, MemOperand(pc, 0), cond); | 1183 ldr(rd, MemOperand(pc, 0), cond); |
1186 } | 1184 } |
1187 } | 1185 } |
1188 | 1186 |
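A minimal sketch of the movw/movt split used in move_32_bit_immediate above; the constant is hypothetical, not a value from the patch:

#include <cassert>
#include <cstdint>

int main() {
  uint32_t imm32 = 0x12345678u;
  uint32_t low = imm32 & 0xffffu;        // movw operand: writes the low half, clears the top
  uint32_t high = imm32 >> 16;           // movt operand: writes the top half, keeps the low
  uint32_t reg = low;                    // after movw: 0x00005678
  reg = (reg & 0xffffu) | (high << 16);  // after movt: 0x12345678
  assert(reg == imm32);
  return 0;
}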
1189 | 1187 |
1190 void Assembler::addrmod1(Instr instr, | 1188 void Assembler::addrmod1(Instr instr, |
1191 Register rn, | 1189 Register rn, |
1192 Register rd, | 1190 Register rd, |
1193 const Operand& x) { | 1191 const Operand& x) { |
1194 CheckBuffer(); | 1192 CheckBuffer(); |
(...skipping 1523 matching lines...)
2718 // need to add/subtract without losing precision, which requires a | 2716 // need to add/subtract without losing precision, which requires a |
2719 // cookie value that Lithium is probably better positioned to | 2717 // cookie value that Lithium is probably better positioned to |
2720 // choose. | 2718 // choose. |
2721 // We could also add a few peepholes here like detecting 0.0 and | 2719 // We could also add a few peepholes here like detecting 0.0 and |
2722 // -0.0 and doing a vmov from the sequestered d14, forcing denorms | 2720 // -0.0 and doing a vmov from the sequestered d14, forcing denorms |
2723 // to zero (we set flush-to-zero), and normalizing NaN values. | 2721 // to zero (we set flush-to-zero), and normalizing NaN values. |
2724 // We could also detect redundant values. | 2722 // We could also detect redundant values. |
2725 // The code could also randomize the order of values, though | 2723 // The code could also randomize the order of values, though |
2726 // that's tricky because vldr has a limited reach. Furthermore | 2724 // that's tricky because vldr has a limited reach. Furthermore |
2727 // it breaks load locality. | 2725 // it breaks load locality. |
2728 ConstantPoolEntry::Access access = ConstantPoolAddEntry(pc_offset(), imm); | 2726 ConstantPoolAddEntry(pc_offset(), imm); |
2729 DCHECK(access == ConstantPoolEntry::REGULAR); | |
2730 USE(access); | |
2731 vldr(dst, MemOperand(pc, 0)); | 2727 vldr(dst, MemOperand(pc, 0)); |
2732 } else { | 2728 } else { |
2733 // Synthesise the double from ARM immediates. | 2729 // Synthesise the double from ARM immediates. |
2734 uint32_t lo, hi; | 2730 uint32_t lo, hi; |
2735 DoubleAsTwoUInt32(imm, &lo, &hi); | 2731 DoubleAsTwoUInt32(imm, &lo, &hi); |
2736 | 2732 |
2737 if (lo == hi) { | 2733 if (lo == hi) { |
2738 // Move the low and high parts of the double to a D register in one | 2734 // Move the low and high parts of the double to a D register in one |
2739 // instruction. | 2735 // instruction. |
2740 mov(ip, Operand(lo)); | 2736 mov(ip, Operand(lo)); |
(...skipping 2210 matching lines...)
4951 } | 4947 } |
4952 | 4948 |
4953 | 4949 |
4954 void Assembler::emit_code_stub_address(Code* stub) { | 4950 void Assembler::emit_code_stub_address(Code* stub) { |
4955 CheckBuffer(); | 4951 CheckBuffer(); |
4956 *reinterpret_cast<uint32_t*>(pc_) = | 4952 *reinterpret_cast<uint32_t*>(pc_) = |
4957 reinterpret_cast<uint32_t>(stub->instruction_start()); | 4953 reinterpret_cast<uint32_t>(stub->instruction_start()); |
4958 pc_ += sizeof(uint32_t); | 4954 pc_ += sizeof(uint32_t); |
4959 } | 4955 } |
4960 | 4956 |
4961 | |
4962 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 4957 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
4963 if (RelocInfo::IsNone(rmode) || | 4958 if (RelocInfo::IsNone(rmode) || |
4964 // Don't record external references unless the heap will be serialized. | 4959 // Don't record external references unless the heap will be serialized. |
4965 (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && | 4960 (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && |
4966 !emit_debug_code())) { | 4961 !emit_debug_code())) { |
4967 return; | 4962 return; |
4968 } | 4963 } |
4969 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here | 4964 DCHECK(buffer_space() >= kMaxRelocSize); // too late to grow buffer here |
4970 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { | 4965 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { |
4971 data = RecordedAstId().ToInt(); | 4966 data = RecordedAstId().ToInt(); |
4972 ClearRecordedAstId(); | 4967 ClearRecordedAstId(); |
4973 } | 4968 } |
4974 RelocInfo rinfo(pc_, rmode, data, NULL); | 4969 RelocInfo rinfo(pc_, rmode, data, NULL); |
4975 reloc_info_writer.Write(&rinfo); | 4970 reloc_info_writer.Write(&rinfo); |
4976 } | 4971 } |
4977 | 4972 |
4978 | 4973 void Assembler::ConstantPoolAddEntry(int position, RelocInfo::Mode rmode, |
4979 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position, | 4974 intptr_t value) { |
4980 RelocInfo::Mode rmode, | |
4981 intptr_t value) { | |
4982 DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::CONST_POOL && | 4975 DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::CONST_POOL && |
4983 rmode != RelocInfo::NONE64); | 4976 rmode != RelocInfo::NONE64); |
4984 bool sharing_ok = RelocInfo::IsNone(rmode) || | 4977 bool sharing_ok = RelocInfo::IsNone(rmode) || |
4985 !(serializer_enabled() || rmode < RelocInfo::CELL); | 4978 (rmode >= RelocInfo::FIRST_SHAREABLE_RELOC_MODE); |
4986 DCHECK(pending_32_bit_constants_.size() < kMaxNumPending32Constants); | 4979 DCHECK(pending_32_bit_constants_.size() < kMaxNumPending32Constants); |
4987 if (pending_32_bit_constants_.empty()) { | 4980 if (pending_32_bit_constants_.empty()) { |
4988 first_const_pool_32_use_ = position; | 4981 first_const_pool_32_use_ = position; |
4989 } | 4982 } |
4990 ConstantPoolEntry entry(position, value, sharing_ok); | 4983 ConstantPoolEntry entry( |
| 4984 position, value, |
| 4985 sharing_ok || (rmode == RelocInfo::CODE_TARGET && serializer_enabled())); |
| 4986 |
| 4987 bool shared = false; |
| 4988 if (sharing_ok) { |
| 4989 // Merge the constant, if possible. |
| 4990 for (size_t i = 0; i < pending_32_bit_constants_.size(); i++) { |
| 4991 ConstantPoolEntry& current_entry = pending_32_bit_constants_[i]; |
| 4992 if (!current_entry.sharing_ok()) continue; |
| 4993 if (entry.value() == current_entry.value()) { |
| 4994 entry.set_merged_index(i); |
| 4995 shared = true; |
| 4996 break; |
| 4997 } |
| 4998 } |
| 4999 } |
| 5000 |
| 5001 if (rmode == RelocInfo::CODE_TARGET && serializer_enabled()) { |
| 5002 // TODO(all): We only do this in the serializer, for now, because |
| 5003 // full-codegen relies on RelocInfo for translating PCs between full-codegen |
| 5004 // normal and debug code. |
| 5005 // Sharing entries here relies on canonicalized handles - without them, we |
| 5006 // will miss the optimisation opportunity. |
| 5007 Address handle_address = reinterpret_cast<Address>(value); |
| 5008 auto existing = handle_to_index_map_.find(handle_address); |
| 5009 if (existing != handle_to_index_map_.end()) { |
| 5010 int index = existing->second; |
| 5011 entry.set_merged_index(index); |
| 5012 shared = true; |
| 5013 } else { |
| 5014 // Keep track of this code handle. |
| 5015 handle_to_index_map_[handle_address] = |
| 5016 static_cast<int>(pending_32_bit_constants_.size()); |
| 5017 } |
| 5018 } |
| 5019 |
4991 pending_32_bit_constants_.push_back(entry); | 5020 pending_32_bit_constants_.push_back(entry); |
4992 | 5021 |
4993 // Make sure the constant pool is not emitted in place of the next | 5022 // Make sure the constant pool is not emitted in place of the next |
4994 // instruction for which we just recorded relocation info. | 5023 // instruction for which we just recorded relocation info. |
4995 BlockConstPoolFor(1); | 5024 BlockConstPoolFor(1); |
4996 return ConstantPoolEntry::REGULAR; | 5025 |
| 5026 // Emit relocation info. |
| 5027 if (must_output_reloc_info(rmode, this) && !shared) { |
| 5028 RecordRelocInfo(rmode); |
| 5029 } |
4997 } | 5030 } |
4998 | 5031 |
4999 | 5032 void Assembler::ConstantPoolAddEntry(int position, double value) { |
5000 ConstantPoolEntry::Access Assembler::ConstantPoolAddEntry(int position, | |
5001 double value) { | |
5002 DCHECK(pending_64_bit_constants_.size() < kMaxNumPending64Constants); | 5033 DCHECK(pending_64_bit_constants_.size() < kMaxNumPending64Constants); |
5003 if (pending_64_bit_constants_.empty()) { | 5034 if (pending_64_bit_constants_.empty()) { |
5004 first_const_pool_64_use_ = position; | 5035 first_const_pool_64_use_ = position; |
5005 } | 5036 } |
5006 ConstantPoolEntry entry(position, value); | 5037 ConstantPoolEntry entry(position, value); |
| 5038 |
| 5039 // Merge the constant, if possible. |
| 5040 for (size_t i = 0; i < pending_64_bit_constants_.size(); i++) { |
| 5041 ConstantPoolEntry& current_entry = pending_64_bit_constants_[i]; |
| 5042 DCHECK(current_entry.sharing_ok()); |
| 5043 if (entry.value() == current_entry.value()) { |
| 5044 entry.set_merged_index(i); |
| 5045 break; |
| 5046 } |
| 5047 } |
5007 pending_64_bit_constants_.push_back(entry); | 5048 pending_64_bit_constants_.push_back(entry); |
5008 | 5049 |
5009 // Make sure the constant pool is not emitted in place of the next | 5050 // Make sure the constant pool is not emitted in place of the next |
5010 // instruction for which we just recorded relocation info. | 5051 // instruction for which we just recorded relocation info. |
5011 BlockConstPoolFor(1); | 5052 BlockConstPoolFor(1); |
5012 return ConstantPoolEntry::REGULAR; | |
5013 } | 5053 } |
5014 | 5054 |
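A simplified illustration of the merge loops in both ConstantPoolAddEntry overloads above (Entry stands in for ConstantPoolEntry; the values and the plain double comparison are simplifications, not the real class):

#include <cstdio>
#include <initializer_list>
#include <vector>

struct Entry {
  double value;
  int merged_index;  // -1: gets its own pool slot; >= 0: reuses the slot of entry i
};

int main() {
  std::vector<Entry> pending;
  for (double v : {1.0, 2.0, 1.0}) {  // the second 1.0 should be merged
    Entry entry{v, -1};
    for (size_t i = 0; i < pending.size(); i++) {
      if (pending[i].value == entry.value) {
        entry.merged_index = static_cast<int>(i);
        break;
      }
    }
    pending.push_back(entry);
  }
  // Only unmerged entries are emitted into the pool; merged ones later patch
  // their load to point at the earlier slot, which is why CheckConstPool can
  // subtract kDoubleSize / kPointerSize per merged entry up front.
  for (const Entry& e : pending)
    std::printf("value=%g merged_index=%d\n", e.value, e.merged_index);
  return 0;
}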
5015 | 5055 |
5016 void Assembler::BlockConstPoolFor(int instructions) { | 5056 void Assembler::BlockConstPoolFor(int instructions) { |
5017 int pc_limit = pc_offset() + instructions * kInstrSize; | 5057 int pc_limit = pc_offset() + instructions * kInstrSize; |
5018 if (no_const_pool_before_ < pc_limit) { | 5058 if (no_const_pool_before_ < pc_limit) { |
5019 // Max pool start (if we need a jump and an alignment). | 5059 // Max pool start (if we need a jump and an alignment). |
5020 #ifdef DEBUG | 5060 #ifdef DEBUG |
5021 int start = pc_limit + kInstrSize + 2 * kPointerSize; | 5061 int start = pc_limit + kInstrSize + 2 * kPointerSize; |
5022 DCHECK(pending_32_bit_constants_.empty() || | 5062 DCHECK(pending_32_bit_constants_.empty() || |
(...skipping 80 matching lines...)
5103 need_emit = true; | 5143 need_emit = true; |
5104 } | 5144 } |
5105 } | 5145 } |
5106 if (!need_emit) return; | 5146 if (!need_emit) return; |
5107 } | 5147 } |
5108 | 5148 |
5109 // Deduplicate constants. | 5149 // Deduplicate constants. |
5110 int size_after_marker = estimated_size_after_marker; | 5150 int size_after_marker = estimated_size_after_marker; |
5111 for (size_t i = 0; i < pending_64_bit_constants_.size(); i++) { | 5151 for (size_t i = 0; i < pending_64_bit_constants_.size(); i++) { |
5112 ConstantPoolEntry& entry = pending_64_bit_constants_[i]; | 5152 ConstantPoolEntry& entry = pending_64_bit_constants_[i]; |
5113 DCHECK(!entry.is_merged()); | 5153 if (entry.is_merged()) size_after_marker -= kDoubleSize; |
5114 for (size_t j = 0; j < i; j++) { | |
5115 if (entry.value64() == pending_64_bit_constants_[j].value64()) { | |
5116 DCHECK(!pending_64_bit_constants_[j].is_merged()); | |
5117 entry.set_merged_index(j); | |
5118 size_after_marker -= kDoubleSize; | |
5119 break; | |
5120 } | |
5121 } | |
5122 } | 5154 } |
5123 | 5155 |
5124 for (size_t i = 0; i < pending_32_bit_constants_.size(); i++) { | 5156 for (size_t i = 0; i < pending_32_bit_constants_.size(); i++) { |
5125 ConstantPoolEntry& entry = pending_32_bit_constants_[i]; | 5157 ConstantPoolEntry& entry = pending_32_bit_constants_[i]; |
5126 DCHECK(!entry.is_merged()); | 5158 if (entry.is_merged()) size_after_marker -= kPointerSize; |
5127 if (!entry.sharing_ok()) continue; | |
5128 for (size_t j = 0; j < i; j++) { | |
5129 if (entry.value() == pending_32_bit_constants_[j].value()) { | |
5130 DCHECK(!pending_32_bit_constants_[j].is_merged()); | |
5131 entry.set_merged_index(j); | |
5132 size_after_marker -= kPointerSize; | |
5133 break; | |
5134 } | |
5135 } | |
5136 } | 5159 } |
5137 | 5160 |
5138 int size = size_up_to_marker + size_after_marker; | 5161 int size = size_up_to_marker + size_after_marker; |
5139 | 5162 |
5140 int needed_space = size + kGap; | 5163 int needed_space = size + kGap; |
5141 while (buffer_space() <= needed_space) GrowBuffer(); | 5164 while (buffer_space() <= needed_space) GrowBuffer(); |
5142 | 5165 |
5143 { | 5166 { |
5144 // Block recursive calls to CheckConstPool. | 5167 // Block recursive calls to CheckConstPool. |
5145 BlockConstPoolScope block_const_pool(this); | 5168 BlockConstPoolScope block_const_pool(this); |
(...skipping 78 matching lines...)
5224 } | 5247 } |
5225 instr_at_put(entry.position(), | 5248 instr_at_put(entry.position(), |
5226 SetLdrRegisterImmediateOffset(instr, delta)); | 5249 SetLdrRegisterImmediateOffset(instr, delta)); |
5227 if (!entry.is_merged()) { | 5250 if (!entry.is_merged()) { |
5228 emit(entry.value()); | 5251 emit(entry.value()); |
5229 } | 5252 } |
5230 } | 5253 } |
5231 | 5254 |
5232 pending_32_bit_constants_.clear(); | 5255 pending_32_bit_constants_.clear(); |
5233 pending_64_bit_constants_.clear(); | 5256 pending_64_bit_constants_.clear(); |
| 5257 handle_to_index_map_.clear(); |
| 5258 |
5234 first_const_pool_32_use_ = -1; | 5259 first_const_pool_32_use_ = -1; |
5235 first_const_pool_64_use_ = -1; | 5260 first_const_pool_64_use_ = -1; |
5236 | 5261 |
5237 RecordComment("]"); | 5262 RecordComment("]"); |
5238 | 5263 |
5239 DCHECK_EQ(size, SizeOfCodeGeneratedSince(&size_check)); | 5264 DCHECK_EQ(size, SizeOfCodeGeneratedSince(&size_check)); |
5240 | 5265 |
5241 if (after_pool.is_linked()) { | 5266 if (after_pool.is_linked()) { |
5242 bind(&after_pool); | 5267 bind(&after_pool); |
5243 } | 5268 } |
(...skipping 25 matching lines...)
5269 } | 5294 } |
5270 | 5295 |
5271 void PatchingAssembler::FlushICache(Isolate* isolate) { | 5296 void PatchingAssembler::FlushICache(Isolate* isolate) { |
5272 Assembler::FlushICache(isolate, buffer_, buffer_size_ - kGap); | 5297 Assembler::FlushICache(isolate, buffer_, buffer_size_ - kGap); |
5273 } | 5298 } |
5274 | 5299 |
5275 } // namespace internal | 5300 } // namespace internal |
5276 } // namespace v8 | 5301 } // namespace v8 |
5277 | 5302 |
5278 #endif // V8_TARGET_ARCH_ARM | 5303 #endif // V8_TARGET_ARCH_ARM |