OLD | NEW |
---|---|
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/interpreter/bytecode-array-writer.h" | 5 #include "src/interpreter/bytecode-array-writer.h" |
6 | 6 |
7 #include "src/api.h" | 7 #include "src/api.h" |
8 #include "src/interpreter/bytecode-label.h" | 8 #include "src/interpreter/bytecode-label.h" |
9 #include "src/interpreter/constant-array-builder.h" | 9 #include "src/interpreter/constant-array-builder.h" |
10 #include "src/log.h" | 10 #include "src/log.h" |
(...skipping 87 matching lines...) | |
98 const BytecodeNode* const node) { | 98 const BytecodeNode* const node) { |
99 int bytecode_offset = static_cast<int>(bytecodes()->size()); | 99 int bytecode_offset = static_cast<int>(bytecodes()->size()); |
100 const BytecodeSourceInfo& source_info = node->source_info(); | 100 const BytecodeSourceInfo& source_info = node->source_info(); |
101 if (source_info.is_valid()) { | 101 if (source_info.is_valid()) { |
102 source_position_table_builder()->AddPosition(bytecode_offset, | 102 source_position_table_builder()->AddPosition(bytecode_offset, |
103 source_info.source_position(), | 103 source_info.source_position(), |
104 source_info.is_statement()); | 104 source_info.is_statement()); |
105 } | 105 } |
106 } | 106 } |
107 | 107 |
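The function above is the writer's only interaction with source positions: whenever a node carries valid source info, it records a (bytecode offset, source position, is_statement) entry keyed by the current end of the bytecode stream. A minimal self-contained sketch of the bookkeeping being fed here, not V8's SourcePositionTableBuilder, just the shape of the data it accumulates:

    #include <vector>

    // Hypothetical stand-in for one recorded entry.
    struct PositionEntry {
      int bytecode_offset;   // offset of the bytecode being emitted
      int source_position;   // character offset in the original script
      bool is_statement;     // statement position (as opposed to an expression position)
    };

    class SketchPositionTable {
     public:
      void AddPosition(int bytecode_offset, int source_position, bool is_statement) {
        entries_.push_back({bytecode_offset, source_position, is_statement});
      }

     private:
      std::vector<PositionEntry> entries_;
    };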
108 namespace { | |
109 | |
110 OperandSize GetOperandSize(OperandTypeInfo operand_type_info, | |
111 uint32_t operand_value) { | |
112 switch (operand_type_info) { | |
113 case OperandTypeInfo::kScalableSignedByte: { | |
114 int32_t signed_operand = static_cast<int32_t>(operand_value); | |
115 return Bytecodes::SizeForSignedOperand(signed_operand); | |
116 } | |
117 case OperandTypeInfo::kScalableUnsignedByte: { | |
118 return Bytecodes::SizeForUnsignedOperand(operand_value); | |
119 } | |
120 case OperandTypeInfo::kFixedUnsignedByte: { | |
121 return OperandSize::kByte; | |
122 } | |
123 case OperandTypeInfo::kFixedUnsignedShort: { | |
124 return OperandSize::kByte; | |
rmcilroy 2016/06/09 15:16:35: kShort
oth 2016/06/09 19:59:07: For the daft logic here, it's effectively 1 so Byt
125 } | |
126 case OperandTypeInfo::kNone: | |
127 break; | |
128 } | |
129 UNREACHABLE(); | |
130 return OperandSize::kNone; | |
131 } | |
132 | |
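For context on the kShort comment above: the SizeFor*Operand helpers that GetOperandSize delegates to pick the narrowest operand width that can hold a value. A minimal sketch of that classification, assuming the usual int8/int16 and uint8/uint16 cut-offs; the enum below is a hypothetical stand-in for OperandSize:

    #include <cstdint>
    #include <limits>

    enum class SketchOperandSize { kByte, kShort, kQuad };

    // Narrowest size able to represent a signed operand value.
    SketchOperandSize SizeForSigned(int32_t value) {
      if (value >= std::numeric_limits<int8_t>::min() &&
          value <= std::numeric_limits<int8_t>::max()) {
        return SketchOperandSize::kByte;
      }
      if (value >= std::numeric_limits<int16_t>::min() &&
          value <= std::numeric_limits<int16_t>::max()) {
        return SketchOperandSize::kShort;
      }
      return SketchOperandSize::kQuad;
    }

    // Narrowest size able to represent an unsigned operand value.
    SketchOperandSize SizeForUnsigned(uint32_t value) {
      if (value <= std::numeric_limits<uint8_t>::max()) return SketchOperandSize::kByte;
      if (value <= std::numeric_limits<uint16_t>::max()) return SketchOperandSize::kShort;
      return SketchOperandSize::kQuad;
    }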
133 OperandScale GetOperandScale(const BytecodeNode* const node) { | |
134 const OperandTypeInfo* operand_type_infos = | |
135 Bytecodes::GetOperandTypeInfos(node->bytecode()); | |
rmcilroy 2016/06/09 15:16:35: Rather than doing this dance between Size and Scal
oth 2016/06/09 19:59:07: Done.
136 switch (node->operand_count()) { | |
137 case 4: | |
138 return Bytecodes::OperandSizesToScale( | |
139 GetOperandSize(operand_type_infos[0], node->operand(0)), | |
140 GetOperandSize(operand_type_infos[1], node->operand(1)), | |
141 GetOperandSize(operand_type_infos[2], node->operand(2)), | |
142 GetOperandSize(operand_type_infos[3], node->operand(3))); | |
143 break; | |
144 case 3: | |
145 return Bytecodes::OperandSizesToScale( | |
146 GetOperandSize(operand_type_infos[0], node->operand(0)), | |
147 GetOperandSize(operand_type_infos[1], node->operand(1)), | |
148 GetOperandSize(operand_type_infos[2], node->operand(2))); | |
149 break; | |
150 case 2: | |
151 return Bytecodes::OperandSizesToScale( | |
152 GetOperandSize(operand_type_infos[0], node->operand(0)), | |
153 GetOperandSize(operand_type_infos[1], node->operand(1))); | |
154 break; | |
155 case 1: | |
156 return Bytecodes::OperandSizesToScale( | |
157 GetOperandSize(operand_type_infos[0], node->operand(0))); | |
158 break; | |
159 case 0: | |
160 return OperandScale::kSingle; | |
161 } | |
162 UNREACHABLE(); | |
163 return OperandScale::kSingle; | |
164 } | |
165 | |
166 } // namespace | |
167 | |
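Taken together, GetOperandSize and GetOperandScale reduce to: compute the width each scalable operand needs, take the widest, and express it as a scaling factor; EmitBytecode below then prepends a Wide/ExtraWide prefix bytecode whenever that factor is larger than one. A minimal sketch of the reduction, with hypothetical enums standing in for OperandSize/OperandScale:

    #include <algorithm>
    #include <initializer_list>

    enum class SketchOperandSize { kByte = 1, kShort = 2, kQuad = 4 };
    enum class SketchOperandScale { kSingle = 1, kDouble = 2, kQuadruple = 4 };

    // The widest operand dictates the scale: one-byte operands need no
    // scaling, two-byte operands need a Wide (x2) prefix, four-byte operands
    // an ExtraWide (x4) prefix.
    SketchOperandScale ScaleForSizes(std::initializer_list<SketchOperandSize> sizes) {
      int widest = 1;
      for (SketchOperandSize size : sizes) {
        widest = std::max(widest, static_cast<int>(size));
      }
      return static_cast<SketchOperandScale>(widest);
    }

For example, ScaleForSizes({SketchOperandSize::kByte, SketchOperandSize::kShort}) yields kDouble, which in EmitBytecode's terms means a Wide prefix followed by the bytecode with each scalable operand written as two bytes.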
108 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) { | 168 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) { |
109 DCHECK_NE(node->bytecode(), Bytecode::kIllegal); | 169 DCHECK_NE(node->bytecode(), Bytecode::kIllegal); |
110 | 170 |
111 OperandScale operand_scale = node->operand_scale(); | 171 OperandScale operand_scale = GetOperandScale(node); |
112 if (operand_scale != OperandScale::kSingle) { | 172 if (operand_scale != OperandScale::kSingle) { |
113 Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale); | 173 Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale); |
114 bytecodes()->push_back(Bytecodes::ToByte(prefix)); | 174 bytecodes()->push_back(Bytecodes::ToByte(prefix)); |
115 } | 175 } |
116 | 176 |
117 Bytecode bytecode = node->bytecode(); | 177 Bytecode bytecode = node->bytecode(); |
118 bytecodes()->push_back(Bytecodes::ToByte(bytecode)); | 178 bytecodes()->push_back(Bytecodes::ToByte(bytecode)); |
119 | 179 |
120 int register_operand_bitmap = Bytecodes::GetRegisterOperandBitmap(bytecode); | 180 int register_operand_bitmap = Bytecodes::GetRegisterOperandBitmap(bytecode); |
121 const uint32_t* const operands = node->operands(); | 181 const uint32_t* const operands = node->operands(); |
(...skipping 31 matching lines...) | |
153 count = static_cast<int>(operands[i + 1]); | 213 count = static_cast<int>(operands[i + 1]); |
154 } else { | 214 } else { |
155 count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type); | 215 count = Bytecodes::GetNumberOfRegistersRepresentedBy(operand_type); |
156 } | 216 } |
157 Register reg = Register::FromOperand(static_cast<int32_t>(operands[i])); | 217 Register reg = Register::FromOperand(static_cast<int32_t>(operands[i])); |
158 max_register_count_ = std::max(max_register_count_, reg.index() + count); | 218 max_register_count_ = std::max(max_register_count_, reg.index() + count); |
159 } | 219 } |
160 } | 220 } |
161 } | 221 } |
162 | 222 |
163 // TODO(rmcilroy): This is the same as SignedOperand in BytecodeArrayBuilder. | |
164 // Once we move the scalable operand processing here remove the SignedOperand | |
165 // in BytecodeArrayBuilder. | |
166 static uint32_t SignedOperand(int value, OperandSize size) { | |
167 switch (size) { | |
168 case OperandSize::kByte: | |
169 return static_cast<uint8_t>(value & 0xff); | |
170 case OperandSize::kShort: | |
171 return static_cast<uint16_t>(value & 0xffff); | |
172 case OperandSize::kQuad: | |
173 return static_cast<uint32_t>(value); | |
174 case OperandSize::kNone: | |
175 UNREACHABLE(); | |
176 } | |
177 return 0; | |
178 } | |
179 | |
180 // static | 223 // static |
181 Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) { | 224 Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) { |
182 switch (jump_bytecode) { | 225 switch (jump_bytecode) { |
183 case Bytecode::kJump: | 226 case Bytecode::kJump: |
184 return Bytecode::kJumpConstant; | 227 return Bytecode::kJumpConstant; |
185 case Bytecode::kJumpIfTrue: | 228 case Bytecode::kJumpIfTrue: |
186 return Bytecode::kJumpIfTrueConstant; | 229 return Bytecode::kJumpIfTrueConstant; |
187 case Bytecode::kJumpIfFalse: | 230 case Bytecode::kJumpIfFalse: |
188 return Bytecode::kJumpIfFalseConstant; | 231 return Bytecode::kJumpIfFalseConstant; |
189 case Bytecode::kJumpIfToBooleanTrue: | 232 case Bytecode::kJumpIfToBooleanTrue: |
(...skipping 10 matching lines...) | |
200 UNREACHABLE(); | 243 UNREACHABLE(); |
201 return Bytecode::kIllegal; | 244 return Bytecode::kIllegal; |
202 } | 245 } |
203 } | 246 } |
204 | 247 |
205 void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location, | 248 void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location, |
206 int delta) { | 249 int delta) { |
207 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); | 250 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); |
208 DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode)); | 251 DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode)); |
209 size_t operand_location = jump_location + 1; | 252 size_t operand_location = jump_location + 1; |
210 DCHECK_EQ(bytecodes()->at(operand_location), 0); | 253 DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder); |
211 if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) { | 254 if (Bytecodes::SizeForSignedOperand(delta) == OperandSize::kByte) { |
212 // The jump fits within the range of an Imm operand, so cancel | 255 // The jump fits within the range of an Imm operand, so cancel |
213 // the reservation and jump directly. | 256 // the reservation and jump directly. |
214 constant_array_builder()->DiscardReservedEntry(OperandSize::kByte); | 257 constant_array_builder()->DiscardReservedEntry(OperandSize::kByte); |
215 bytecodes()->at(operand_location) = static_cast<uint8_t>(delta); | 258 bytecodes()->at(operand_location) = static_cast<uint8_t>(delta); |
216 } else { | 259 } else { |
217 // The jump does not fit within the range of an Imm operand, so | 260 // The jump does not fit within the range of an Imm operand, so |
218 // commit reservation putting the offset into the constant pool, | 261 // commit reservation putting the offset into the constant pool, |
219 // and update the jump instruction and operand. | 262 // and update the jump instruction and operand. |
220 size_t entry = constant_array_builder()->CommitReservedEntry( | 263 size_t entry = constant_array_builder()->CommitReservedEntry( |
221 OperandSize::kByte, handle(Smi::FromInt(delta), isolate())); | 264 OperandSize::kByte, handle(Smi::FromInt(delta), isolate())); |
222 DCHECK(Bytecodes::SizeForUnsignedOperand(entry) == OperandSize::kByte); | 265 DCHECK_LE(entry, kMaxUInt32); |
266 DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)), | |
267 OperandSize::kByte); | |
223 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); | 268 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); |
224 bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode); | 269 bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode); |
225 bytecodes()->at(operand_location) = static_cast<uint8_t>(entry); | 270 bytecodes()->at(operand_location) = static_cast<uint8_t>(entry); |
226 } | 271 } |
227 } | 272 } |
228 | 273 |
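PatchJumpWith8BitOperand above shows the pattern shared by all three Patch* helpers: a constant pool slot was reserved when the forward jump was emitted, and at patch time the writer either writes the signed delta straight into the placeholder (discarding the reservation) or, when the delta no longer fits the operand width, commits the reservation and rewrites the jump as its *Constant variant whose operand is the constant pool index. A compressed sketch of that decision for the one-byte case, with hypothetical stand-ins for the writer's collaborators:

    #include <cstdint>
    #include <vector>

    // Hypothetical constant pool: either the reservation is dropped or the
    // delta is committed and its index returned.
    struct SketchConstantPool {
      void DiscardReservedEntry() {}
      size_t CommitReservedEntry(int value) {
        pool.push_back(value);
        return pool.size() - 1;
      }
      std::vector<int> pool;
    };

    // Patch a jump whose reserved operand slot is a single byte at
    // operand_location (the jump opcode sits one byte earlier).
    void PatchByteJump(std::vector<uint8_t>& bytecodes, size_t operand_location,
                       int delta, SketchConstantPool& constants,
                       uint8_t constant_jump_opcode) {
      if (delta >= INT8_MIN && delta <= INT8_MAX) {
        // Delta fits the immediate operand: drop the reservation, patch in place.
        constants.DiscardReservedEntry();
        bytecodes[operand_location] = static_cast<uint8_t>(delta);
      } else {
        // Too far: park the delta in the constant pool and switch the jump to
        // the variant that reads its target from the pool.
        size_t entry = constants.CommitReservedEntry(delta);
        bytecodes[operand_location - 1] = constant_jump_opcode;
        bytecodes[operand_location] = static_cast<uint8_t>(entry);
      }
    }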
229 void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location, | 274 void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location, |
230 int delta) { | 275 int delta) { |
231 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); | 276 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); |
232 DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode)); | 277 DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode)); |
233 size_t operand_location = jump_location + 1; | 278 size_t operand_location = jump_location + 1; |
234 uint8_t operand_bytes[2]; | 279 uint8_t operand_bytes[2]; |
235 if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) { | 280 if (Bytecodes::SizeForSignedOperand(delta) <= OperandSize::kShort) { |
236 constant_array_builder()->DiscardReservedEntry(OperandSize::kShort); | 281 constant_array_builder()->DiscardReservedEntry(OperandSize::kShort); |
237 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta)); | 282 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta)); |
238 } else { | 283 } else { |
239 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); | 284 jump_bytecode = GetJumpWithConstantOperand(jump_bytecode); |
240 bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode); | 285 bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode); |
241 size_t entry = constant_array_builder()->CommitReservedEntry( | 286 size_t entry = constant_array_builder()->CommitReservedEntry( |
242 OperandSize::kShort, handle(Smi::FromInt(delta), isolate())); | 287 OperandSize::kShort, handle(Smi::FromInt(delta), isolate())); |
243 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry)); | 288 WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry)); |
244 } | 289 } |
245 DCHECK(bytecodes()->at(operand_location) == 0 && | 290 DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder && |
246 bytecodes()->at(operand_location + 1) == 0); | 291 bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder); |
247 bytecodes()->at(operand_location++) = operand_bytes[0]; | 292 bytecodes()->at(operand_location++) = operand_bytes[0]; |
248 bytecodes()->at(operand_location) = operand_bytes[1]; | 293 bytecodes()->at(operand_location) = operand_bytes[1]; |
249 } | 294 } |
250 | 295 |
251 void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location, | 296 void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location, |
252 int delta) { | 297 int delta) { |
253 DCHECK(Bytecodes::IsJumpImmediate( | 298 DCHECK(Bytecodes::IsJumpImmediate( |
254 Bytecodes::FromByte(bytecodes()->at(jump_location)))); | 299 Bytecodes::FromByte(bytecodes()->at(jump_location)))); |
255 constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad); | 300 constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad); |
256 uint8_t operand_bytes[4]; | 301 uint8_t operand_bytes[4]; |
257 WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta)); | 302 WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta)); |
258 size_t operand_location = jump_location + 1; | 303 size_t operand_location = jump_location + 1; |
259 DCHECK(bytecodes()->at(operand_location) == 0 && | 304 DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder && |
260 bytecodes()->at(operand_location + 1) == 0 && | 305 bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder && |
261 bytecodes()->at(operand_location + 2) == 0 && | 306 bytecodes()->at(operand_location + 2) == k8BitJumpPlaceholder && |
262 bytecodes()->at(operand_location + 3) == 0); | 307 bytecodes()->at(operand_location + 3) == k8BitJumpPlaceholder); |
263 bytecodes()->at(operand_location++) = operand_bytes[0]; | 308 bytecodes()->at(operand_location++) = operand_bytes[0]; |
264 bytecodes()->at(operand_location++) = operand_bytes[1]; | 309 bytecodes()->at(operand_location++) = operand_bytes[1]; |
265 bytecodes()->at(operand_location++) = operand_bytes[2]; | 310 bytecodes()->at(operand_location++) = operand_bytes[2]; |
266 bytecodes()->at(operand_location) = operand_bytes[3]; | 311 bytecodes()->at(operand_location) = operand_bytes[3]; |
267 } | 312 } |
268 | 313 |
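The 16- and 32-bit patch paths rely on WriteUnalignedUInt16/WriteUnalignedUInt32 to spread a multi-byte operand over consecutive byte slots of the bytecode vector without any alignment requirement. A sketch of what such a helper typically looks like, assuming a plain memcpy (and therefore native byte order); this is an assumption about the helper, not a quote of V8's utility:

    #include <cstdint>
    #include <cstring>

    // Write a 16-bit value to a possibly unaligned destination.
    inline void SketchWriteUnaligned16(uint8_t* dest, uint16_t value) {
      std::memcpy(dest, &value, sizeof(value));
    }

    // Write a 32-bit value to a possibly unaligned destination.
    inline void SketchWriteUnaligned32(uint8_t* dest, uint32_t value) {
      std::memcpy(dest, &value, sizeof(value));
    }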
269 void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) { | 314 void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) { |
270 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); | 315 Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location)); |
271 int delta = static_cast<int>(jump_target - jump_location); | 316 int delta = static_cast<int>(jump_target - jump_location); |
272 int prefix_offset = 0; | 317 int prefix_offset = 0; |
(...skipping 36 matching lines...) | |
309 CHECK_LE(current_offset, static_cast<size_t>(kMaxInt)); | 354 CHECK_LE(current_offset, static_cast<size_t>(kMaxInt)); |
310 // Label has been bound already so this is a backwards jump. | 355 // Label has been bound already so this is a backwards jump. |
311 size_t abs_delta = current_offset - label->offset(); | 356 size_t abs_delta = current_offset - label->offset(); |
312 int delta = -static_cast<int>(abs_delta); | 357 int delta = -static_cast<int>(abs_delta); |
313 OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta); | 358 OperandSize operand_size = Bytecodes::SizeForSignedOperand(delta); |
314 if (operand_size > OperandSize::kByte) { | 359 if (operand_size > OperandSize::kByte) { |
315 // Adjust for scaling byte prefix for wide jump offset. | 360 // Adjust for scaling byte prefix for wide jump offset. |
316 DCHECK_LE(delta, 0); | 361 DCHECK_LE(delta, 0); |
317 delta -= 1; | 362 delta -= 1; |
318 } | 363 } |
319 node->set_bytecode(node->bytecode(), SignedOperand(delta, operand_size), | 364 node->set_bytecode(node->bytecode(), delta); |
320 Bytecodes::OperandSizesToScale(operand_size)); | |
321 } else { | 365 } else { |
322 // The label has not yet been bound so this is a forward reference | 366 // The label has not yet been bound so this is a forward reference |
323 // that will be patched when the label is bound. We create a | 367 // that will be patched when the label is bound. We create a |
324 // reservation in the constant pool so the jump can be patched | 368 // reservation in the constant pool so the jump can be patched |
325 // when the label is bound. The reservation means the maximum size | 369 // when the label is bound. The reservation means the maximum size |
326 // of the operand for the constant is known and the jump can | 370 // of the operand for the constant is known and the jump can |
327 // be emitted into the bytecode stream with space for the operand. | 371 // be emitted into the bytecode stream with space for the operand. |
328 unbound_jumps_++; | 372 unbound_jumps_++; |
329 label->set_referrer(current_offset); | 373 label->set_referrer(current_offset); |
330 OperandSize reserved_operand_size = | 374 OperandSize reserved_operand_size = |
331 constant_array_builder()->CreateReservedEntry(); | 375 constant_array_builder()->CreateReservedEntry(); |
332 OperandScale operand_scale = | 376 switch (reserved_operand_size) { |
333 Bytecodes::OperandSizesToScale(reserved_operand_size); | 377 case OperandSize::kNone: |
334 node->set_bytecode(node->bytecode(), 0, operand_scale); | 378 UNREACHABLE(); |
379 break; | |
380 case OperandSize::kByte: | |
381 node->set_bytecode(node->bytecode(), k8BitJumpPlaceholder); | |
382 break; | |
383 case OperandSize::kShort: | |
384 node->set_bytecode(node->bytecode(), k16BitJumpPlaceholder); | |
385 break; | |
386 case OperandSize::kQuad: | |
387 node->set_bytecode(node->bytecode(), k32BitJumpPlaceholder); | |
388 break; | |
389 } | |
335 } | 390 } |
336 EmitBytecode(node); | 391 EmitBytecode(node); |
337 } | 392 } |
338 | 393 |
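One subtle line in the backward-jump branch above (in the jump-emitting function whose signature falls in the elided lines) is the delta -= 1 adjustment: when the offset needs more than one byte, the emitted jump gains a Wide/ExtraWide prefix, so the jump bytecode itself ends up one byte further from its target and the negative delta has to reach one byte further back. A small worked sketch of that computation; the offsets in main() are made up for illustration:

    #include <cassert>
    #include <cstddef>

    // Backward jump delta, adjusted for a scaling prefix byte when the
    // operand is wider than one byte (mirrors the logic above).
    int BackwardJumpDelta(size_t current_offset, size_t label_offset,
                          bool needs_prefix) {
      int delta = -static_cast<int>(current_offset - label_offset);
      if (needs_prefix) {
        // A Wide/ExtraWide prefix will precede the jump bytecode, so the jump
        // must travel one extra byte backwards.
        delta -= 1;
      }
      return delta;
    }

    int main() {
      // Loop header bound at offset 10, jump emitted at offset 500: -490 does
      // not fit in a signed byte, so a prefix is needed and the delta is -491.
      assert(BackwardJumpDelta(500, 10, true) == -491);
      return 0;
    }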
339 } // namespace interpreter | 394 } // namespace interpreter |
340 } // namespace internal | 395 } // namespace internal |
341 } // namespace v8 | 396 } // namespace v8 |