// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/interpreter-assembler.h"

#include <ostream>

#include "src/code-factory.h"
#include "src/compiler/graph.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/raw-machine-assembler.h"
#include "src/compiler/schedule.h"
#include "src/frames.h"
#include "src/interface-descriptors.h"
#include "src/interpreter/bytecodes.h"
#include "src/machine-type.h"
#include "src/macro-assembler.h"
#include "src/zone.h"

namespace v8 {
namespace internal {
namespace compiler {

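// Each InterpreterAssembler builds the TurboFan graph for a single bytecode
// handler. The handler is compiled against the interpreter dispatch calling
// convention, so the accumulator and context arrive as call parameters and
// are cached here as graph nodes.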
InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                           interpreter::Bytecode bytecode)
    : bytecode_(bytecode),
      raw_assembler_(new RawMachineAssembler(
          isolate, new (zone) Graph(zone),
          Linkage::GetInterpreterDispatchDescriptor(zone),
          MachineType::PointerRepresentation(),
          InstructionSelector::SupportedMachineOperatorFlags())),
      accumulator_(
          raw_assembler_->Parameter(Linkage::kInterpreterAccumulatorParameter)),
      context_(
          raw_assembler_->Parameter(Linkage::kInterpreterContextParameter)),
      code_generated_(false) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
  }
}

InterpreterAssembler::~InterpreterAssembler() {}


Handle<Code> InterpreterAssembler::GenerateCode() {
  DCHECK(!code_generated_);

  // Disallow empty handlers that never return.
  DCHECK_NE(0, graph()->end()->InputCount());

  const char* bytecode_name = interpreter::Bytecodes::ToString(bytecode_);
  Schedule* schedule = raw_assembler_->Export();
  Code::Flags flags = Code::ComputeFlags(Code::STUB);
  Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
      isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags,
      bytecode_name);

#ifdef ENABLE_DISASSEMBLER
  if (FLAG_trace_ignition_codegen) {
    OFStream os(stdout);
    code->Disassemble(bytecode_name, os);
    os << std::flush;
  }
#endif

  code_generated_ = true;
  return code;
}


Node* InterpreterAssembler::GetAccumulator() { return accumulator_; }


void InterpreterAssembler::SetAccumulator(Node* value) { accumulator_ = value; }


Node* InterpreterAssembler::GetContext() { return context_; }


void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, interpreter::Register::current_context());
  context_ = value;
}

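// The remaining values of the dispatch calling convention (bytecode offset,
// register file pointer, bytecode array, and dispatch table) are not cached;
// they are re-read from the incoming parameters on each use.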
Node* InterpreterAssembler::BytecodeOffset() {
  return raw_assembler_->Parameter(
      Linkage::kInterpreterBytecodeOffsetParameter);
}

Node* InterpreterAssembler::RegisterFileRawPointer() {
  return raw_assembler_->Parameter(Linkage::kInterpreterRegisterFileParameter);
}


Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  return raw_assembler_->Parameter(Linkage::kInterpreterBytecodeArrayParameter);
}


Node* InterpreterAssembler::DispatchTableRawPointer() {
  return raw_assembler_->Parameter(Linkage::kInterpreterDispatchTableParameter);
}


Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index));
}

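// Interpreter registers live in the interpreter's stack frame. Register
// indices are converted to byte offsets relative to the register file pointer
// by scaling with the pointer size. Register stores are done without a write
// barrier since the register file is a stack area, not a heap object.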
Node* InterpreterAssembler::LoadRegister(int offset) {
  return raw_assembler_->Load(MachineType::AnyTagged(),
                              RegisterFileRawPointer(), Int32Constant(offset));
}


Node* InterpreterAssembler::LoadRegister(interpreter::Register reg) {
  return LoadRegister(reg.ToOperand() << kPointerSizeLog2);
}


Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return WordShl(index, kPointerSizeLog2);
}


Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return raw_assembler_->Load(MachineType::AnyTagged(),
                              RegisterFileRawPointer(),
                              RegisterFrameOffset(reg_index));
}


Node* InterpreterAssembler::StoreRegister(Node* value, int offset) {
  return raw_assembler_->Store(MachineRepresentation::kTagged,
                               RegisterFileRawPointer(), Int32Constant(offset),
                               value, kNoWriteBarrier);
}


Node* InterpreterAssembler::StoreRegister(Node* value,
                                          interpreter::Register reg) {
  return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2);
}


Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  return raw_assembler_->Store(
      MachineRepresentation::kTagged, RegisterFileRawPointer(),
      RegisterFrameOffset(reg_index), value, kNoWriteBarrier);
}


Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, Int32Constant(-1));
}

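// Bytecode operands are read straight out of the BytecodeArray at
// BytecodeOffset() plus the operand's offset within the current bytecode.
// Separate helpers exist for unsigned byte, sign-extended byte, and 16-bit
// operands.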
Node* InterpreterAssembler::BytecodeOperand(int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kByte,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  return raw_assembler_->Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(),
      IntPtrAdd(BytecodeOffset(),
                Int32Constant(interpreter::Bytecodes::GetOperandOffset(
                    bytecode_, operand_index))));
}


Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kByte,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  Node* load = raw_assembler_->Load(
      MachineType::Int8(), BytecodeArrayTaggedPointer(),
      IntPtrAdd(BytecodeOffset(),
                Int32Constant(interpreter::Bytecodes::GetOperandOffset(
                    bytecode_, operand_index))));
  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = raw_assembler_->ChangeInt32ToInt64(load);
  }
  return load;
}

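// When the target cannot load unaligned 16-bit values, the short operand is
// assembled from two individual byte loads, combined according to the
// target's endianness.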
Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kShort,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  if (TargetSupportsUnalignedAccess()) {
    return raw_assembler_->Load(
        MachineType::Uint16(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(),
                  Int32Constant(interpreter::Bytecodes::GetOperandOffset(
                      bytecode_, operand_index))));
  } else {
    int offset =
        interpreter::Bytecodes::GetOperandOffset(bytecode_, operand_index);
    Node* first_byte = raw_assembler_->Load(
        MachineType::Uint8(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(offset)));
    Node* second_byte = raw_assembler_->Load(
        MachineType::Uint8(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(offset + 1)));
#if V8_TARGET_LITTLE_ENDIAN
    return raw_assembler_->WordOr(WordShl(second_byte, kBitsPerByte),
                                  first_byte);
#elif V8_TARGET_BIG_ENDIAN
    return raw_assembler_->WordOr(WordShl(first_byte, kBitsPerByte),
                                  second_byte);
#else
#error "Unknown Architecture"
#endif
  }
}


Node* InterpreterAssembler::BytecodeOperandShortSignExtended(
    int operand_index) {
  DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(interpreter::OperandSize::kShort,
            interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index));
  int operand_offset =
      interpreter::Bytecodes::GetOperandOffset(bytecode_, operand_index);
  Node* load;
  if (TargetSupportsUnalignedAccess()) {
    load = raw_assembler_->Load(
        MachineType::Int16(), BytecodeArrayTaggedPointer(),
        IntPtrAdd(BytecodeOffset(), Int32Constant(operand_offset)));
  } else {
#if V8_TARGET_LITTLE_ENDIAN
    Node* hi_byte_offset = Int32Constant(operand_offset + 1);
    Node* lo_byte_offset = Int32Constant(operand_offset);
#elif V8_TARGET_BIG_ENDIAN
    Node* hi_byte_offset = Int32Constant(operand_offset);
    Node* lo_byte_offset = Int32Constant(operand_offset + 1);
#else
#error "Unknown Architecture"
#endif
    Node* hi_byte =
        raw_assembler_->Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                             IntPtrAdd(BytecodeOffset(), hi_byte_offset));
    Node* lo_byte =
        raw_assembler_->Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                             IntPtrAdd(BytecodeOffset(), lo_byte_offset));
    hi_byte = raw_assembler_->Word32Shl(hi_byte, Int32Constant(kBitsPerByte));
    load = raw_assembler_->Word32Or(hi_byte, lo_byte);
  }

  // Ensure that we sign extend to full pointer size
  if (kPointerSize == 8) {
    load = raw_assembler_->ChangeInt32ToInt64(load);
  }
  return load;
}


Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  switch (interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)) {
    case interpreter::OperandSize::kByte:
      DCHECK_EQ(
          interpreter::OperandType::kRegCount8,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperand(operand_index);
    case interpreter::OperandSize::kShort:
      DCHECK_EQ(
          interpreter::OperandType::kRegCount16,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperandShort(operand_index);
    case interpreter::OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}


Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(interpreter::OperandType::kImm8,
            interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
  return BytecodeOperandSignExtended(operand_index);
}


Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  switch (interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)) {
    case interpreter::OperandSize::kByte:
      DCHECK_EQ(
          interpreter::OperandType::kIdx8,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperand(operand_index);
    case interpreter::OperandSize::kShort:
      DCHECK_EQ(
          interpreter::OperandType::kIdx16,
          interpreter::Bytecodes::GetOperandType(bytecode_, operand_index));
      return BytecodeOperandShort(operand_index);
    case interpreter::OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}


Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
  interpreter::OperandType operand_type =
      interpreter::Bytecodes::GetOperandType(bytecode_, operand_index);
  if (interpreter::Bytecodes::IsRegisterOperandType(operand_type)) {
    interpreter::OperandSize operand_size =
        interpreter::Bytecodes::SizeOfOperand(operand_type);
    if (operand_size == interpreter::OperandSize::kByte) {
      return BytecodeOperandSignExtended(operand_index);
    } else if (operand_size == interpreter::OperandSize::kShort) {
      return BytecodeOperandShortSignExtended(operand_index);
    }
  }
  UNREACHABLE();
  return nullptr;
}


Node* InterpreterAssembler::Int32Constant(int value) {
  return raw_assembler_->Int32Constant(value);
}


Node* InterpreterAssembler::IntPtrConstant(intptr_t value) {
  return raw_assembler_->IntPtrConstant(value);
}


Node* InterpreterAssembler::NumberConstant(double value) {
  return raw_assembler_->NumberConstant(value);
}


Node* InterpreterAssembler::HeapConstant(Handle<HeapObject> object) {
  return raw_assembler_->HeapConstant(object);
}


Node* InterpreterAssembler::BooleanConstant(bool value) {
  return raw_assembler_->BooleanConstant(value);
}

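// Smis are tagged by shifting the untagged value left by the Smi shift size
// plus the tag size; untagging is the corresponding arithmetic right shift.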
Node* InterpreterAssembler::SmiShiftBitsConstant() {
  return Int32Constant(kSmiShiftSize + kSmiTagSize);
}


Node* InterpreterAssembler::SmiTag(Node* value) {
  return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}


Node* InterpreterAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}


Node* InterpreterAssembler::IntPtrAdd(Node* a, Node* b) {
  return raw_assembler_->IntPtrAdd(a, b);
}


Node* InterpreterAssembler::IntPtrSub(Node* a, Node* b) {
  return raw_assembler_->IntPtrSub(a, b);
}

Node* InterpreterAssembler::Int32Sub(Node* a, Node* b) {
  return raw_assembler_->Int32Sub(a, b);
}

Node* InterpreterAssembler::WordShl(Node* value, int shift) {
  return raw_assembler_->WordShl(value, Int32Constant(shift));
}

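// Constant pool entries are stored in a FixedArray hanging off the
// BytecodeArray; the element offset accounts for the FixedArray header and
// the heap object tag.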
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                        BytecodeArray::kConstantPoolOffset);
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(index, kPointerSizeLog2));
  return raw_assembler_->Load(MachineType::AnyTagged(), constant_pool,
                              entry_offset);
}


Node* InterpreterAssembler::LoadFixedArrayElement(Node* fixed_array,
                                                  int index) {
  Node* entry_offset =
      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                WordShl(Int32Constant(index), kPointerSizeLog2));
  return raw_assembler_->Load(MachineType::AnyTagged(), fixed_array,
                              entry_offset);
}


Node* InterpreterAssembler::LoadObjectField(Node* object, int offset) {
  return raw_assembler_->Load(MachineType::AnyTagged(), object,
                              IntPtrConstant(offset - kHeapObjectTag));
}


Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) {
  return raw_assembler_->Load(MachineType::AnyTagged(), context,
                              IntPtrConstant(Context::SlotOffset(slot_index)));
}


Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                Int32Constant(Context::kHeaderSize - kHeapObjectTag));
  return raw_assembler_->Load(MachineType::AnyTagged(), context, offset);
}


Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index,
                                             Node* value) {
  Node* offset =
      IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
                Int32Constant(Context::kHeaderSize - kHeapObjectTag));
  return raw_assembler_->Store(MachineRepresentation::kTagged, context, offset,
                               value, kFullWriteBarrier);
}

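// The type feedback vector is reached from the current JSFunction, which sits
// at a fixed offset from the register file pointer, via its
// SharedFunctionInfo.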
Node* InterpreterAssembler::LoadTypeFeedbackVector() {
  Node* function = raw_assembler_->Load(
      MachineType::AnyTagged(), RegisterFileRawPointer(),
      IntPtrConstant(InterpreterFrameConstants::kFunctionFromRegisterPointer));
  Node* shared_info =
      LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset);
  Node* vector =
      LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset);
  return vector;
}


Node* InterpreterAssembler::Projection(int index, Node* node) {
  return raw_assembler_->Projection(index, node);
}


Node* InterpreterAssembler::CallConstruct(Node* new_target, Node* constructor,
                                          Node* first_arg, Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate());
  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags);

  Node* code_target = HeapConstant(callable.code());

  Node** args = zone()->NewArray<Node*>(5);
  args[0] = arg_count;
  args[1] = new_target;
  args[2] = constructor;
  args[3] = first_arg;
  args[4] = GetContext();

  return CallN(descriptor, code_target, args);
}

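// Before any outgoing call the current bytecode offset is spilled into its
// frame slot, presumably so the interpreter frame reflects the current
// position while the callee is running.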
void InterpreterAssembler::CallPrologue() {
  StoreRegister(SmiTag(BytecodeOffset()),
                InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer);
}


Node* InterpreterAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
                                  Node** args) {
  CallPrologue();

  Node* stack_pointer_before_call = nullptr;
  if (FLAG_debug_code) {
    stack_pointer_before_call = raw_assembler_->LoadStackPointer();
  }
  Node* return_val = raw_assembler_->CallN(descriptor, code_target, args);
  if (FLAG_debug_code) {
    Node* stack_pointer_after_call = raw_assembler_->LoadStackPointer();
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        kUnexpectedStackPointer);
  }

  return return_val;
}


Node* InterpreterAssembler::CallJS(Node* function, Node* first_arg,
                                   Node* arg_count) {
  Callable callable = CodeFactory::InterpreterPushArgsAndCall(isolate());
  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags);

  Node* code_target = HeapConstant(callable.code());

  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg_count;
  args[1] = first_arg;
  args[2] = function;
  args[3] = GetContext();

  return CallN(descriptor, code_target, args);
}


Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor,
                                   Node* target, Node** args) {
  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), descriptor, 0, CallDescriptor::kNoFlags);
  return CallN(call_descriptor, target, args);
}


Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor,
                                   Node* target, Node* arg1, Node* arg2,
                                   Node* arg3) {
  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = GetContext();
  return CallIC(descriptor, target, args);
}


Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor,
                                   Node* target, Node* arg1, Node* arg2,
                                   Node* arg3, Node* arg4) {
  Node** args = zone()->NewArray<Node*>(5);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = arg4;
  args[4] = GetContext();
  return CallIC(descriptor, target, args);
}


Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor,
                                   Node* target, Node* arg1, Node* arg2,
                                   Node* arg3, Node* arg4, Node* arg5) {
  Node** args = zone()->NewArray<Node*>(6);
  args[0] = arg1;
  args[1] = arg2;
  args[2] = arg3;
  args[3] = arg4;
  args[4] = arg5;
  args[5] = GetContext();
  return CallIC(descriptor, target, args);
}

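// Variable-argument runtime calls go through the InterpreterCEntry stub. The
// runtime function's entry point is computed from the function id by indexing
// into the isolate's runtime function table.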
Node* InterpreterAssembler::CallRuntime(Node* function_id, Node* first_arg,
                                        Node* arg_count, int result_size) {
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
      isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags,
      Operator::kNoProperties, MachineType::AnyTagged(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = raw_assembler_->ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset = raw_assembler_->Int32Mul(
      function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function = IntPtrAdd(function_table, function_offset);
  Node* function_entry =
      raw_assembler_->Load(MachineType::Pointer(), function,
                           Int32Constant(offsetof(Runtime::Function, entry)));

  Node** args = zone()->NewArray<Node*>(4);
  args[0] = arg_count;
  args[1] = first_arg;
  args[2] = function_entry;
  args[3] = GetContext();

  return CallN(descriptor, code_target, args);
}

Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id) {
  CallPrologue();
  Node* return_val = raw_assembler_->CallRuntime0(function_id, GetContext());
  return return_val;
}

Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1) {
  CallPrologue();
  Node* return_val =
      raw_assembler_->CallRuntime1(function_id, arg1, GetContext());
  return return_val;
}


Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1, Node* arg2) {
  CallPrologue();
  Node* return_val =
      raw_assembler_->CallRuntime2(function_id, arg1, arg2, GetContext());
  return return_val;
}

Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1, Node* arg2, Node* arg3) {
  CallPrologue();
  Node* return_val =
      raw_assembler_->CallRuntime3(function_id, arg1, arg2, arg3, GetContext());
  return return_val;
}

Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id,
                                        Node* arg1, Node* arg2, Node* arg3,
                                        Node* arg4) {
  CallPrologue();
  Node* return_val = raw_assembler_->CallRuntime4(function_id, arg1, arg2, arg3,
                                                  arg4, GetContext());
  return return_val;
}

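// Returning from a bytecode handler tail-calls the InterpreterExitTrampoline,
// forwarding the same six dispatch parameters the handler itself received.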
void InterpreterAssembler::Return() {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }

  Node* exit_trampoline_code_object =
      HeapConstant(isolate()->builtins()->InterpreterExitTrampoline());
  // If the order of the parameters changes, the call signature below needs to
  // be updated.
  STATIC_ASSERT(0 == Linkage::kInterpreterAccumulatorParameter);
  STATIC_ASSERT(1 == Linkage::kInterpreterRegisterFileParameter);
  STATIC_ASSERT(2 == Linkage::kInterpreterBytecodeOffsetParameter);
  STATIC_ASSERT(3 == Linkage::kInterpreterBytecodeArrayParameter);
  STATIC_ASSERT(4 == Linkage::kInterpreterDispatchTableParameter);
  STATIC_ASSERT(5 == Linkage::kInterpreterContextParameter);
  Node* args[] = { GetAccumulator(),
                   RegisterFileRawPointer(),
                   BytecodeOffset(),
                   BytecodeArrayTaggedPointer(),
                   DispatchTableRawPointer(),
                   GetContext() };
  raw_assembler_->TailCallN(call_descriptor(), exit_trampoline_code_object,
                            args);
}


Node* InterpreterAssembler::Advance(int delta) {
  return IntPtrAdd(BytecodeOffset(), Int32Constant(delta));
}


Node* InterpreterAssembler::Advance(Node* delta) {
  return raw_assembler_->IntPtrAdd(BytecodeOffset(), delta);
}

void InterpreterAssembler::Jump(Node* delta) { DispatchTo(Advance(delta)); }

void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  RawMachineLabel match, no_match;
  raw_assembler_->Branch(condition, &match, &no_match);
  raw_assembler_->Bind(&match);
  DispatchTo(Advance(delta));
  raw_assembler_->Bind(&no_match);
  Dispatch();
}

void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(raw_assembler_->WordEqual(lhs, rhs), delta);
}

void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(raw_assembler_->WordNotEqual(lhs, rhs), delta);
}

void InterpreterAssembler::Dispatch() {
  DispatchTo(Advance(interpreter::Bytecodes::Size(bytecode_)));
}

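// DispatchTo loads the next bytecode at the given offset, looks up its
// handler in the dispatch table, and tail-calls it with the updated dispatch
// parameters, so control never returns to the current handler.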
void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
  if (FLAG_trace_ignition) {
    TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
  }
  Node* target_bytecode = raw_assembler_->Load(
      MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);

  // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
  // from code object on every dispatch.
  Node* target_code_object = raw_assembler_->Load(
      MachineType::Pointer(), DispatchTableRawPointer(),
      raw_assembler_->Word32Shl(target_bytecode,
                                Int32Constant(kPointerSizeLog2)));

  // If the order of the parameters changes, the call signature below needs to
  // be updated.
  STATIC_ASSERT(0 == Linkage::kInterpreterAccumulatorParameter);
  STATIC_ASSERT(1 == Linkage::kInterpreterRegisterFileParameter);
  STATIC_ASSERT(2 == Linkage::kInterpreterBytecodeOffsetParameter);
  STATIC_ASSERT(3 == Linkage::kInterpreterBytecodeArrayParameter);
  STATIC_ASSERT(4 == Linkage::kInterpreterDispatchTableParameter);
  STATIC_ASSERT(5 == Linkage::kInterpreterContextParameter);
  Node* args[] = { GetAccumulator(),
                   RegisterFileRawPointer(),
                   new_bytecode_offset,
                   BytecodeArrayTaggedPointer(),
                   DispatchTableRawPointer(),
                   GetContext() };
  raw_assembler_->TailCallN(call_descriptor(), target_code_object, args);
}

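// StackCheck compares the stack pointer against the isolate's stack limit and
// calls Runtime::kStackGuard when the limit has been crossed.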
void InterpreterAssembler::StackCheck() {
  RawMachineLabel end, ok, stack_guard;
  Node* sp = raw_assembler_->LoadStackPointer();
  Node* stack_limit = raw_assembler_->Load(
      MachineType::Pointer(),
      raw_assembler_->ExternalConstant(
          ExternalReference::address_of_stack_limit(isolate())));
  Node* condition = raw_assembler_->UintPtrGreaterThanOrEqual(sp, stack_limit);
  raw_assembler_->Branch(condition, &ok, &stack_guard);
  raw_assembler_->Bind(&stack_guard);
  CallRuntime(Runtime::kStackGuard);
  raw_assembler_->Goto(&end);
  raw_assembler_->Bind(&ok);
  raw_assembler_->Goto(&end);
  raw_assembler_->Bind(&end);
}

void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
  Node* abort_id = SmiTag(Int32Constant(bailout_reason));
  Node* ret_value = CallRuntime(Runtime::kAbort, abort_id);
  // Unreached, but keeps turbofan happy.
  raw_assembler_->Return(ret_value);
}


void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               BailoutReason bailout_reason) {
  RawMachineLabel match, no_match;
  Node* condition = raw_assembler_->WordEqual(lhs, rhs);
  raw_assembler_->Branch(condition, &match, &no_match);
  raw_assembler_->Bind(&no_match);
  Abort(bailout_reason);
  raw_assembler_->Bind(&match);
}

void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulator());
}

// static
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
  return CpuFeatures::IsSupported(UNALIGNED_ACCESSES);
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87
  return true;
#else
#error "Unknown Architecture"
#endif
}


// RawMachineAssembler delegate helpers:
Isolate* InterpreterAssembler::isolate() { return raw_assembler_->isolate(); }


Graph* InterpreterAssembler::graph() { return raw_assembler_->graph(); }


CallDescriptor* InterpreterAssembler::call_descriptor() const {
  return raw_assembler_->call_descriptor();
}


Zone* InterpreterAssembler::zone() { return raw_assembler_->zone(); }


}  // namespace compiler
}  // namespace internal
}  // namespace v8