| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/interpreter-assembler.h" | 5 #include "src/interpreter/interpreter-assembler.h" |
| 6 | 6 |
| 7 #include <ostream> | 7 #include <ostream> |
| 8 | 8 |
| 9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
| 10 #include "src/compiler/graph.h" | |
| 11 #include "src/compiler/instruction-selector.h" | |
| 12 #include "src/compiler/linkage.h" | |
| 13 #include "src/compiler/pipeline.h" | |
| 14 #include "src/compiler/raw-machine-assembler.h" | |
| 15 #include "src/compiler/schedule.h" | |
| 16 #include "src/frames.h" | 10 #include "src/frames.h" |
| 17 #include "src/interface-descriptors.h" | 11 #include "src/interface-descriptors.h" |
| 18 #include "src/interpreter/bytecodes.h" | 12 #include "src/interpreter/bytecodes.h" |
| 19 #include "src/machine-type.h" | 13 #include "src/machine-type.h" |
| 20 #include "src/macro-assembler.h" | 14 #include "src/macro-assembler.h" |
| 21 #include "src/zone.h" | 15 #include "src/zone.h" |
| 22 | 16 |
| 23 namespace v8 { | 17 namespace v8 { |
| 24 namespace internal { | 18 namespace internal { |
| 25 namespace compiler { | 19 namespace interpreter { |
| | 20 |
| | 21 using compiler::Node; |
| 26 | 22 |
| 27 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone, | 23 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone, |
| 28 interpreter::Bytecode bytecode) | 24 Bytecode bytecode) |
| 29 : bytecode_(bytecode), | 25 : compiler::CodeStubAssembler( |
| 30 raw_assembler_(new RawMachineAssembler( | 26 isolate, zone, InterpreterDispatchDescriptor(isolate), |
| 31 isolate, new (zone) Graph(zone), | 27 Code::ComputeFlags(Code::STUB), Bytecodes::ToString(bytecode), 0), |
| 32 Linkage::GetInterpreterDispatchDescriptor(zone), | 28 bytecode_(bytecode), |
| 33 MachineType::PointerRepresentation(), | |
| 34 InstructionSelector::SupportedMachineOperatorFlags())), | |
| 35 accumulator_( | 29 accumulator_( |
| 36 raw_assembler_->Parameter(Linkage::kInterpreterAccumulatorParameter)), | 30 Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter)), |
| 37 context_( | 31 context_(Parameter(InterpreterDispatchDescriptor::kContextParameter)), |
| 38 raw_assembler_->Parameter(Linkage::kInterpreterContextParameter)), | 32 disable_stack_check_across_call_(false), |
| 39 code_generated_(false) { | 33 stack_pointer_before_call_(nullptr) { |
| 40 if (FLAG_trace_ignition) { | 34 if (FLAG_trace_ignition) { |
| 41 TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry); | 35 TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry); |
| 42 } | 36 } |
| 43 } | 37 } |
| 44 | 38 |
| 45 InterpreterAssembler::~InterpreterAssembler() {} | 39 InterpreterAssembler::~InterpreterAssembler() {} |
| 46 | 40 |
| 47 | |
| 48 Handle<Code> InterpreterAssembler::GenerateCode() { | |
| 49 DCHECK(!code_generated_); | |
| 50 | |
| 51 // Disallow empty handlers that never return. | |
| 52 DCHECK_NE(0, graph()->end()->InputCount()); | |
| 53 | |
| 54 const char* bytecode_name = interpreter::Bytecodes::ToString(bytecode_); | |
| 55 Schedule* schedule = raw_assembler_->Export(); | |
| 56 Code::Flags flags = Code::ComputeFlags(Code::STUB); | |
| 57 Handle<Code> code = Pipeline::GenerateCodeForCodeStub( | |
| 58 isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags, | |
| 59 bytecode_name); | |
| 60 | |
| 61 #ifdef ENABLE_DISASSEMBLER | |
| 62 if (FLAG_trace_ignition_codegen) { | |
| 63 OFStream os(stdout); | |
| 64 code->Disassemble(bytecode_name, os); | |
| 65 os << std::flush; | |
| 66 } | |
| 67 #endif | |
| 68 | |
| 69 code_generated_ = true; | |
| 70 return code; | |
| 71 } | |
| 72 | |
| 73 | |
| 74 Node* InterpreterAssembler::GetAccumulator() { return accumulator_; } | 41 Node* InterpreterAssembler::GetAccumulator() { return accumulator_; } |
| 75 | 42 |
| 76 | |
| 77 void InterpreterAssembler::SetAccumulator(Node* value) { accumulator_ = value; } | 43 void InterpreterAssembler::SetAccumulator(Node* value) { accumulator_ = value; } |
| 78 | 44 |
| 79 | |
| 80 Node* InterpreterAssembler::GetContext() { return context_; } | 45 Node* InterpreterAssembler::GetContext() { return context_; } |
| 81 | 46 |
| 82 | |
| 83 void InterpreterAssembler::SetContext(Node* value) { | 47 void InterpreterAssembler::SetContext(Node* value) { |
| 84 StoreRegister(value, interpreter::Register::current_context()); | 48 StoreRegister(value, Register::current_context()); |
| 85 context_ = value; | 49 context_ = value; |
| 86 } | 50 } |
| 87 | 51 |
| 88 Node* InterpreterAssembler::BytecodeOffset() { | 52 Node* InterpreterAssembler::BytecodeOffset() { |
| 89 return raw_assembler_->Parameter( | 53 return Parameter(InterpreterDispatchDescriptor::kBytecodeOffsetParameter); |
| 90 Linkage::kInterpreterBytecodeOffsetParameter); | |
| 91 } | 54 } |
| 92 | 55 |
| 93 Node* InterpreterAssembler::RegisterFileRawPointer() { | 56 Node* InterpreterAssembler::RegisterFileRawPointer() { |
| 94 return raw_assembler_->Parameter(Linkage::kInterpreterRegisterFileParameter); | 57 return Parameter(InterpreterDispatchDescriptor::kRegisterFileParameter); |
| 95 } | 58 } |
| 96 | 59 |
| 97 | |
| 98 Node* InterpreterAssembler::BytecodeArrayTaggedPointer() { | 60 Node* InterpreterAssembler::BytecodeArrayTaggedPointer() { |
| 99 return raw_assembler_->Parameter(Linkage::kInterpreterBytecodeArrayParameter); | 61 return Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter); |
| 100 } | 62 } |
| 101 | 63 |
| 102 | |
| 103 Node* InterpreterAssembler::DispatchTableRawPointer() { | 64 Node* InterpreterAssembler::DispatchTableRawPointer() { |
| 104 return raw_assembler_->Parameter(Linkage::kInterpreterDispatchTableParameter); | 65 return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter); |
| 105 } | 66 } |
| 106 | 67 |
| 107 | |
| 108 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) { | 68 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) { |
| 109 return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index)); | 69 return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index)); |
| 110 } | 70 } |
| 111 | 71 |
| 112 | |
| 113 Node* InterpreterAssembler::LoadRegister(int offset) { | 72 Node* InterpreterAssembler::LoadRegister(int offset) { |
| 114 return raw_assembler_->Load(MachineType::AnyTagged(), | 73 return Load(MachineType::AnyTagged(), RegisterFileRawPointer(), |
| 115 RegisterFileRawPointer(), Int32Constant(offset)); | 74 Int32Constant(offset)); |
| 116 } | 75 } |
| 117 | 76 |
| 118 | 77 Node* InterpreterAssembler::LoadRegister(Register reg) { |
| 119 Node* InterpreterAssembler::LoadRegister(interpreter::Register reg) { | |
| 120 return LoadRegister(reg.ToOperand() << kPointerSizeLog2); | 78 return LoadRegister(reg.ToOperand() << kPointerSizeLog2); |
| 121 } | 79 } |
| 122 | 80 |
| 123 | |
| 124 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) { | 81 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) { |
| 125 return WordShl(index, kPointerSizeLog2); | 82 return WordShl(index, kPointerSizeLog2); |
| 126 } | 83 } |
| 127 | 84 |
| 128 | |
| 129 Node* InterpreterAssembler::LoadRegister(Node* reg_index) { | 85 Node* InterpreterAssembler::LoadRegister(Node* reg_index) { |
| 130 return raw_assembler_->Load(MachineType::AnyTagged(), | 86 return Load(MachineType::AnyTagged(), RegisterFileRawPointer(), |
| 131 RegisterFileRawPointer(), | 87 RegisterFrameOffset(reg_index)); |
| 132 RegisterFrameOffset(reg_index)); | |
| 133 } | 88 } |
| 134 | 89 |
| 135 | |
| 136 Node* InterpreterAssembler::StoreRegister(Node* value, int offset) { | 90 Node* InterpreterAssembler::StoreRegister(Node* value, int offset) { |
| 137 return raw_assembler_->Store(MachineRepresentation::kTagged, | 91 return StoreNoWriteBarrier(MachineRepresentation::kTagged, |
| 138 RegisterFileRawPointer(), Int32Constant(offset), | 92 RegisterFileRawPointer(), Int32Constant(offset), |
| 139 value, kNoWriteBarrier); | 93 value); |
| 140 } | 94 } |
| 141 | 95 |
| 142 | 96 Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) { |
| 143 Node* InterpreterAssembler::StoreRegister(Node* value, | |
| 144 interpreter::Register reg) { | |
| 145 return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2); | 97 return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2); |
| 146 } | 98 } |
| 147 | 99 |
| 148 | |
| 149 Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) { | 100 Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) { |
| 150 return raw_assembler_->Store( | 101 return StoreNoWriteBarrier(MachineRepresentation::kTagged, |
| 151 MachineRepresentation::kTagged, RegisterFileRawPointer(), | 102 RegisterFileRawPointer(), |
| 152 RegisterFrameOffset(reg_index), value, kNoWriteBarrier); | 103 RegisterFrameOffset(reg_index), value); |
| 153 } | 104 } |
| 154 | 105 |
| 155 | |
| 156 Node* InterpreterAssembler::NextRegister(Node* reg_index) { | 106 Node* InterpreterAssembler::NextRegister(Node* reg_index) { |
| 157 // Register indexes are negative, so the next index is minus one. | 107 // Register indexes are negative, so the next index is minus one. |
| 158 return IntPtrAdd(reg_index, Int32Constant(-1)); | 108 return IntPtrAdd(reg_index, Int32Constant(-1)); |
| 159 } | 109 } |
| 160 | 110 |
| 161 | |
| 162 Node* InterpreterAssembler::BytecodeOperand(int operand_index) { | 111 Node* InterpreterAssembler::BytecodeOperand(int operand_index) { |
| 163 DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_)); | 112 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 164 DCHECK_EQ(interpreter::OperandSize::kByte, | 113 DCHECK_EQ(OperandSize::kByte, |
| 165 interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)); | 114 Bytecodes::GetOperandSize(bytecode_, operand_index)); |
| 166 return raw_assembler_->Load( | 115 return Load( |
| 167 MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 116 MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 168 IntPtrAdd(BytecodeOffset(), | 117 IntPtrAdd(BytecodeOffset(), Int32Constant(Bytecodes::GetOperandOffset( |
| 169 Int32Constant(interpreter::Bytecodes::GetOperandOffset( | 118 bytecode_, operand_index)))); |
| 170 bytecode_, operand_index)))); | |
| 171 } | 119 } |
| 172 | 120 |
| 173 | |
| 174 Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) { | 121 Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) { |
| 175 DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_)); | 122 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 176 DCHECK_EQ(interpreter::OperandSize::kByte, | 123 DCHECK_EQ(OperandSize::kByte, |
| 177 interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)); | 124 Bytecodes::GetOperandSize(bytecode_, operand_index)); |
| 178 Node* load = raw_assembler_->Load( | 125 Node* load = Load( |
| 179 MachineType::Int8(), BytecodeArrayTaggedPointer(), | 126 MachineType::Int8(), BytecodeArrayTaggedPointer(), |
| 180 IntPtrAdd(BytecodeOffset(), | 127 IntPtrAdd(BytecodeOffset(), Int32Constant(Bytecodes::GetOperandOffset( |
| 181 Int32Constant(interpreter::Bytecodes::GetOperandOffset( | 128 bytecode_, operand_index)))); |
| 182 bytecode_, operand_index)))); | |
| 183 // Ensure that we sign extend to full pointer size | 129 // Ensure that we sign extend to full pointer size |
| 184 if (kPointerSize == 8) { | 130 if (kPointerSize == 8) { |
| 185 load = raw_assembler_->ChangeInt32ToInt64(load); | 131 load = ChangeInt32ToInt64(load); |
| 186 } | 132 } |
| 187 return load; | 133 return load; |
| 188 } | 134 } |
| 189 | 135 |
| 190 | |
| 191 Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) { | 136 Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) { |
| 192 DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_)); | 137 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 193 DCHECK_EQ(interpreter::OperandSize::kShort, | 138 DCHECK_EQ(OperandSize::kShort, |
| 194 interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)); | 139 Bytecodes::GetOperandSize(bytecode_, operand_index)); |
| 195 if (TargetSupportsUnalignedAccess()) { | 140 if (TargetSupportsUnalignedAccess()) { |
| 196 return raw_assembler_->Load( | 141 return Load( |
| 197 MachineType::Uint16(), BytecodeArrayTaggedPointer(), | 142 MachineType::Uint16(), BytecodeArrayTaggedPointer(), |
| 198 IntPtrAdd(BytecodeOffset(), | 143 IntPtrAdd(BytecodeOffset(), Int32Constant(Bytecodes::GetOperandOffset( |
| 199 Int32Constant(interpreter::Bytecodes::GetOperandOffset( | 144 bytecode_, operand_index)))); |
| 200 bytecode_, operand_index)))); | |
| 201 } else { | 145 } else { |
| 202 int offset = | 146 int offset = Bytecodes::GetOperandOffset(bytecode_, operand_index); |
| 203 interpreter::Bytecodes::GetOperandOffset(bytecode_, operand_index); | 147 Node* first_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 204 Node* first_byte = raw_assembler_->Load( | 148 IntPtrAdd(BytecodeOffset(), Int32Constant(offset))); |
| 205 MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 149 Node* second_byte = |
| 206 IntPtrAdd(BytecodeOffset(), Int32Constant(offset))); | 150 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 207 Node* second_byte = raw_assembler_->Load( | 151 IntPtrAdd(BytecodeOffset(), Int32Constant(offset + 1))); |
| 208 MachineType::Uint8(), BytecodeArrayTaggedPointer(), | |
| 209 IntPtrAdd(BytecodeOffset(), Int32Constant(offset + 1))); | |
| 210 #if V8_TARGET_LITTLE_ENDIAN | 152 #if V8_TARGET_LITTLE_ENDIAN |
| 211 return raw_assembler_->WordOr(WordShl(second_byte, kBitsPerByte), | 153 return WordOr(WordShl(second_byte, kBitsPerByte), first_byte); |
| 212 first_byte); | |
| 213 #elif V8_TARGET_BIG_ENDIAN | 154 #elif V8_TARGET_BIG_ENDIAN |
| 214 return raw_assembler_->WordOr(WordShl(first_byte, kBitsPerByte), | 155 return WordOr(WordShl(first_byte, kBitsPerByte), second_byte); |
| 215 second_byte); | |
| 216 #else | 156 #else |
| 217 #error "Unknown Architecture" | 157 #error "Unknown Architecture" |
| 218 #endif | 158 #endif |
| 219 } | 159 } |
| 220 } | 160 } |
| 221 | 161 |
| 222 | |
| 223 Node* InterpreterAssembler::BytecodeOperandShortSignExtended( | 162 Node* InterpreterAssembler::BytecodeOperandShortSignExtended( |
| 224 int operand_index) { | 163 int operand_index) { |
| 225 DCHECK_LT(operand_index, interpreter::Bytecodes::NumberOfOperands(bytecode_)); | 164 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 226 DCHECK_EQ(interpreter::OperandSize::kShort, | 165 DCHECK_EQ(OperandSize::kShort, |
| 227 interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)); | 166 Bytecodes::GetOperandSize(bytecode_, operand_index)); |
| 228 int operand_offset = | 167 int operand_offset = Bytecodes::GetOperandOffset(bytecode_, operand_index); |
| 229 interpreter::Bytecodes::GetOperandOffset(bytecode_, operand_index); | |
| 230 Node* load; | 168 Node* load; |
| 231 if (TargetSupportsUnalignedAccess()) { | 169 if (TargetSupportsUnalignedAccess()) { |
| 232 load = raw_assembler_->Load( | 170 load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(), |
| 233 MachineType::Int16(), BytecodeArrayTaggedPointer(), | 171 IntPtrAdd(BytecodeOffset(), Int32Constant(operand_offset))); |
| 234 IntPtrAdd(BytecodeOffset(), Int32Constant(operand_offset))); | |
| 235 } else { | 172 } else { |
| 236 #if V8_TARGET_LITTLE_ENDIAN | 173 #if V8_TARGET_LITTLE_ENDIAN |
| 237 Node* hi_byte_offset = Int32Constant(operand_offset + 1); | 174 Node* hi_byte_offset = Int32Constant(operand_offset + 1); |
| 238 Node* lo_byte_offset = Int32Constant(operand_offset); | 175 Node* lo_byte_offset = Int32Constant(operand_offset); |
| 239 #elif V8_TARGET_BIG_ENDIAN | 176 #elif V8_TARGET_BIG_ENDIAN |
| 240 Node* hi_byte_offset = Int32Constant(operand_offset); | 177 Node* hi_byte_offset = Int32Constant(operand_offset); |
| 241 Node* lo_byte_offset = Int32Constant(operand_offset + 1); | 178 Node* lo_byte_offset = Int32Constant(operand_offset + 1); |
| 242 #else | 179 #else |
| 243 #error "Unknown Architecture" | 180 #error "Unknown Architecture" |
| 244 #endif | 181 #endif |
| 245 Node* hi_byte = | 182 Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), |
| 246 raw_assembler_->Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), | 183 IntPtrAdd(BytecodeOffset(), hi_byte_offset)); |
| 247 IntPtrAdd(BytecodeOffset(), hi_byte_offset)); | 184 Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 248 Node* lo_byte = | 185 IntPtrAdd(BytecodeOffset(), lo_byte_offset)); |
| 249 raw_assembler_->Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 186 hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte)); |
| 250 IntPtrAdd(BytecodeOffset(), lo_byte_offset)); | 187 load = Word32Or(hi_byte, lo_byte); |
| 251 hi_byte = raw_assembler_->Word32Shl(hi_byte, Int32Constant(kBitsPerByte)); | |
| 252 load = raw_assembler_->Word32Or(hi_byte, lo_byte); | |
| 253 } | 188 } |
| 254 | 189 |
| 255 // Ensure that we sign extend to full pointer size | 190 // Ensure that we sign extend to full pointer size |
| 256 if (kPointerSize == 8) { | 191 if (kPointerSize == 8) { |
| 257 load = raw_assembler_->ChangeInt32ToInt64(load); | 192 load = ChangeInt32ToInt64(load); |
| 258 } | 193 } |
| 259 return load; | 194 return load; |
| 260 } | 195 } |
| 261 | 196 |
| 262 | |
| 263 Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) { | 197 Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) { |
| 264 switch (interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)) { | 198 switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) { |
| 265 case interpreter::OperandSize::kByte: | 199 case OperandSize::kByte: |
| 266 DCHECK_EQ( | 200 DCHECK_EQ(OperandType::kRegCount8, |
| 267 interpreter::OperandType::kRegCount8, | 201 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 268 interpreter::Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 269 return BytecodeOperand(operand_index); | 202 return BytecodeOperand(operand_index); |
| 270 case interpreter::OperandSize::kShort: | 203 case OperandSize::kShort: |
| 271 DCHECK_EQ( | 204 DCHECK_EQ(OperandType::kRegCount16, |
| 272 interpreter::OperandType::kRegCount16, | 205 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 273 interpreter::Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 274 return BytecodeOperandShort(operand_index); | 206 return BytecodeOperandShort(operand_index); |
| 275 case interpreter::OperandSize::kNone: | 207 case OperandSize::kNone: |
| 276 UNREACHABLE(); | 208 UNREACHABLE(); |
| 277 } | 209 } |
| 278 return nullptr; | 210 return nullptr; |
| 279 } | 211 } |
| 280 | 212 |
| 281 | |
| 282 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) { | 213 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) { |
| 283 DCHECK_EQ(interpreter::OperandType::kImm8, | 214 DCHECK_EQ(OperandType::kImm8, |
| 284 interpreter::Bytecodes::GetOperandType(bytecode_, operand_index)); | 215 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 285 return BytecodeOperandSignExtended(operand_index); | 216 return BytecodeOperandSignExtended(operand_index); |
| 286 } | 217 } |
| 287 | 218 |
| 288 | |
| 289 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) { | 219 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) { |
| 290 switch (interpreter::Bytecodes::GetOperandSize(bytecode_, operand_index)) { | 220 switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) { |
| 291 case interpreter::OperandSize::kByte: | 221 case OperandSize::kByte: |
| 292 DCHECK_EQ( | 222 DCHECK_EQ(OperandType::kIdx8, |
| 293 interpreter::OperandType::kIdx8, | 223 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 294 interpreter::Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 295 return BytecodeOperand(operand_index); | 224 return BytecodeOperand(operand_index); |
| 296 case interpreter::OperandSize::kShort: | 225 case OperandSize::kShort: |
| 297 DCHECK_EQ( | 226 DCHECK_EQ(OperandType::kIdx16, |
| 298 interpreter::OperandType::kIdx16, | 227 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 299 interpreter::Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 300 return BytecodeOperandShort(operand_index); | 228 return BytecodeOperandShort(operand_index); |
| 301 case interpreter::OperandSize::kNone: | 229 case OperandSize::kNone: |
| 302 UNREACHABLE(); | 230 UNREACHABLE(); |
| 303 } | 231 } |
| 304 return nullptr; | 232 return nullptr; |
| 305 } | 233 } |
| 306 | 234 |
| 307 | |
| 308 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) { | 235 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) { |
| 309 interpreter::OperandType operand_type = | 236 OperandType operand_type = |
| 310 interpreter::Bytecodes::GetOperandType(bytecode_, operand_index); | 237 Bytecodes::GetOperandType(bytecode_, operand_index); |
| 311 if (interpreter::Bytecodes::IsRegisterOperandType(operand_type)) { | 238 if (Bytecodes::IsRegisterOperandType(operand_type)) { |
| 312 interpreter::OperandSize operand_size = | 239 OperandSize operand_size = Bytecodes::SizeOfOperand(operand_type); |
| 313 interpreter::Bytecodes::SizeOfOperand(operand_type); | 240 if (operand_size == OperandSize::kByte) { |
| 314 if (operand_size == interpreter::OperandSize::kByte) { | |
| 315 return BytecodeOperandSignExtended(operand_index); | 241 return BytecodeOperandSignExtended(operand_index); |
| 316 } else if (operand_size == interpreter::OperandSize::kShort) { | 242 } else if (operand_size == OperandSize::kShort) { |
| 317 return BytecodeOperandShortSignExtended(operand_index); | 243 return BytecodeOperandShortSignExtended(operand_index); |
| 318 } | 244 } |
| 319 } | 245 } |
| 320 UNREACHABLE(); | 246 UNREACHABLE(); |
| 321 return nullptr; | 247 return nullptr; |
| 322 } | 248 } |
| 323 | 249 |
| 324 | |
| 325 Node* InterpreterAssembler::Int32Constant(int value) { | |
| 326 return raw_assembler_->Int32Constant(value); | |
| 327 } | |
| 328 | |
| 329 | |
| 330 Node* InterpreterAssembler::IntPtrConstant(intptr_t value) { | |
| 331 return raw_assembler_->IntPtrConstant(value); | |
| 332 } | |
| 333 | |
| 334 | |
| 335 Node* InterpreterAssembler::NumberConstant(double value) { | |
| 336 return raw_assembler_->NumberConstant(value); | |
| 337 } | |
| 338 | |
| 339 | |
| 340 Node* InterpreterAssembler::HeapConstant(Handle<HeapObject> object) { | |
| 341 return raw_assembler_->HeapConstant(object); | |
| 342 } | |
| 343 | |
| 344 | |
| 345 Node* InterpreterAssembler::BooleanConstant(bool value) { | |
| 346 return raw_assembler_->BooleanConstant(value); | |
| 347 } | |
| 348 | |
| 349 | |
| 350 Node* InterpreterAssembler::SmiShiftBitsConstant() { | |
| 351 return Int32Constant(kSmiShiftSize + kSmiTagSize); | |
| 352 } | |
| 353 | |
| 354 | |
| 355 Node* InterpreterAssembler::SmiTag(Node* value) { | |
| 356 return raw_assembler_->WordShl(value, SmiShiftBitsConstant()); | |
| 357 } | |
| 358 | |
| 359 | |
| 360 Node* InterpreterAssembler::SmiUntag(Node* value) { | |
| 361 return raw_assembler_->WordSar(value, SmiShiftBitsConstant()); | |
| 362 } | |
| 363 | |
| 364 | |
| 365 Node* InterpreterAssembler::IntPtrAdd(Node* a, Node* b) { | |
| 366 return raw_assembler_->IntPtrAdd(a, b); | |
| 367 } | |
| 368 | |
| 369 | |
| 370 Node* InterpreterAssembler::IntPtrSub(Node* a, Node* b) { | |
| 371 return raw_assembler_->IntPtrSub(a, b); | |
| 372 } | |
| 373 | |
| 374 Node* InterpreterAssembler::Int32Sub(Node* a, Node* b) { | |
| 375 return raw_assembler_->Int32Sub(a, b); | |
| 376 } | |
| 377 | |
| 378 Node* InterpreterAssembler::WordShl(Node* value, int shift) { | |
| 379 return raw_assembler_->WordShl(value, Int32Constant(shift)); | |
| 380 } | |
| 381 | |
| 382 | |
| 383 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { | 250 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { |
| 384 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), | 251 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), |
| 385 BytecodeArray::kConstantPoolOffset); | 252 BytecodeArray::kConstantPoolOffset); |
| 386 Node* entry_offset = | 253 Node* entry_offset = |
| 387 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), | 254 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), |
| 388 WordShl(index, kPointerSizeLog2)); | 255 WordShl(index, kPointerSizeLog2)); |
| 389 return raw_assembler_->Load(MachineType::AnyTagged(), constant_pool, | 256 return Load(MachineType::AnyTagged(), constant_pool, entry_offset); |
| 390 entry_offset); | |
| 391 } | 257 } |
| 392 | 258 |
| 393 | |
| 394 Node* InterpreterAssembler::LoadFixedArrayElement(Node* fixed_array, | 259 Node* InterpreterAssembler::LoadFixedArrayElement(Node* fixed_array, |
| 395 int index) { | 260 int index) { |
| 396 Node* entry_offset = | 261 Node* entry_offset = |
| 397 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), | 262 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), |
| 398 WordShl(Int32Constant(index), kPointerSizeLog2)); | 263 WordShl(Int32Constant(index), kPointerSizeLog2)); |
| 399 return raw_assembler_->Load(MachineType::AnyTagged(), fixed_array, | 264 return Load(MachineType::AnyTagged(), fixed_array, entry_offset); |
| 400 entry_offset); | |
| 401 } | 265 } |
| 402 | 266 |
| 403 | |
| 404 Node* InterpreterAssembler::LoadObjectField(Node* object, int offset) { | 267 Node* InterpreterAssembler::LoadObjectField(Node* object, int offset) { |
| 405 return raw_assembler_->Load(MachineType::AnyTagged(), object, | 268 return Load(MachineType::AnyTagged(), object, |
| 406 IntPtrConstant(offset - kHeapObjectTag)); | 269 IntPtrConstant(offset - kHeapObjectTag)); |
| 407 } | 270 } |
| 408 | 271 |
| 409 | |
| 410 Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) { | 272 Node* InterpreterAssembler::LoadContextSlot(Node* context, int slot_index) { |
| 411 return raw_assembler_->Load(MachineType::AnyTagged(), context, | 273 return Load(MachineType::AnyTagged(), context, |
| 412 IntPtrConstant(Context::SlotOffset(slot_index))); | 274 IntPtrConstant(Context::SlotOffset(slot_index))); |
| 413 } | 275 } |
| 414 | 276 |
| 415 | |
| 416 Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) { | 277 Node* InterpreterAssembler::LoadContextSlot(Node* context, Node* slot_index) { |
| 417 Node* offset = | 278 Node* offset = |
| 418 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), | 279 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), |
| 419 Int32Constant(Context::kHeaderSize - kHeapObjectTag)); | 280 Int32Constant(Context::kHeaderSize - kHeapObjectTag)); |
| 420 return raw_assembler_->Load(MachineType::AnyTagged(), context, offset); | 281 return Load(MachineType::AnyTagged(), context, offset); |
| 421 } | 282 } |
| 422 | 283 |
| 423 | |
| 424 Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index, | 284 Node* InterpreterAssembler::StoreContextSlot(Node* context, Node* slot_index, |
| 425 Node* value) { | 285 Node* value) { |
| 426 Node* offset = | 286 Node* offset = |
| 427 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), | 287 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2), |
| 428 Int32Constant(Context::kHeaderSize - kHeapObjectTag)); | 288 Int32Constant(Context::kHeaderSize - kHeapObjectTag)); |
| 429 return raw_assembler_->Store(MachineRepresentation::kTagged, context, offset, | 289 return Store(MachineRepresentation::kTagged, context, offset, value); |
| 430 value, kFullWriteBarrier); | |
| 431 } | 290 } |
| 432 | 291 |
| 433 | |
| 434 Node* InterpreterAssembler::LoadTypeFeedbackVector() { | 292 Node* InterpreterAssembler::LoadTypeFeedbackVector() { |
| 435 Node* function = raw_assembler_->Load( | 293 Node* function = Load( |
| 436 MachineType::AnyTagged(), RegisterFileRawPointer(), | 294 MachineType::AnyTagged(), RegisterFileRawPointer(), |
| 437 IntPtrConstant(InterpreterFrameConstants::kFunctionFromRegisterPointer)); | 295 IntPtrConstant(InterpreterFrameConstants::kFunctionFromRegisterPointer)); |
| 438 Node* shared_info = | 296 Node* shared_info = |
| 439 LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset); | 297 LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset); |
| 440 Node* vector = | 298 Node* vector = |
| 441 LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset); | 299 LoadObjectField(shared_info, SharedFunctionInfo::kFeedbackVectorOffset); |
| 442 return vector; | 300 return vector; |
| 443 } | 301 } |
| 444 | 302 |
| 445 | |
| 446 Node* InterpreterAssembler::Projection(int index, Node* node) { | |
| 447 return raw_assembler_->Projection(index, node); | |
| 448 } | |
| 449 | |
| 450 | |
| 451 Node* InterpreterAssembler::CallConstruct(Node* new_target, Node* constructor, | |
| 452 Node* first_arg, Node* arg_count) { | |
| 453 Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate()); | |
| 454 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( | |
| 455 isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags); | |
| 456 | |
| 457 Node* code_target = HeapConstant(callable.code()); | |
| 458 | |
| 459 Node** args = zone()->NewArray<Node*>(5); | |
| 460 args[0] = arg_count; | |
| 461 args[1] = new_target; | |
| 462 args[2] = constructor; | |
| 463 args[3] = first_arg; | |
| 464 args[4] = GetContext(); | |
| 465 | |
| 466 return CallN(descriptor, code_target, args); | |
| 467 } | |
| 468 | |
| 469 | |
| 470 void InterpreterAssembler::CallPrologue() { | 303 void InterpreterAssembler::CallPrologue() { |
| 471 StoreRegister(SmiTag(BytecodeOffset()), | 304 StoreRegister(SmiTag(BytecodeOffset()), |
| 472 InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer); | 305 InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer); |
| | 306 |
| | 307 if (FLAG_debug_code && !disable_stack_check_across_call_) { |
| | 308 DCHECK(stack_pointer_before_call_ == nullptr); |
| | 309 stack_pointer_before_call_ = LoadStackPointer(); |
| | 310 } |
| 473 } | 311 } |
| 474 | 312 |
| 475 | 313 void InterpreterAssembler::CallEpilogue() { |
| 476 Node* InterpreterAssembler::CallN(CallDescriptor* descriptor, Node* code_target, | 314 if (FLAG_debug_code && !disable_stack_check_across_call_) { |
| 477 Node** args) { | 315 Node* stack_pointer_after_call = LoadStackPointer(); |
| 478 CallPrologue(); | 316 Node* stack_pointer_before_call = stack_pointer_before_call_; |
| 479 | 317 stack_pointer_before_call_ = nullptr; |
| 480 Node* stack_pointer_before_call = nullptr; | |
| 481 if (FLAG_debug_code) { | |
| 482 stack_pointer_before_call = raw_assembler_->LoadStackPointer(); | |
| 483 } | |
| 484 Node* return_val = raw_assembler_->CallN(descriptor, code_target, args); | |
| 485 if (FLAG_debug_code) { | |
| 486 Node* stack_pointer_after_call = raw_assembler_->LoadStackPointer(); | |
| 487 AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call, | 318 AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call, |
| 488 kUnexpectedStackPointer); | 319 kUnexpectedStackPointer); |
| 489 } | 320 } |
| 490 | |
| 491 return return_val; | |
| 492 } | 321 } |
| 493 | 322 |
| 494 | 323 Node* InterpreterAssembler::CallJS(Node* function, Node* context, |
| 495 Node* InterpreterAssembler::CallJS(Node* function, Node* first_arg, | 324 Node* first_arg, Node* arg_count) { |
| 496 Node* arg_count) { | |
| 497 Callable callable = CodeFactory::InterpreterPushArgsAndCall(isolate()); | 325 Callable callable = CodeFactory::InterpreterPushArgsAndCall(isolate()); |
| 498 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( | |
| 499 isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags); | |
| 500 | |
| 501 Node* code_target = HeapConstant(callable.code()); | 326 Node* code_target = HeapConstant(callable.code()); |
| 502 | 327 return CallStub(callable.descriptor(), code_target, context, arg_count, |
| 503 Node** args = zone()->NewArray<Node*>(4); | 328 first_arg, function); |
| 504 args[0] = arg_count; | |
| 505 args[1] = first_arg; | |
| 506 args[2] = function; | |
| 507 args[3] = GetContext(); | |
| 508 | |
| 509 return CallN(descriptor, code_target, args); | |
| 510 } | 329 } |
| 511 | 330 |
| 512 | 331 Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context, |
| 513 Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, | 332 Node* new_target, Node* first_arg, |
| 514 Node* target, Node** args) { | 333 Node* arg_count) { |
| 515 CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor( | 334 Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(isolate()); |
| 516 isolate(), zone(), descriptor, 0, CallDescriptor::kNoFlags); | 335 Node* code_target = HeapConstant(callable.code()); |
| 517 return CallN(call_descriptor, target, args); | 336 return CallStub(callable.descriptor(), code_target, context, arg_count, |
| | 337 new_target, constructor, first_arg); |
| 518 } | 338 } |
| 519 | 339 |
| 520 | 340 Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context, |
| 521 Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, | 341 Node* first_arg, Node* arg_count, |
| 522 Node* target, Node* arg1, Node* arg2, | 342 int result_size) { |
| 523 Node* arg3) { | |
| 524 Node** args = zone()->NewArray<Node*>(4); | |
| 525 args[0] = arg1; | |
| 526 args[1] = arg2; | |
| 527 args[2] = arg3; | |
| 528 args[3] = GetContext(); | |
| 529 return CallIC(descriptor, target, args); | |
| 530 } | |
| 531 | |
| 532 | |
| 533 Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, | |
| 534 Node* target, Node* arg1, Node* arg2, | |
| 535 Node* arg3, Node* arg4) { | |
| 536 Node** args = zone()->NewArray<Node*>(5); | |
| 537 args[0] = arg1; | |
| 538 args[1] = arg2; | |
| 539 args[2] = arg3; | |
| 540 args[3] = arg4; | |
| 541 args[4] = GetContext(); | |
| 542 return CallIC(descriptor, target, args); | |
| 543 } | |
| 544 | |
| 545 | |
| 546 Node* InterpreterAssembler::CallIC(CallInterfaceDescriptor descriptor, | |
| 547 Node* target, Node* arg1, Node* arg2, | |
| 548 Node* arg3, Node* arg4, Node* arg5) { | |
| 549 Node** args = zone()->NewArray<Node*>(6); | |
| 550 args[0] = arg1; | |
| 551 args[1] = arg2; | |
| 552 args[2] = arg3; | |
| 553 args[3] = arg4; | |
| 554 args[4] = arg5; | |
| 555 args[5] = GetContext(); | |
| 556 return CallIC(descriptor, target, args); | |
| 557 } | |
| 558 | |
| 559 | |
| 560 Node* InterpreterAssembler::CallRuntime(Node* function_id, Node* first_arg, | |
| 561 Node* arg_count, int result_size) { | |
| 562 Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size); | 343 Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size); |
| 563 CallDescriptor* descriptor = Linkage::GetStubCallDescriptor( | |
| 564 isolate(), zone(), callable.descriptor(), 0, CallDescriptor::kNoFlags, | |
| 565 Operator::kNoProperties, MachineType::AnyTagged(), result_size); | |
| 566 Node* code_target = HeapConstant(callable.code()); | 344 Node* code_target = HeapConstant(callable.code()); |
| 567 | 345 |
| 568 // Get the function entry from the function id. | 346 // Get the function entry from the function id. |
| 569 Node* function_table = raw_assembler_->ExternalConstant( | 347 Node* function_table = ExternalConstant( |
| 570 ExternalReference::runtime_function_table_address(isolate())); | 348 ExternalReference::runtime_function_table_address(isolate())); |
| 571 Node* function_offset = raw_assembler_->Int32Mul( | 349 Node* function_offset = |
| 572 function_id, Int32Constant(sizeof(Runtime::Function))); | 350 Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function))); |
| 573 Node* function = IntPtrAdd(function_table, function_offset); | 351 Node* function = IntPtrAdd(function_table, function_offset); |
| 574 Node* function_entry = | 352 Node* function_entry = |
| 575 raw_assembler_->Load(MachineType::Pointer(), function, | 353 Load(MachineType::Pointer(), function, |
| 576 Int32Constant(offsetof(Runtime::Function, entry))); | 354 Int32Constant(offsetof(Runtime::Function, entry))); |
| 577 | 355 |
| 578 Node** args = zone()->NewArray<Node*>(4); | 356 return CallStub(callable.descriptor(), code_target, context, arg_count, |
| 579 args[0] = arg_count; | 357 first_arg, function_entry, result_size); |
| 580 args[1] = first_arg; | |
| 581 args[2] = function_entry; | |
| 582 args[3] = GetContext(); | |
| 583 | |
| 584 return CallN(descriptor, code_target, args); | |
| 585 } | 358 } |
| 586 | 359 |
| 587 Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id) { | |
| 588 CallPrologue(); | |
| 589 Node* return_val = raw_assembler_->CallRuntime0(function_id, GetContext()); | |
| 590 return return_val; | |
| 591 } | |
| 592 | |
| 593 Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id, | |
| 594 Node* arg1) { | |
| 595 CallPrologue(); | |
| 596 Node* return_val = | |
| 597 raw_assembler_->CallRuntime1(function_id, arg1, GetContext()); | |
| 598 return return_val; | |
| 599 } | |
| 600 | |
| 601 | |
| 602 Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id, | |
| 603 Node* arg1, Node* arg2) { | |
| 604 CallPrologue(); | |
| 605 Node* return_val = | |
| 606 raw_assembler_->CallRuntime2(function_id, arg1, arg2, GetContext()); | |
| 607 return return_val; | |
| 608 } | |
| 609 | |
| 610 Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id, | |
| 611 Node* arg1, Node* arg2, Node* arg3) { | |
| 612 CallPrologue(); | |
| 613 Node* return_val = | |
| 614 raw_assembler_->CallRuntime3(function_id, arg1, arg2, arg3, GetContext()); | |
| 615 return return_val; | |
| 616 } | |
| 617 | |
| 618 Node* InterpreterAssembler::CallRuntime(Runtime::FunctionId function_id, | |
| 619 Node* arg1, Node* arg2, Node* arg3, | |
| 620 Node* arg4) { | |
| 621 CallPrologue(); | |
| 622 Node* return_val = raw_assembler_->CallRuntime4(function_id, arg1, arg2, arg3, | |
| 623 arg4, GetContext()); | |
| 624 return return_val; | |
| 625 } | |
| 626 | |
| 627 | |
| 628 void InterpreterAssembler::Return() { | |
| 629 if (FLAG_trace_ignition) { | |
| 630 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); | |
| 631 } | |
| 632 | |
| 633 Node* exit_trampoline_code_object = | |
| 634 HeapConstant(isolate()->builtins()->InterpreterExitTrampoline()); | |
| 635 // If the order of the parameters changes, you need to change the call signature below. | |
| 636 STATIC_ASSERT(0 == Linkage::kInterpreterAccumulatorParameter); | |
| 637 STATIC_ASSERT(1 == Linkage::kInterpreterRegisterFileParameter); | |
| 638 STATIC_ASSERT(2 == Linkage::kInterpreterBytecodeOffsetParameter); | |
| 639 STATIC_ASSERT(3 == Linkage::kInterpreterBytecodeArrayParameter); | |
| 640 STATIC_ASSERT(4 == Linkage::kInterpreterDispatchTableParameter); | |
| 641 STATIC_ASSERT(5 == Linkage::kInterpreterContextParameter); | |
| 642 Node* args[] = { GetAccumulator(), | |
| 643 RegisterFileRawPointer(), | |
| 644 BytecodeOffset(), | |
| 645 BytecodeArrayTaggedPointer(), | |
| 646 DispatchTableRawPointer(), | |
| 647 GetContext() }; | |
| 648 raw_assembler_->TailCallN(call_descriptor(), exit_trampoline_code_object, | |
| 649 args); | |
| 650 } | |
| 651 | |
| 652 | |
| 653 Node* InterpreterAssembler::Advance(int delta) { | 360 Node* InterpreterAssembler::Advance(int delta) { |
| 654 return IntPtrAdd(BytecodeOffset(), Int32Constant(delta)); | 361 return IntPtrAdd(BytecodeOffset(), Int32Constant(delta)); |
| 655 } | 362 } |
| 656 | 363 |
| 657 | |
| 658 Node* InterpreterAssembler::Advance(Node* delta) { | 364 Node* InterpreterAssembler::Advance(Node* delta) { |
| 659 return raw_assembler_->IntPtrAdd(BytecodeOffset(), delta); | 365 return IntPtrAdd(BytecodeOffset(), delta); |
| 660 } | 366 } |
| 661 | 367 |
| 662 void InterpreterAssembler::Jump(Node* delta) { DispatchTo(Advance(delta)); } | 368 void InterpreterAssembler::Jump(Node* delta) { DispatchTo(Advance(delta)); } |
| 663 | 369 |
| 664 void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) { | 370 void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) { |
| 665 RawMachineLabel match, no_match; | 371 CodeStubAssembler::Label match(this); |
| 666 raw_assembler_->Branch(condition, &match, &no_match); | 372 CodeStubAssembler::Label no_match(this); |
| 667 raw_assembler_->Bind(&match); | 373 |
| | 374 Branch(condition, &match, &no_match); |
| | 375 Bind(&match); |
| 668 DispatchTo(Advance(delta)); | 376 DispatchTo(Advance(delta)); |
| 669 raw_assembler_->Bind(&no_match); | 377 Bind(&no_match); |
| 670 Dispatch(); | 378 Dispatch(); |
| 671 } | 379 } |
| 672 | 380 |
| 673 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) { | 381 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) { |
| 674 JumpConditional(raw_assembler_->WordEqual(lhs, rhs), delta); | 382 JumpConditional(WordEqual(lhs, rhs), delta); |
| 675 } | 383 } |
| 676 | 384 |
| 677 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs, | 385 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs, |
| 678 Node* delta) { | 386 Node* delta) { |
| 679 JumpConditional(raw_assembler_->WordNotEqual(lhs, rhs), delta); | 387 JumpConditional(WordNotEqual(lhs, rhs), delta); |
| 680 } | 388 } |
| 681 | 389 |
| 682 void InterpreterAssembler::Dispatch() { | 390 void InterpreterAssembler::Dispatch() { |
| 683 DispatchTo(Advance(interpreter::Bytecodes::Size(bytecode_))); | 391 DispatchTo(Advance(Bytecodes::Size(bytecode_))); |
| 684 } | 392 } |
| 685 | 393 |
| 686 | |
| 687 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) { | 394 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) { |
| 688 if (FLAG_trace_ignition) { | 395 if (FLAG_trace_ignition) { |
| 689 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); | 396 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); |
| 690 } | 397 } |
| 691 Node* target_bytecode = raw_assembler_->Load( | 398 Node* target_bytecode = Load( |
| 692 MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset); | 399 MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset); |
| 693 | 400 |
| 694 // TODO(rmcilroy): Create a code target dispatch table to avoid conversion | 401 // TODO(rmcilroy): Create a code target dispatch table to avoid conversion |
| 695 // from code object on every dispatch. | 402 // from code object on every dispatch. |
| 696 Node* target_code_object = raw_assembler_->Load( | 403 Node* target_code_object = |
| 697 MachineType::Pointer(), DispatchTableRawPointer(), | 404 Load(MachineType::Pointer(), DispatchTableRawPointer(), |
| 698 raw_assembler_->Word32Shl(target_bytecode, | 405 Word32Shl(target_bytecode, Int32Constant(kPointerSizeLog2))); |
| 699 Int32Constant(kPointerSizeLog2))); | |
| 700 | 406 |
| 701 // If the order of the parameters changes, you need to change the call signature below. | 407 InterpreterDispatchDescriptor descriptor(isolate()); |
| 702 STATIC_ASSERT(0 == Linkage::kInterpreterAccumulatorParameter); | 408 Node* args[] = {GetAccumulator(), RegisterFileRawPointer(), |
| 703 STATIC_ASSERT(1 == Linkage::kInterpreterRegisterFileParameter); | 409 new_bytecode_offset, BytecodeArrayTaggedPointer(), |
| 704 STATIC_ASSERT(2 == Linkage::kInterpreterBytecodeOffsetParameter); | 410 DispatchTableRawPointer(), GetContext()}; |
| 705 STATIC_ASSERT(3 == Linkage::kInterpreterBytecodeArrayParameter); | 411 TailCall(descriptor, target_code_object, args, 0); |
| 706 STATIC_ASSERT(4 == Linkage::kInterpreterDispatchTableParameter); | 412 } |
| 707 STATIC_ASSERT(5 == Linkage::kInterpreterContextParameter); | 413 |
| 708 Node* args[] = { GetAccumulator(), | 414 void InterpreterAssembler::InterpreterReturn() { |
| 709 RegisterFileRawPointer(), | 415 if (FLAG_trace_ignition) { |
| 710 new_bytecode_offset, | 416 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); |
| 711 BytecodeArrayTaggedPointer(), | 417 } |
| 712 DispatchTableRawPointer(), | 418 InterpreterDispatchDescriptor descriptor(isolate()); |
| 713 GetContext() }; | 419 Node* exit_trampoline_code_object = |
| 714 raw_assembler_->TailCallN(call_descriptor(), target_code_object, args); | 420 HeapConstant(isolate()->builtins()->InterpreterExitTrampoline()); |
| | 421 Node* args[] = {GetAccumulator(), RegisterFileRawPointer(), |
| | 422 BytecodeOffset(), BytecodeArrayTaggedPointer(), |
| | 423 DispatchTableRawPointer(), GetContext()}; |
| | 424 TailCall(descriptor, exit_trampoline_code_object, args, 0); |
| 715 } | 425 } |
| 716 | 426 |
| 717 void InterpreterAssembler::StackCheck() { | 427 void InterpreterAssembler::StackCheck() { |
| 718 RawMachineLabel end, ok, stack_guard; | 428 CodeStubAssembler::Label end(this); |
| 719 Node* sp = raw_assembler_->LoadStackPointer(); | 429 CodeStubAssembler::Label ok(this); |
| 720 Node* stack_limit = raw_assembler_->Load( | 430 CodeStubAssembler::Label stack_guard(this); |
| | 431 |
| | 432 Node* sp = LoadStackPointer(); |
| | 433 Node* stack_limit = Load( |
| 721 MachineType::Pointer(), | 434 MachineType::Pointer(), |
| 722 raw_assembler_->ExternalConstant( | 435 ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))); |
| 723 ExternalReference::address_of_stack_limit(isolate()))); | 436 Node* condition = UintPtrGreaterThanOrEqual(sp, stack_limit); |
| 724 Node* condition = raw_assembler_->UintPtrGreaterThanOrEqual(sp, stack_limit); | 437 Branch(condition, &ok, &stack_guard); |
| 725 raw_assembler_->Branch(condition, &ok, &stack_guard); | 438 Bind(&stack_guard); |
| 726 raw_assembler_->Bind(&stack_guard); | 439 CallRuntime(Runtime::kStackGuard, GetContext()); |
| 727 CallRuntime(Runtime::kStackGuard); | 440 Goto(&end); |
| 728 raw_assembler_->Goto(&end); | 441 Bind(&ok); |
| 729 raw_assembler_->Bind(&ok); | 442 Goto(&end); |
| 730 raw_assembler_->Goto(&end); | 443 Bind(&end); |
| 731 raw_assembler_->Bind(&end); | |
| 732 } | 444 } |
| 733 | 445 |
| 734 void InterpreterAssembler::Abort(BailoutReason bailout_reason) { | 446 void InterpreterAssembler::Abort(BailoutReason bailout_reason) { |
| | 447 disable_stack_check_across_call_ = true; |
| 735 Node* abort_id = SmiTag(Int32Constant(bailout_reason)); | 448 Node* abort_id = SmiTag(Int32Constant(bailout_reason)); |
| 736 Node* ret_value = CallRuntime(Runtime::kAbort, abort_id); | 449 Node* ret_value = CallRuntime(Runtime::kAbort, GetContext(), abort_id); |
| | 450 disable_stack_check_across_call_ = false; |
| 737 // Unreached, but keeps turbofan happy. | 451 // Unreached, but keeps turbofan happy. |
| 738 raw_assembler_->Return(ret_value); | 452 Return(ret_value); |
| 739 } | 453 } |
| 740 | 454 |
| 741 | |
| 742 void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs, | 455 void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs, |
| 743 BailoutReason bailout_reason) { | 456 BailoutReason bailout_reason) { |
| 744 RawMachineLabel match, no_match; | 457 CodeStubAssembler::Label match(this); |
| 745 Node* condition = raw_assembler_->WordEqual(lhs, rhs); | 458 CodeStubAssembler::Label no_match(this); |
| 746 raw_assembler_->Branch(condition, &match, &no_match); | 459 |
| 747 raw_assembler_->Bind(&no_match); | 460 Node* condition = WordEqual(lhs, rhs); |
| | 461 Branch(condition, &match, &no_match); |
| | 462 Bind(&no_match); |
| 748 Abort(bailout_reason); | 463 Abort(bailout_reason); |
| 749 raw_assembler_->Bind(&match); | 464 Bind(&match); |
| 750 } | 465 } |
| 751 | 466 |
| 752 void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) { | 467 void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) { |
| 753 CallRuntime(function_id, BytecodeArrayTaggedPointer(), | 468 CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(), |
| 754 SmiTag(BytecodeOffset()), GetAccumulator()); | 469 SmiTag(BytecodeOffset()), GetAccumulator()); |
| 755 } | 470 } |
| 756 | 471 |
| 757 // static | 472 // static |
| 758 bool InterpreterAssembler::TargetSupportsUnalignedAccess() { | 473 bool InterpreterAssembler::TargetSupportsUnalignedAccess() { |
| 759 #if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64 | 474 #if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64 |
| 760 return false; | 475 return false; |
| 761 #elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC | 476 #elif V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC |
| 762 return CpuFeatures::IsSupported(UNALIGNED_ACCESSES); | 477 return CpuFeatures::IsSupported(UNALIGNED_ACCESSES); |
| 763 #elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 | 478 #elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_X87 |
| 764 return true; | 479 return true; |
| 765 #else | 480 #else |
| 766 #error "Unknown Architecture" | 481 #error "Unknown Architecture" |
| 767 #endif | 482 #endif |
| 768 } | 483 } |
| 769 | 484 |
| 770 | 485 } // namespace interpreter |
| 771 // RawMachineAssembler delegate helpers: | |
| 772 Isolate* InterpreterAssembler::isolate() { return raw_assembler_->isolate(); } | |
| 773 | |
| 774 | |
| 775 Graph* InterpreterAssembler::graph() { return raw_assembler_->graph(); } | |
| 776 | |
| 777 | |
| 778 CallDescriptor* InterpreterAssembler::call_descriptor() const { | |
| 779 return raw_assembler_->call_descriptor(); | |
| 780 } | |
| 781 | |
| 782 | |
| 783 Zone* InterpreterAssembler::zone() { return raw_assembler_->zone(); } | |
| 784 | |
| 785 | |
| 786 } // namespace compiler | |
| 787 } // namespace internal | 486 } // namespace internal |
| 788 } // namespace v8 | 487 } // namespace v8 |
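For context on how the refactored assembler is driven, here is a minimal, hypothetical sketch of a bytecode handler generator written against the post-refactor API. It only uses methods defined in this file plus `SmiTag`, which is assumed to be inherited from `compiler::CodeStubAssembler` now that the local helper is removed; the `GenerateLdaSmi8` function name and the free-generator-function shape are illustrative assumptions, not part of this CL.

```cpp
// Hypothetical sketch (not part of this CL): a handler generator for an
// "LdaSmi8 <imm8>"-style bytecode driving an InterpreterAssembler.
#include "src/interpreter/interpreter-assembler.h"

namespace v8 {
namespace internal {
namespace interpreter {

void GenerateLdaSmi8(InterpreterAssembler* assembler) {
  // Read the sign-extended 8-bit immediate operand of the current bytecode.
  compiler::Node* raw_int = assembler->BytecodeOperandImm(0);
  // SmiTag is assumed to come from the compiler::CodeStubAssembler base
  // class, since the local SmiTag helper was deleted in this change.
  compiler::Node* smi_int = assembler->SmiTag(raw_int);
  // Leave the result in the accumulator and dispatch to the next bytecode.
  assembler->SetAccumulator(smi_int);
  assembler->Dispatch();
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8
```

The design point of the change itself is visible in the diff: the hand-rolled `RawMachineAssembler` plumbing (constant builders, call descriptors, `CallN`/`CallIC` wrappers, `GenerateCode`) is dropped in favour of deriving from `CodeStubAssembler`, so the assembler calls `Load`, `StoreNoWriteBarrier`, `CallStub`, `CallRuntime`, and `TailCall` directly.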