Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter-assembler.h" | 5 #include "src/interpreter/interpreter-assembler.h" |
| 6 | 6 |
| 7 #include <ostream> | 7 #include <ostream> |
| 8 | 8 |
| 9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
| 10 #include "src/frames.h" | 10 #include "src/frames.h" |
| 11 #include "src/interface-descriptors.h" | 11 #include "src/interface-descriptors.h" |
| 12 #include "src/interpreter/bytecodes.h" | 12 #include "src/interpreter/bytecodes.h" |
| 13 #include "src/interpreter/interpreter.h" | 13 #include "src/interpreter/interpreter.h" |
| 14 #include "src/machine-type.h" | 14 #include "src/machine-type.h" |
| 15 #include "src/macro-assembler.h" | 15 #include "src/macro-assembler.h" |
| 16 #include "src/zone.h" | 16 #include "src/zone.h" |
| 17 | 17 |
| 18 namespace v8 { | 18 namespace v8 { |
| 19 namespace internal { | 19 namespace internal { |
| 20 namespace interpreter { | 20 namespace interpreter { |
| 21 | 21 |
| 22 using compiler::Node; | 22 using compiler::Node; |
| 23 | 23 |
| 24 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone, | 24 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone, |
| 25 Bytecode bytecode) | 25 Bytecode bytecode, int operand_scale) |
| 26 : compiler::CodeStubAssembler(isolate, zone, | 26 : compiler::CodeStubAssembler(isolate, zone, |
| 27 InterpreterDispatchDescriptor(isolate), | 27 InterpreterDispatchDescriptor(isolate), |
| 28 Code::ComputeFlags(Code::BYTECODE_HANDLER), | 28 Code::ComputeFlags(Code::BYTECODE_HANDLER), |
| 29 Bytecodes::ToString(bytecode), 0), | 29 Bytecodes::ToString(bytecode), 0), |
| 30 bytecode_(bytecode), | 30 bytecode_(bytecode), |
| 31 operand_scale_(operand_scale), | |
| 31 accumulator_(this, MachineRepresentation::kTagged), | 32 accumulator_(this, MachineRepresentation::kTagged), |
| 32 context_(this, MachineRepresentation::kTagged), | 33 context_(this, MachineRepresentation::kTagged), |
| 33 bytecode_array_(this, MachineRepresentation::kTagged), | 34 bytecode_array_(this, MachineRepresentation::kTagged), |
| 34 disable_stack_check_across_call_(false), | 35 disable_stack_check_across_call_(false), |
| 35 stack_pointer_before_call_(nullptr) { | 36 stack_pointer_before_call_(nullptr) { |
| 36 accumulator_.Bind( | 37 accumulator_.Bind( |
| 37 Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter)); | 38 Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter)); |
| 38 context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter)); | 39 context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter)); |
| 39 bytecode_array_.Bind( | 40 bytecode_array_.Bind( |
| 40 Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter)); | 41 Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter)); |
| (...skipping 36 matching lines...) | |
| 77 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) { | 78 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) { |
| 78 return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index)); | 79 return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index)); |
| 79 } | 80 } |
| 80 | 81 |
| 81 Node* InterpreterAssembler::LoadRegister(int offset) { | 82 Node* InterpreterAssembler::LoadRegister(int offset) { |
| 82 return Load(MachineType::AnyTagged(), RegisterFileRawPointer(), | 83 return Load(MachineType::AnyTagged(), RegisterFileRawPointer(), |
| 83 IntPtrConstant(offset)); | 84 IntPtrConstant(offset)); |
| 84 } | 85 } |
| 85 | 86 |
| 86 Node* InterpreterAssembler::LoadRegister(Register reg) { | 87 Node* InterpreterAssembler::LoadRegister(Register reg) { |
| 87 return LoadRegister(reg.ToOperand() << kPointerSizeLog2); | 88 return LoadRegister(IntPtrConstant(-reg.index())); |
| 88 } | 89 } |
| 89 | 90 |
| 90 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) { | 91 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) { |
| 91 return WordShl(index, kPointerSizeLog2); | 92 return WordShl(index, kPointerSizeLog2); |
| 92 } | 93 } |
| 93 | 94 |
| 94 Node* InterpreterAssembler::LoadRegister(Node* reg_index) { | 95 Node* InterpreterAssembler::LoadRegister(Node* reg_index) { |
| 95 return Load(MachineType::AnyTagged(), RegisterFileRawPointer(), | 96 return Load(MachineType::AnyTagged(), RegisterFileRawPointer(), |
| 96 RegisterFrameOffset(reg_index)); | 97 RegisterFrameOffset(reg_index)); |
| 97 } | 98 } |
| 98 | 99 |
| 99 Node* InterpreterAssembler::StoreRegister(Node* value, int offset) { | 100 Node* InterpreterAssembler::StoreRegister(Node* value, int offset) { |
| 100 return StoreNoWriteBarrier(MachineRepresentation::kTagged, | 101 return StoreNoWriteBarrier(MachineRepresentation::kTagged, |
| 101 RegisterFileRawPointer(), IntPtrConstant(offset), | 102 RegisterFileRawPointer(), IntPtrConstant(offset), |
| 102 value); | 103 value); |
| 103 } | 104 } |
| 104 | 105 |
| 105 Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) { | 106 Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) { |
| 106 return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2); | 107 return StoreRegister(value, IntPtrConstant(-reg.index())); |
| 107 } | 108 } |
| 108 | 109 |
| 109 Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) { | 110 Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) { |
| 110 return StoreNoWriteBarrier(MachineRepresentation::kTagged, | 111 return StoreNoWriteBarrier(MachineRepresentation::kTagged, |
| 111 RegisterFileRawPointer(), | 112 RegisterFileRawPointer(), |
| 112 RegisterFrameOffset(reg_index), value); | 113 RegisterFrameOffset(reg_index), value); |
| 113 } | 114 } |
| 114 | 115 |
| 115 Node* InterpreterAssembler::NextRegister(Node* reg_index) { | 116 Node* InterpreterAssembler::NextRegister(Node* reg_index) { |
| 116 // Register indexes are negative, so the next index is minus one. | 117 // Register indexes are negative, so the next index is minus one. |
| 117 return IntPtrAdd(reg_index, IntPtrConstant(-1)); | 118 return IntPtrAdd(reg_index, IntPtrConstant(-1)); |
| 118 } | 119 } |
| 119 | 120 |
| 120 Node* InterpreterAssembler::BytecodeOperand(int operand_index) { | 121 Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) { |
| 121 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); | 122 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 122 DCHECK_EQ(OperandSize::kByte, | 123 DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize( |
| 123 Bytecodes::GetOperandSize(bytecode_, operand_index)); | 124 bytecode_, operand_index, operand_scale())); |
| 124 return Load( | 125 return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 125 MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 126 IntPtrAdd(BytecodeOffset(), |
| 126 IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset( | 127 IntPtrConstant(Bytecodes::GetOperandOffset( |
| 127 bytecode_, operand_index)))); | 128 bytecode_, operand_index, operand_scale())))); |
| 128 } | 129 } |
| 129 | 130 |
| 130 Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) { | 131 Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) { |
| 131 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); | 132 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 132 DCHECK_EQ(OperandSize::kByte, | 133 DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize( |
| 133 Bytecodes::GetOperandSize(bytecode_, operand_index)); | 134 bytecode_, operand_index, operand_scale())); |
| 134 Node* load = Load( | 135 Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), |
| 135 MachineType::Int8(), BytecodeArrayTaggedPointer(), | 136 IntPtrAdd(BytecodeOffset(), |
| 136 IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset( | 137 IntPtrConstant(Bytecodes::GetOperandOffset( |
| 137 bytecode_, operand_index)))); | 138 bytecode_, operand_index, operand_scale())))); |
| 138 // Ensure that we sign extend to full pointer size | 139 // Ensure that we sign extend to full pointer size |
| 139 if (kPointerSize == 8) { | 140 if (kPointerSize == 8) { |
| 140 load = ChangeInt32ToInt64(load); | 141 load = ChangeInt32ToInt64(load); |
| 141 } | 142 } |
| 142 return load; | 143 return load; |
| 143 } | 144 } |
| 144 | 145 |
| 145 Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) { | 146 Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) { |
| 146 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); | 147 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 147 DCHECK_EQ(OperandSize::kShort, | 148 DCHECK_EQ( |
| 148 Bytecodes::GetOperandSize(bytecode_, operand_index)); | 149 OperandSize::kShort, |
| 150 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale())); | |
| 149 if (TargetSupportsUnalignedAccess()) { | 151 if (TargetSupportsUnalignedAccess()) { |
| 150 return Load( | 152 return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(), |
| 151 MachineType::Uint16(), BytecodeArrayTaggedPointer(), | 153 IntPtrAdd(BytecodeOffset(), |
| 152 IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset( | 154 IntPtrConstant(Bytecodes::GetOperandOffset( |
| 153 bytecode_, operand_index)))); | 155 bytecode_, operand_index, operand_scale())))); |
| 154 } else { | 156 } else { |
| 155 int offset = Bytecodes::GetOperandOffset(bytecode_, operand_index); | 157 int offset = |
| 158 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()); | |
| 156 Node* first_byte = | 159 Node* first_byte = |
| 157 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 160 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 158 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset))); | 161 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset))); |
| 159 Node* second_byte = | 162 Node* second_byte = |
| 160 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 163 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 161 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset + 1))); | 164 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset + 1))); |
| 162 #if V8_TARGET_LITTLE_ENDIAN | 165 #if V8_TARGET_LITTLE_ENDIAN |
| 163 return WordOr(WordShl(second_byte, kBitsPerByte), first_byte); | 166 return WordOr(WordShl(second_byte, IntPtrConstant(kBitsPerByte)), |
| 167 first_byte); | |
| 164 #elif V8_TARGET_BIG_ENDIAN | 168 #elif V8_TARGET_BIG_ENDIAN |
| 165 return WordOr(WordShl(first_byte, kBitsPerByte), second_byte); | 169 return WordOr(WordShl(first_byte, IntPtrConstant(kBitsPerByte)), |
| 170 second_byte); | |
| 166 #else | 171 #else |
| 167 #error "Unknown Architecture" | 172 #error "Unknown Architecture" |
| 168 #endif | 173 #endif |
| 169 } | 174 } |
| 170 } | 175 } |
| 171 | 176 |
| 172 Node* InterpreterAssembler::BytecodeOperandShortSignExtended( | 177 Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) { |
| 173 int operand_index) { | |
| 174 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); | 178 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 175 DCHECK_EQ(OperandSize::kShort, | 179 DCHECK_EQ( |
| 176 Bytecodes::GetOperandSize(bytecode_, operand_index)); | 180 OperandSize::kShort, |
| 177 int operand_offset = Bytecodes::GetOperandOffset(bytecode_, operand_index); | 181 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale())); |
| 182 int operand_offset = | |
| 183 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()); | |
| 178 Node* load; | 184 Node* load; |
| 179 if (TargetSupportsUnalignedAccess()) { | 185 if (TargetSupportsUnalignedAccess()) { |
| 180 load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(), | 186 load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(), |
| 181 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset))); | 187 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset))); |
| 182 } else { | 188 } else { |
| 183 #if V8_TARGET_LITTLE_ENDIAN | 189 #if V8_TARGET_LITTLE_ENDIAN |
| 184 Node* hi_byte_offset = IntPtrConstant(operand_offset + 1); | 190 Node* hi_byte_offset = IntPtrConstant(operand_offset + 1); |
| 185 Node* lo_byte_offset = IntPtrConstant(operand_offset); | 191 Node* lo_byte_offset = IntPtrConstant(operand_offset); |
| 186 #elif V8_TARGET_BIG_ENDIAN | 192 #elif V8_TARGET_BIG_ENDIAN |
| 187 Node* hi_byte_offset = IntPtrConstant(operand_offset); | 193 Node* hi_byte_offset = IntPtrConstant(operand_offset); |
| 188 Node* lo_byte_offset = IntPtrConstant(operand_offset + 1); | 194 Node* lo_byte_offset = IntPtrConstant(operand_offset + 1); |
| 189 #else | 195 #else |
| 190 #error "Unknown Architecture" | 196 #error "Unknown Architecture" |
| 191 #endif | 197 #endif |
| 192 Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), | 198 Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), |
| 193 IntPtrAdd(BytecodeOffset(), hi_byte_offset)); | 199 IntPtrAdd(BytecodeOffset(), hi_byte_offset)); |
| 194 Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | 200 Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), |
| 195 IntPtrAdd(BytecodeOffset(), lo_byte_offset)); | 201 IntPtrAdd(BytecodeOffset(), lo_byte_offset)); |
| 196 hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte)); | 202 hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte)); |
| 197 load = Word32Or(hi_byte, lo_byte); | 203 load = Word32Or(hi_byte, lo_byte); |
| 198 } | 204 } |
| 199 | 205 |
| 200 // Ensure that we sign extend to full pointer size | 206 // Ensure that we sign extend to full pointer size |
| 201 if (kPointerSize == 8) { | 207 if (kPointerSize == 8) { |
| 202 load = ChangeInt32ToInt64(load); | 208 load = ChangeInt32ToInt64(load); |
| 203 } | 209 } |
| 204 return load; | 210 return load; |
| 205 } | 211 } |
| 206 | 212 |
| 207 Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) { | 213 Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) { |
| 208 switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) { | 214 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); |
| 215 DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize( | |
| 216 bytecode_, operand_index, operand_scale())); | |
| 217 int operand_offset = | |
| 218 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()); | |
| 219 if (TargetSupportsUnalignedAccess()) { | |
| 220 return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(), | |
| 221 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset))); | |
| 222 } else { | |
| 223 Node* bytes[4]; | |
| 224 for (int i = 0; i < static_cast<int>(arraysize(bytes)); ++i) { | |
| 225 bytes[i] = | |
| 226 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | |
| 227 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset + i))); | |
| 228 } | |
| 229 #if V8_TARGET_LITTLE_ENDIAN | |
| 230 Node* upper = | |
| 231 WordOr(WordShl(bytes[3], IntPtrConstant(kBitsPerByte)), bytes[2]); | |
| 232 Node* lower = | |
| 233 WordOr(WordShl(bytes[1], IntPtrConstant(kBitsPerByte)), bytes[0]); | |
| 234 return WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower); | |
| 235 #elif V8_TARGET_BIG_ENDIAN | |
| 236 Node* upper = | |
| 237 WordOr(WordShl(bytes[0], IntPtrConstant(kBitsPerByte)), bytes[1]); | |
| 238 Node* lower = | |
| 239 WordOr(WordShl(bytes[2], IntPtrConstant(kBitsPerByte)), bytes[3]); | |
| 240 return WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower); | |
| 241 #else | |
| 242 #error "Unknown Architecture" | |
| 243 #endif | |
| 244 } | |
| 245 } | |
| 246 | |
| 247 Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) { | |
| 248 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); | |
| 249 DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize( | |
| 250 bytecode_, operand_index, operand_scale())); | |
| 251 int operand_offset = | |
| 252 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()); | |
| 253 Node* load; | |
| 254 if (TargetSupportsUnalignedAccess()) { | |
| 255 load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), | |
| 256 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset))); | |
| 257 } else { | |
| 258 Node* bytes[4]; | |
| 259 for (int i = 0; i < static_cast<int>(arraysize(bytes)); ++i) { | |
| 260 bytes[i] = | |
| 261 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | |
| 262 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset + i))); | |
| 263 } | |
| 264 #if V8_TARGET_LITTLE_ENDIAN | |
| 265 Node* upper = | |
| 266 WordOr(WordShl(bytes[3], IntPtrConstant(kBitsPerByte)), bytes[2]); | |
| 267 Node* lower = | |
| 268 WordOr(WordShl(bytes[1], IntPtrConstant(kBitsPerByte)), bytes[0]); | |
| 269 load = WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower); | |
| 270 #elif V8_TARGET_BIG_ENDIAN | |
| 271 Node* upper = | |
| 272 WordOr(WordShl(bytes[0], IntPtrConstant(kBitsPerByte)), bytes[1]); | |
| 273 Node* lower = | |
| 274 WordOr(WordShl(bytes[2], IntPtrConstant(kBitsPerByte)), bytes[3]); | |
| 275 load = WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower); | |
| 276 #else | |
| 277 #error "Unknown Architecture" | |
| 278 #endif | |
| 279 } | |
| 280 | |
| 281 // Ensure that we sign extend to full pointer size | |
| 282 if (kPointerSize == 8) { | |
| 283 load = ChangeInt32ToInt64(load); | |
| 284 } | |
| 285 return load; | |
| 286 } | |
| 287 | |
| 288 Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index, | |
| 289 OperandSize operand_size) { | |
| 290 DCHECK(!Bytecodes::IsUnsignedOperandType( | |
| 291 Bytecodes::GetOperandType(bytecode_, operand_index))); | |
| 292 switch (operand_size) { | |
| 209 case OperandSize::kByte: | 293 case OperandSize::kByte: |
| 210 DCHECK_EQ(OperandType::kRegCount8, | 294 return BytecodeOperandSignedByte(operand_index); |
| 211 Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 212 return BytecodeOperand(operand_index); | |
| 213 case OperandSize::kShort: | 295 case OperandSize::kShort: |
| 214 DCHECK_EQ(OperandType::kRegCount16, | 296 return BytecodeOperandSignedShort(operand_index); |
| 215 Bytecodes::GetOperandType(bytecode_, operand_index)); | 297 case OperandSize::kQuad: |
| 216 return BytecodeOperandShort(operand_index); | 298 return BytecodeOperandSignedQuad(operand_index); |
| 217 case OperandSize::kNone: | 299 case OperandSize::kNone: |
| 218 UNREACHABLE(); | 300 UNREACHABLE(); |
| 219 } | 301 } |
| 220 return nullptr; | 302 return nullptr; |
| 221 } | 303 } |
| 222 | 304 |
| 223 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) { | 305 Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index, |
| 224 DCHECK_EQ(OperandType::kImm8, | 306 OperandSize operand_size) { |
| 225 Bytecodes::GetOperandType(bytecode_, operand_index)); | 307 DCHECK(Bytecodes::IsUnsignedOperandType( |
| 226 return BytecodeOperandSignExtended(operand_index); | 308 Bytecodes::GetOperandType(bytecode_, operand_index))); |
| 227 } | 309 switch (operand_size) { |
| 228 | |
| 229 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) { | |
| 230 switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) { | |
| 231 case OperandSize::kByte: | 310 case OperandSize::kByte: |
| 232 DCHECK_EQ(OperandType::kIdx8, | 311 return BytecodeOperandUnsignedByte(operand_index); |
| 233 Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 234 return BytecodeOperand(operand_index); | |
| 235 case OperandSize::kShort: | 312 case OperandSize::kShort: |
| 236 DCHECK_EQ(OperandType::kIdx16, | 313 return BytecodeOperandUnsignedShort(operand_index); |
| 237 Bytecodes::GetOperandType(bytecode_, operand_index)); | 314 case OperandSize::kQuad: |
| 238 return BytecodeOperandShort(operand_index); | 315 return BytecodeOperandUnsignedQuad(operand_index); |
| 239 case OperandSize::kNone: | 316 case OperandSize::kNone: |
| 240 UNREACHABLE(); | 317 UNREACHABLE(); |
| 241 } | 318 } |
| 242 return nullptr; | 319 return nullptr; |
| 243 } | 320 } |
| 244 | 321 |
| 322 Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) { | |
| 323 DCHECK_EQ(OperandType::kRegCount, | |
| 324 Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 325 OperandSize operand_size = | |
| 326 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); | |
| 327 return BytecodeUnsignedOperand(operand_index, operand_size); | |
| 328 } | |
| 329 | |
| 330 Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) { | |
| 331 DCHECK_EQ(OperandType::kFlag8, | |
| 332 Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 333 OperandSize operand_size = | |
| 334 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); | |
| 335 DCHECK_EQ(operand_size, OperandSize::kByte); | |
| 336 return BytecodeUnsignedOperand(operand_index, operand_size); | |
| 337 } | |
| 338 | |
| 339 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) { | |
| 340 DCHECK_EQ(OperandType::kImm, | |
| 341 Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 342 OperandSize operand_size = | |
| 343 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); | |
| 344 return BytecodeSignedOperand(operand_index, operand_size); | |
| 345 } | |
| 346 | |
| 347 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) { | |
| 348 DCHECK(OperandType::kIdx == | |
| 349 Bytecodes::GetOperandType(bytecode_, operand_index)); | |
| 350 OperandSize operand_size = | |
| 351 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); | |
| 352 return BytecodeUnsignedOperand(operand_index, operand_size); | |
| 353 } | |
| 354 | |
| 245 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) { | 355 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) { |
| 246 OperandType operand_type = | 356 DCHECK(Bytecodes::IsRegisterOperandType( |
| 247 Bytecodes::GetOperandType(bytecode_, operand_index); | 357 Bytecodes::GetOperandType(bytecode_, operand_index))); |
| 248 if (Bytecodes::IsRegisterOperandType(operand_type)) { | 358 OperandSize operand_size = |
| 249 OperandSize operand_size = Bytecodes::SizeOfOperand(operand_type); | 359 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); |
| 250 if (operand_size == OperandSize::kByte) { | 360 return BytecodeSignedOperand(operand_index, operand_size); |
| 251 return BytecodeOperandSignExtended(operand_index); | 361 } |
| 252 } else if (operand_size == OperandSize::kShort) { | 362 |
| 253 return BytecodeOperandShortSignExtended(operand_index); | 363 Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) { |
| 254 } | 364 DCHECK(OperandType::kRuntimeId == |
| 255 } | 365 Bytecodes::GetOperandType(bytecode_, operand_index)); |
| 256 UNREACHABLE(); | 366 OperandSize operand_size = |
| 257 return nullptr; | 367 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()); |
| 368 DCHECK_EQ(operand_size, OperandSize::kShort); | |
| 369 return BytecodeUnsignedOperand(operand_index, operand_size); | |
| 258 } | 370 } |
| 259 | 371 |
| 260 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { | 372 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { |
| 261 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), | 373 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), |
| 262 BytecodeArray::kConstantPoolOffset); | 374 BytecodeArray::kConstantPoolOffset); |
| 263 Node* entry_offset = | 375 Node* entry_offset = |
| 264 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), | 376 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), |
| 265 WordShl(index, kPointerSizeLog2)); | 377 WordShl(index, kPointerSizeLog2)); |
| 266 return Load(MachineType::AnyTagged(), constant_pool, entry_offset); | 378 return Load(MachineType::AnyTagged(), constant_pool, entry_offset); |
| 267 } | 379 } |
| (...skipping 157 matching lines...) | |
| 425 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) { | 537 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) { |
| 426 JumpConditional(WordEqual(lhs, rhs), delta); | 538 JumpConditional(WordEqual(lhs, rhs), delta); |
| 427 } | 539 } |
| 428 | 540 |
| 429 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs, | 541 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs, |
| 430 Node* delta) { | 542 Node* delta) { |
| 431 JumpConditional(WordNotEqual(lhs, rhs), delta); | 543 JumpConditional(WordNotEqual(lhs, rhs), delta); |
| 432 } | 544 } |
| 433 | 545 |
| 434 void InterpreterAssembler::Dispatch() { | 546 void InterpreterAssembler::Dispatch() { |
| 435 DispatchTo(Advance(Bytecodes::Size(bytecode_))); | 547 DCHECK(base::bits::IsPowerOfTwo32(operand_scale_)); |

> **rmcilroy** (2016/03/16 11:55:54): Not sure why this DCHECK is needed here, the code
>
> **oth** (2016/03/17 13:48:38): Done.

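The question above concerns the new `operand_scale_` field. Valid operand scales in this design are 1, 2, and 4 (plain, `Wide`, and `ExtraWide` encodings), so the value is always a power of two, and `Bytecodes::Size(bytecode_, operand_scale_)` has to account for the scale when advancing to the next bytecode. Below is a minimal standalone sketch of that size arithmetic; it assumes operands whose base sizes scale linearly and ignores V8's real operand-type tables, so the helper name and parameters are illustrative only.

```cpp
#include <cassert>

// Hypothetical illustration (not V8's API): a bytecode with N operands,
// each occupying base_size * operand_scale bytes, plus one opcode byte.
// Valid scales are 1 (plain), 2 (Wide prefix), 4 (ExtraWide prefix).
int EncodedBytecodeSize(int operand_count, int base_operand_size,
                        int operand_scale) {
  assert(operand_scale == 1 || operand_scale == 2 || operand_scale == 4);
  // Power-of-two check mirrors the DCHECK questioned in the review above.
  assert((operand_scale & (operand_scale - 1)) == 0);
  return 1 /* opcode byte */ + operand_count * base_operand_size * operand_scale;
}

int main() {
  // e.g. a hypothetical two-operand bytecode with byte-sized base operands:
  assert(EncodedBytecodeSize(2, 1, 1) == 3);  // plain encoding
  assert(EncodedBytecodeSize(2, 1, 2) == 5);  // Wide
  assert(EncodedBytecodeSize(2, 1, 4) == 9);  // ExtraWide
  return 0;
}
```
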
| 548 DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_))); | |
| 436 } | 549 } |
| 437 | 550 |
| 438 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) { | 551 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) { |
| 439 Node* target_bytecode = Load( | 552 Node* target_bytecode = Load( |
| 440 MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset); | 553 MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset); |
| 441 if (kPointerSize == 8) { | 554 if (kPointerSize == 8) { |
| 442 target_bytecode = ChangeUint32ToUint64(target_bytecode); | 555 target_bytecode = ChangeUint32ToUint64(target_bytecode); |
| 443 } | 556 } |
| 444 | 557 |
| 445 // TODO(rmcilroy): Create a code target dispatch table to avoid conversion | 558 // TODO(rmcilroy): Create a code target dispatch table to avoid conversion |
| (...skipping 11 matching lines...) | |
| 457 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); | 570 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); |
| 458 } | 571 } |
| 459 | 572 |
| 460 InterpreterDispatchDescriptor descriptor(isolate()); | 573 InterpreterDispatchDescriptor descriptor(isolate()); |
| 461 Node* args[] = {GetAccumulator(), RegisterFileRawPointer(), | 574 Node* args[] = {GetAccumulator(), RegisterFileRawPointer(), |
| 462 bytecode_offset, BytecodeArrayTaggedPointer(), | 575 bytecode_offset, BytecodeArrayTaggedPointer(), |
| 463 DispatchTableRawPointer(), GetContext()}; | 576 DispatchTableRawPointer(), GetContext()}; |
| 464 TailCall(descriptor, handler, args, 0); | 577 TailCall(descriptor, handler, args, 0); |
| 465 } | 578 } |
| 466 | 579 |
| 580 void InterpreterAssembler::RedispatchWide(int operand_scale) { | |
| 581 DCHECK(base::bits::IsPowerOfTwo32(static_cast<uint32_t>(operand_scale))); | |
| 582 DCHECK(operand_scale == 2 || operand_scale == 4); | |
| 583 uint32_t log2_scale = | |
| 584 base::bits::CountTrailingZeros32(static_cast<uint32_t>(operand_scale)); | |
| 585 Node* target_bytecode_hi = IntPtrConstant(log2_scale << kBitsPerByte); | |
| 586 Node* target_bytecode_lo_offset = Advance(1); | |
| 587 Node* target_bytecode_lo = | |
| 588 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), | |
| 589 target_bytecode_lo_offset); | |
| 590 if (kPointerSize == 8) { | |
| 591 target_bytecode_lo = ChangeUint32ToUint64(target_bytecode_lo); | |
| 592 } | |
| 593 Node* target_bytecode = WordOr(target_bytecode_hi, target_bytecode_lo); | |
| 594 Node* target_code_object = | |
| 595 Load(MachineType::Pointer(), DispatchTableRawPointer(), | |
| 596 WordShl(target_bytecode, kPointerSizeLog2)); | |
| 597 DispatchToBytecodeHandler(target_code_object, target_bytecode_lo_offset); | |
| 598 } | |
| 599 | |
| 467 void InterpreterAssembler::InterpreterReturn() { | 600 void InterpreterAssembler::InterpreterReturn() { |
| 468 // TODO(rmcilroy): Investigate whether it is worth supporting self | 601 // TODO(rmcilroy): Investigate whether it is worth supporting self |
| 469 // optimization of primitive functions like FullCodegen. | 602 // optimization of primitive functions like FullCodegen. |
| 470 | 603 |
| 471 // Update profiling count by -BytecodeOffset to simulate backedge to start of | 604 // Update profiling count by -BytecodeOffset to simulate backedge to start of |
| 472 // function. | 605 // function. |
| 473 Node* profiling_weight = | 606 Node* profiling_weight = |
| 474 Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize), | 607 Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize), |
| 475 BytecodeOffset()); | 608 BytecodeOffset()); |
| 476 UpdateInterruptBudget(profiling_weight); | 609 UpdateInterruptBudget(profiling_weight); |
| (...skipping 58 matching lines...) | |
| 535 V8_TARGET_ARCH_S390 | 668 V8_TARGET_ARCH_S390 |
| 536 return true; | 669 return true; |
| 537 #else | 670 #else |
| 538 #error "Unknown Architecture" | 671 #error "Unknown Architecture" |
| 539 #endif | 672 #endif |
| 540 } | 673 } |
| 541 | 674 |
| 542 } // namespace interpreter | 675 } // namespace interpreter |
| 543 } // namespace internal | 676 } // namespace internal |
| 544 } // namespace v8 | 677 } // namespace v8 |
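
Two of the paths added in this change are worth illustrating outside the CodeStubAssembler graph. First, on targets without unaligned access support, `BytecodeOperandUnsignedShort` and the new `BytecodeOperandUnsignedQuad`/`SignedQuad` assemble operands from single byte loads. The sketch below reproduces that shift-and-or composition in plain C++, assuming a little-endian target (the diff selects byte order with `V8_TARGET_LITTLE_ENDIAN`/`V8_TARGET_BIG_ENDIAN`); the function names are illustrative, not V8 API.

```cpp
#include <cassert>
#include <cstdint>

// Compose a 16-bit operand from two byte loads, little-endian order.
// Mirrors WordOr(WordShl(second_byte, kBitsPerByte), first_byte).
uint32_t ReadUnalignedU16LE(const uint8_t* bytes, int offset) {
  uint32_t lo = bytes[offset];
  uint32_t hi = bytes[offset + 1];
  return (hi << 8) | lo;
}

// Compose a 32-bit operand from four byte loads, pairing bytes the same way
// the assembler version does: upper = (b3 << 8) | b2, lower = (b1 << 8) | b0,
// result = (upper << 16) | lower.
uint32_t ReadUnalignedU32LE(const uint8_t* bytes, int offset) {
  uint32_t upper = (uint32_t{bytes[offset + 3]} << 8) | bytes[offset + 2];
  uint32_t lower = (uint32_t{bytes[offset + 1]} << 8) | bytes[offset];
  return (upper << 16) | lower;
}

int main() {
  const uint8_t bytecode_stream[] = {0x0a, 0x34, 0x12, 0x78, 0x56, 0x34, 0x12};
  assert(ReadUnalignedU16LE(bytecode_stream, 1) == 0x1234u);
  assert(ReadUnalignedU32LE(bytecode_stream, 3) == 0x12345678u);
  return 0;
}
```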
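
Second, `RedispatchWide` (new lines 580-598) builds the dispatch-table index for a prefixed bytecode by placing log2 of the operand scale in the byte above the target bytecode. That arithmetic implies, though this diff does not state it, that the dispatch table is laid out as consecutive 256-entry sections: scale 1 at indices 0-255, scale 2 at 256-511, scale 4 at 512-767. A hedged sketch of the index computation, with an illustrative helper name:

```cpp
#include <cassert>
#include <cstdint>

constexpr int kBitsPerByte = 8;

// Hypothetical helper mirroring RedispatchWide's index math:
// index = (log2(operand_scale) << 8) | target_bytecode.
uint32_t WideDispatchIndex(uint8_t target_bytecode, int operand_scale) {
  assert(operand_scale == 2 || operand_scale == 4);
  uint32_t log2_scale = (operand_scale == 2) ? 1 : 2;  // CountTrailingZeros32
  return (log2_scale << kBitsPerByte) | target_bytecode;
}

int main() {
  // A Wide-prefixed bytecode 0x2a would land in the second 256-entry section:
  assert(WideDispatchIndex(0x2a, 2) == 256 + 0x2a);
  // An ExtraWide-prefixed one in the third:
  assert(WideDispatchIndex(0x2a, 4) == 512 + 0x2a);
  return 0;
}
```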