Chromium Code Reviews

Unified Diff: src/interpreter/interpreter-assembler.cc

Issue 1783483002: [interpreter] Add support for scalable operands. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Re-generate golden files. Created 4 years, 9 months ago
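Context for readers of this diff: the change introduces an OperandScale (single, double, quadruple) selected by wide / extra-wide prefix bytecodes. The operand accessors in this file now take the scale into account, and scaled handlers get their own dispatch-table sections (see DispatchWide further down). Below is a minimal standalone sketch of how a scalable operand widens on the bytecode stream; it is not part of the patch and its names are illustrative only.

  // sketch_scalable_operands.cc -- illustrative only; not V8's real encoding.
  #include <cstdio>

  // A scalable operand that takes 1 byte at single scale takes 2 bytes after a
  // wide prefix and 4 bytes after an extra-wide prefix. Fixed-width operands
  // (e.g. the kFlag8 and kRuntimeId operands asserted on in this diff) keep
  // their size regardless of the prefix.
  int ScaledOperandWidth(int unscaled_bytes, int operand_scale) {
    return unscaled_bytes * operand_scale;
  }

  int main() {
    const int scales[] = {1, 2, 4};
    for (int scale : scales) {
      // prefix byte (if scale > 1) + opcode byte + one scalable byte operand
      int prefix = (scale > 1) ? 1 : 0;
      std::printf("scale %d: %d operand byte(s), %d bytes on the stream\n",
                  scale, ScaledOperandWidth(1, scale),
                  prefix + 1 + ScaledOperandWidth(1, scale));
    }
  }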
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "src/interpreter/interpreter-assembler.h"
 
 #include <ostream>
 
 #include "src/code-factory.h"
 #include "src/frames.h"
 #include "src/interface-descriptors.h"
 #include "src/interpreter/bytecodes.h"
 #include "src/interpreter/interpreter.h"
 #include "src/machine-type.h"
 #include "src/macro-assembler.h"
 #include "src/zone.h"
 
 namespace v8 {
 namespace internal {
 namespace interpreter {
 
 using compiler::Node;
 
 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
-                                           Bytecode bytecode)
+                                           Bytecode bytecode,
+                                           OperandScale operand_scale)
     : compiler::CodeStubAssembler(isolate, zone,
                                   InterpreterDispatchDescriptor(isolate),
                                   Code::ComputeFlags(Code::BYTECODE_HANDLER),
                                   Bytecodes::ToString(bytecode), 0),
       bytecode_(bytecode),
+      operand_scale_(operand_scale),
       accumulator_(this, MachineRepresentation::kTagged),
       context_(this, MachineRepresentation::kTagged),
       bytecode_array_(this, MachineRepresentation::kTagged),
       disable_stack_check_across_call_(false),
       stack_pointer_before_call_(nullptr) {
   accumulator_.Bind(
       Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
   context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter));
   bytecode_array_.Bind(
       Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter));
(...skipping 36 matching lines...)
 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
   return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index));
 }
 
 Node* InterpreterAssembler::LoadRegister(int offset) {
   return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
               IntPtrConstant(offset));
 }
 
 Node* InterpreterAssembler::LoadRegister(Register reg) {
-  return LoadRegister(reg.ToOperand() << kPointerSizeLog2);
+  return LoadRegister(IntPtrConstant(-reg.index()));
 }
 
 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
   return WordShl(index, kPointerSizeLog2);
 }
 
 Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
   return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
               RegisterFrameOffset(reg_index));
 }
 
 Node* InterpreterAssembler::StoreRegister(Node* value, int offset) {
   return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                              RegisterFileRawPointer(), IntPtrConstant(offset),
                              value);
 }
 
 Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
-  return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2);
+  return StoreRegister(value, IntPtrConstant(-reg.index()));
 }
 
 Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
   return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                              RegisterFileRawPointer(),
                              RegisterFrameOffset(reg_index), value);
 }
 
 Node* InterpreterAssembler::NextRegister(Node* reg_index) {
   // Register indexes are negative, so the next index is minus one.
   return IntPtrAdd(reg_index, IntPtrConstant(-1));
 }
 
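The register helpers above turn an interpreter register index into a byte offset from the register file pointer (RegisterFrameOffset shifts by kPointerSizeLog2), and register indexes grow downwards, which is why NextRegister subtracts one. A small standalone sketch of that arithmetic, assuming a 64-bit target; not part of the patch.

  // sketch_register_offset.cc -- illustrative only.
  #include <cstdio>

  const int kPointerSize = 8;  // assumption: 64-bit target, 8-byte slots

  // Mirrors RegisterFrameOffset: multiply the index by the slot size
  // (the CodeStubAssembler does this with a << kPointerSizeLog2 shift).
  long FrameOffset(long reg_index) { return reg_index * kPointerSize; }

  int main() {
    // Register indexes are negative, so consecutive registers live at
    // successively lower addresses relative to the register file pointer.
    for (long index = 0; index >= -3; --index) {
      std::printf("register index %ld -> byte offset %ld\n", index,
                  FrameOffset(index));
    }
  }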
-Node* InterpreterAssembler::BytecodeOperand(int operand_index) {
-  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kByte,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  return Load(
-      MachineType::Uint8(), BytecodeArrayTaggedPointer(),
-      IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset(
-                                      bytecode_, operand_index))));
+Node* InterpreterAssembler::OperandOffset(int operand_index) {
+  return IntPtrConstant(
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
 }
 
-Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) {
-  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kByte,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  Node* load = Load(
-      MachineType::Int8(), BytecodeArrayTaggedPointer(),
-      IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset(
-                                      bytecode_, operand_index))));
+Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  Node* operand_offset = OperandOffset(operand_index);
+  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
+              IntPtrAdd(BytecodeOffset(), operand_offset));
+}
+
+Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  Node* operand_offset = OperandOffset(operand_index);
+  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
+                    IntPtrAdd(BytecodeOffset(), operand_offset));
+
   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }
 
-Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) {
-  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kShort,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  if (TargetSupportsUnalignedAccess()) {
-    return Load(
-        MachineType::Uint16(), BytecodeArrayTaggedPointer(),
-        IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset(
-                                        bytecode_, operand_index))));
-  } else {
-    int offset = Bytecodes::GetOperandOffset(bytecode_, operand_index);
-    Node* first_byte =
-        Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
-             IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset)));
-    Node* second_byte =
-        Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
-             IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset + 1)));
-#if V8_TARGET_LITTLE_ENDIAN
-    return WordOr(WordShl(second_byte, kBitsPerByte), first_byte);
-#elif V8_TARGET_BIG_ENDIAN
-    return WordOr(WordShl(first_byte, kBitsPerByte), second_byte);
-#else
-#error "Unknown Architecture"
-#endif
+compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
+    int relative_offset, MachineType result_type) {
+  static const int kMaxCount = 4;
+  DCHECK(!TargetSupportsUnalignedAccess());
+
+  int count;
+  switch (result_type.representation()) {
+    case MachineRepresentation::kWord16:
+      count = 2;
+      break;
+    case MachineRepresentation::kWord32:
+      count = 4;
+      break;
+    default:
+      UNREACHABLE();
+      break;
+  }
+  MachineType msb_type =
+      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();
+
+#if V8_TARGET_LITTLE_ENDIAN
+  const int kStep = -1;
+  int msb_offset = count - 1;
+#elif V8_TARGET_BIG_ENDIAN
+  const int kStep = 1;
+  int msb_offset = 0;
+#else
+#error "Unknown Architecture"
+#endif
+
+  // Read the most significant byte into bytes[0] and then in order
+  // down to least significant in bytes[count - 1].
+  DCHECK(count <= kMaxCount);
+  compiler::Node* bytes[kMaxCount];
+  for (int i = 0; i < count; i++) {
+    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
+    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
+    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
+    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset);
+  }
+
+  // Pack LSB to MSB.
+  Node* result = bytes[--count];
+  for (int i = 1; --count >= 0; i++) {
+    Node* shift = Int32Constant(i * kBitsPerByte);
+    Node* value = Word32Shl(bytes[count], shift);
+    result = Word32Or(value, result);
+  }
+  return result;
+}
+
+Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(
+      OperandSize::kShort,
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
+  if (TargetSupportsUnalignedAccess()) {
+    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+  } else {
+    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
   }
 }
 
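For targets without unaligned loads, BytecodeOperandReadUnaligned above reads the operand one byte at a time, starting from the most significant byte (loaded with the signed machine type so the final value is sign extended), then ORs the bytes together from least to most significant. A minimal standalone sketch of the same packing for a signed 16-bit little-endian operand; not part of the patch.

  // sketch_unaligned_read.cc -- illustrative only.
  #include <cstdint>
  #include <cstdio>

  // Read a signed little-endian 16-bit operand byte by byte, mirroring the
  // MSB-first load order and LSB-to-MSB packing used above.
  int32_t ReadInt16LittleEndian(const uint8_t* p) {
    const int count = 2;               // kWord16 -> 2 bytes
    const int msb_offset = count - 1;  // little endian: MSB is the last byte
    const int step = -1;
    int32_t bytes[4];
    for (int i = 0; i < count; i++) {
      int offset = msb_offset + i * step;
      // bytes[0] is loaded as a signed byte so the result is sign extended.
      bytes[i] = (i == 0) ? static_cast<int8_t>(p[offset]) : p[offset];
    }
    // Pack LSB to MSB.
    int idx = count - 1;
    uint32_t result = static_cast<uint32_t>(bytes[idx--]);
    for (int i = 1; idx >= 0; i++, idx--) {
      result |= static_cast<uint32_t>(bytes[idx]) << (i * 8);
    }
    return static_cast<int32_t>(result);
  }

  int main() {
    const uint8_t operand[2] = {0xFE, 0xFF};  // -2, little endian
    std::printf("%d\n", ReadInt16LittleEndian(operand));  // prints -2
  }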
-Node* InterpreterAssembler::BytecodeOperandShortSignExtended(
-    int operand_index) {
+Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kShort,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  int operand_offset = Bytecodes::GetOperandOffset(bytecode_, operand_index);
+  DCHECK_EQ(
+      OperandSize::kShort,
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   Node* load;
   if (TargetSupportsUnalignedAccess()) {
     load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
   } else {
-#if V8_TARGET_LITTLE_ENDIAN
-    Node* hi_byte_offset = IntPtrConstant(operand_offset + 1);
-    Node* lo_byte_offset = IntPtrConstant(operand_offset);
-#elif V8_TARGET_BIG_ENDIAN
-    Node* hi_byte_offset = IntPtrConstant(operand_offset);
-    Node* lo_byte_offset = IntPtrConstant(operand_offset + 1);
-#else
-#error "Unknown Architecture"
-#endif
-    Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
-                         IntPtrAdd(BytecodeOffset(), hi_byte_offset));
-    Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
-                         IntPtrAdd(BytecodeOffset(), lo_byte_offset));
-    hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte));
-    load = Word32Or(hi_byte, lo_byte);
+    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
   }
 
   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }
 
-Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
-  switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) {
-    case OperandSize::kByte:
-      DCHECK_EQ(OperandType::kRegCount8,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperand(operand_index);
-    case OperandSize::kShort:
-      DCHECK_EQ(OperandType::kRegCount16,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperandShort(operand_index);
-    case OperandSize::kNone:
-      UNREACHABLE();
-  }
-  return nullptr;
-}
-
-Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
-  DCHECK_EQ(OperandType::kImm8,
-            Bytecodes::GetOperandType(bytecode_, operand_index));
-  return BytecodeOperandSignExtended(operand_index);
-}
-
-Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
-  switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) {
-    case OperandSize::kByte:
-      DCHECK_EQ(OperandType::kIdx8,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperand(operand_index);
-    case OperandSize::kShort:
-      DCHECK_EQ(OperandType::kIdx16,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperandShort(operand_index);
-    case OperandSize::kNone:
-      UNREACHABLE();
-  }
-  return nullptr;
-}
+Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
+  if (TargetSupportsUnalignedAccess()) {
+    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+  } else {
+    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
+  }
+}
+
+Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
+  Node* load;
+  if (TargetSupportsUnalignedAccess()) {
+    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+  } else {
+    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
+  }
+
+  // Ensure that we sign extend to full pointer size
+  if (kPointerSize == 8) {
+    load = ChangeInt32ToInt64(load);
+  }
+  return load;
+}
+
+Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
+                                                  OperandSize operand_size) {
+  DCHECK(!Bytecodes::IsUnsignedOperandType(
+      Bytecodes::GetOperandType(bytecode_, operand_index)));
+  switch (operand_size) {
+    case OperandSize::kByte:
+      return BytecodeOperandSignedByte(operand_index);
+    case OperandSize::kShort:
+      return BytecodeOperandSignedShort(operand_index);
+    case OperandSize::kQuad:
+      return BytecodeOperandSignedQuad(operand_index);
+    case OperandSize::kNone:
+      UNREACHABLE();
+  }
+  return nullptr;
+}
+
+Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
+                                                    OperandSize operand_size) {
+  DCHECK(Bytecodes::IsUnsignedOperandType(
+      Bytecodes::GetOperandType(bytecode_, operand_index)));
+  switch (operand_size) {
+    case OperandSize::kByte:
+      return BytecodeOperandUnsignedByte(operand_index);
+    case OperandSize::kShort:
+      return BytecodeOperandUnsignedShort(operand_index);
+    case OperandSize::kQuad:
+      return BytecodeOperandUnsignedQuad(operand_index);
+    case OperandSize::kNone:
+      UNREACHABLE();
+  }
+  return nullptr;
+}
 
+Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
+  DCHECK_EQ(OperandType::kRegCount,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
+  DCHECK_EQ(OperandType::kFlag8,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  DCHECK_EQ(operand_size, OperandSize::kByte);
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
+  DCHECK_EQ(OperandType::kImm,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeSignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
+  DCHECK(OperandType::kIdx ==
+         Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
 
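The typed accessors above no longer switch on the operand size themselves: they look the size up for the current operand_scale() and delegate to the signed or unsigned helper, as dictated by the operand type. A small standalone sketch of that mapping, based on the DCHECKs in this hunk; the names are illustrative only and it is not part of the patch.

  // sketch_operand_signedness.cc -- illustrative only.
  #include <cstdio>

  enum class OperandType { kRegCount, kFlag8, kImm, kIdx, kReg, kRuntimeId };

  // Immediates and register operands are read sign extended; counts, indexes,
  // flags and runtime ids are read zero extended (cf. BytecodeSignedOperand
  // and BytecodeUnsignedOperand above).
  bool IsSignedOperand(OperandType type) {
    switch (type) {
      case OperandType::kImm:
      case OperandType::kReg:
        return true;
      default:
        return false;
    }
  }

  int main() {
    std::printf("kImm signed: %d\n", IsSignedOperand(OperandType::kImm));  // 1
    std::printf("kIdx signed: %d\n", IsSignedOperand(OperandType::kIdx));  // 0
  }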
 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
-  OperandType operand_type =
-      Bytecodes::GetOperandType(bytecode_, operand_index);
-  if (Bytecodes::IsRegisterOperandType(operand_type)) {
-    OperandSize operand_size = Bytecodes::SizeOfOperand(operand_type);
-    if (operand_size == OperandSize::kByte) {
-      return BytecodeOperandSignExtended(operand_index);
-    } else if (operand_size == OperandSize::kShort) {
-      return BytecodeOperandShortSignExtended(operand_index);
-    }
-  }
-  UNREACHABLE();
-  return nullptr;
+  DCHECK(Bytecodes::IsRegisterOperandType(
+      Bytecodes::GetOperandType(bytecode_, operand_index)));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeSignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
+  DCHECK(OperandType::kRuntimeId ==
+         Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  DCHECK_EQ(operand_size, OperandSize::kShort);
+  return BytecodeUnsignedOperand(operand_index, operand_size);
 }
 
 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
   Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                         BytecodeArray::kConstantPoolOffset);
   Node* entry_offset =
       IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                 WordShl(index, kPointerSizeLog2));
   return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
 }
(...skipping 155 matching lines...)
 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
   JumpConditional(WordEqual(lhs, rhs), delta);
 }
 
 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                               Node* delta) {
   JumpConditional(WordNotEqual(lhs, rhs), delta);
 }
 
 void InterpreterAssembler::Dispatch() {
-  DispatchTo(Advance(Bytecodes::Size(bytecode_)));
+  DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
 }
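Dispatch above now advances by Bytecodes::Size(bytecode_, operand_scale_), since the footprint of a bytecode depends on the scale of its scalable operands. A worked sketch of that size arithmetic, under the simplifying assumption that every operand is a scalable one-byte operand at single scale; not part of the patch.

  // sketch_bytecode_size.cc -- illustrative only; real operand counts and
  // types come from the Bytecodes tables.
  #include <cstdio>

  // Size of a bytecode = 1 opcode byte + each scalable operand widened by the
  // operand scale. The wide/extra-wide prefix that selected the scale is a
  // separate one-byte bytecode, advanced over by DispatchWide below.
  int BytecodeSize(int num_scalable_byte_operands, int operand_scale) {
    return 1 + num_scalable_byte_operands * operand_scale;
  }

  int main() {
    std::printf("2 operands, single scale: %d bytes\n", BytecodeSize(2, 1));  // 3
    std::printf("2 operands, double scale: %d bytes\n", BytecodeSize(2, 2));  // 5
    std::printf("2 operands, quad scale:   %d bytes\n", BytecodeSize(2, 4));  // 9
  }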
 
 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
   Node* target_bytecode = Load(
       MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
   if (kPointerSize == 8) {
     target_bytecode = ChangeUint32ToUint64(target_bytecode);
   }
 
   // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
(...skipping 11 matching lines...)
     TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
   }
 
   InterpreterDispatchDescriptor descriptor(isolate());
   Node* args[] = {GetAccumulator(), RegisterFileRawPointer(),
                   bytecode_offset, BytecodeArrayTaggedPointer(),
                   DispatchTableRawPointer(), GetContext()};
   TailCall(descriptor, handler, args, 0);
 }
 
+void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
+  // Dispatching a wide bytecode requires treating the prefix
+  // bytecode as a base pointer into the dispatch table and dispatching
+  // the bytecode that follows relative to this base.
+  //
+  // Indices 0-255 correspond to bytecodes with operand_scale == 0
+  // Indices 256-511 correspond to bytecodes with operand_scale == 1
+  // Indices 512-767 correspond to bytecodes with operand_scale == 2
+  Node* next_bytecode_offset = Advance(1);
+  Node* next_bytecode = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
+                             next_bytecode_offset);
+  if (kPointerSize == 8) {
+    next_bytecode = ChangeUint32ToUint64(next_bytecode);
+  }
+  Node* base_index;
+  switch (operand_scale) {
+    case OperandScale::kDouble:
+      base_index = IntPtrConstant(1 << kBitsPerByte);
+      break;
+    case OperandScale::kQuadruple:
+      base_index = IntPtrConstant(2 << kBitsPerByte);
+      break;
+    default:
+      UNREACHABLE();
+      base_index = nullptr;
+  }
+  Node* target_index = IntPtrAdd(base_index, next_bytecode);
+  Node* target_code_object =
+      Load(MachineType::Pointer(), DispatchTableRawPointer(),
+           WordShl(target_index, kPointerSizeLog2));
+
+  DispatchToBytecodeHandler(target_code_object, next_bytecode_offset);
+}
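The prefix handler above computes an index into a dispatch table that now holds three 256-entry sections, one per operand scale. A minimal standalone sketch of that index arithmetic, not part of the patch; the enum and function names are illustrative only.

  // sketch_wide_dispatch.cc -- illustrative only.
  #include <cstddef>
  #include <cstdio>

  const int kBitsPerByte = 8;

  enum class OperandScale { kSingle, kDouble, kQuadruple };

  // Mirrors the base_index selection in DispatchWide: scaled handlers live at
  // dispatch table indices 256-511 (double) and 512-767 (quadruple).
  size_t WideDispatchIndex(OperandScale scale, unsigned next_bytecode) {
    size_t base_index = 0;
    switch (scale) {
      case OperandScale::kDouble:
        base_index = 1 << kBitsPerByte;  // 256
        break;
      case OperandScale::kQuadruple:
        base_index = 2 << kBitsPerByte;  // 512
        break;
      default:
        break;  // unscaled bytecodes use indices 0-255
    }
    return base_index + next_bytecode;
  }

  int main() {
    // e.g. a bytecode value of 0x51 following an extra-wide prefix:
    std::printf("%zu\n", WideDispatchIndex(OperandScale::kQuadruple, 0x51));
    // prints 593 (= 512 + 81)
  }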
 
 void InterpreterAssembler::InterpreterReturn() {
   // TODO(rmcilroy): Investigate whether it is worth supporting self
   // optimization of primitive functions like FullCodegen.
 
   // Update profiling count by -BytecodeOffset to simulate backedge to start of
   // function.
   Node* profiling_weight =
       Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
                BytecodeOffset());
   UpdateInterruptBudget(profiling_weight);
(...skipping 58 matching lines...)
        V8_TARGET_ARCH_S390
   return true;
 #else
 #error "Unknown Architecture"
 #endif
 }
 
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8