Chromium Code Reviews

Side by Side Diff: src/interpreter/interpreter-assembler.cc

Issue 1783483002: [interpreter] Add support for scalable operands. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Operand renaming. Created 4 years, 9 months ago
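This patch threads an operand_scale through InterpreterAssembler so that a bytecode's operands can be read at one, two, or four times their single-byte base width (OperandSize::kByte, kShort, kQuad), with the scale selected by the Wide and ExtraWide prefix bytecodes handled via RedispatchWide below. A minimal standalone C++ sketch of that size arithmetic follows; the enum values and the ScaledOperandSize helper are simplified illustrations, not the actual declarations in src/interpreter/bytecodes.h.

    // Simplified sketch of operand scaling; illustrative only.
    #include <cassert>

    enum class OperandSize { kNone = 0, kByte = 1, kShort = 2, kQuad = 4 };

    // The effective operand width is the single-byte base width multiplied by
    // the handler's operand scale: 1 for plain bytecodes, 2 under a Wide
    // prefix, 4 under an ExtraWide prefix. This sketch only covers byte-based
    // operands.
    OperandSize ScaledOperandSize(OperandSize base, int operand_scale) {
      assert(operand_scale == 1 || operand_scale == 2 || operand_scale == 4);
      if (base == OperandSize::kNone) return base;
      int scaled = static_cast<int>(base) * operand_scale;
      assert(scaled == 1 || scaled == 2 || scaled == 4);
      return static_cast<OperandSize>(scaled);
    }

    int main() {
      // A one-byte operand becomes a four-byte (kQuad) operand at scale 4,
      // which is why the assembler gains BytecodeOperandQuad* helpers below.
      assert(ScaledOperandSize(OperandSize::kByte, 4) == OperandSize::kQuad);
      return 0;
    }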
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/interpreter/interpreter-assembler.h" 5 #include "src/interpreter/interpreter-assembler.h"
6 6
7 #include <ostream> 7 #include <ostream>
8 8
9 #include "src/code-factory.h" 9 #include "src/code-factory.h"
10 #include "src/frames.h" 10 #include "src/frames.h"
11 #include "src/interface-descriptors.h" 11 #include "src/interface-descriptors.h"
12 #include "src/interpreter/bytecodes.h" 12 #include "src/interpreter/bytecodes.h"
13 #include "src/interpreter/interpreter.h" 13 #include "src/interpreter/interpreter.h"
14 #include "src/machine-type.h" 14 #include "src/machine-type.h"
15 #include "src/macro-assembler.h" 15 #include "src/macro-assembler.h"
16 #include "src/zone.h" 16 #include "src/zone.h"
17 17
18 namespace v8 { 18 namespace v8 {
19 namespace internal { 19 namespace internal {
20 namespace interpreter { 20 namespace interpreter {
21 21
22 using compiler::Node; 22 using compiler::Node;
23 23
24 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone, 24 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
25 Bytecode bytecode) 25 Bytecode bytecode, int operand_scale)
26 : compiler::CodeStubAssembler(isolate, zone, 26 : compiler::CodeStubAssembler(isolate, zone,
27 InterpreterDispatchDescriptor(isolate), 27 InterpreterDispatchDescriptor(isolate),
28 Code::ComputeFlags(Code::BYTECODE_HANDLER), 28 Code::ComputeFlags(Code::BYTECODE_HANDLER),
29 Bytecodes::ToString(bytecode), 0), 29 Bytecodes::ToString(bytecode), 0),
30 bytecode_(bytecode), 30 bytecode_(bytecode),
31 operand_scale_(operand_scale),
31 accumulator_(this, MachineRepresentation::kTagged), 32 accumulator_(this, MachineRepresentation::kTagged),
32 context_(this, MachineRepresentation::kTagged), 33 context_(this, MachineRepresentation::kTagged),
33 bytecode_array_(this, MachineRepresentation::kTagged), 34 bytecode_array_(this, MachineRepresentation::kTagged),
35 disable_stack_check_across_call_(false),
36 stack_pointer_before_call_(nullptr) {
37 accumulator_.Bind(
38 Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
39 context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter));
40 bytecode_array_.Bind(
41 Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter));
42 if (FLAG_trace_ignition) {
43 TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
44 }
45 }
46
47 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone)
48 : compiler::CodeStubAssembler(
49 isolate, zone, InterpreterDispatchDescriptor(isolate),
50 Code::ComputeFlags(Code::STUB), "InvalidBytecode", 0),
51 bytecode_(static_cast<Bytecode>(-1)),
52 operand_scale_(-1),
53 accumulator_(this, MachineRepresentation::kTagged),
54 context_(this, MachineRepresentation::kTagged),
55 bytecode_array_(this, MachineRepresentation::kTagged),
34 disable_stack_check_across_call_(false), 56 disable_stack_check_across_call_(false),
35 stack_pointer_before_call_(nullptr) { 57 stack_pointer_before_call_(nullptr) {
36 accumulator_.Bind( 58 accumulator_.Bind(
37 Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter)); 59 Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
38 context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter)); 60 context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter));
39 bytecode_array_.Bind( 61 bytecode_array_.Bind(
40 Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter)); 62 Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter));
41 if (FLAG_trace_ignition) { 63 if (FLAG_trace_ignition) {
42 TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry); 64 TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
43 } 65 }
(...skipping 68 matching lines...)
112 RegisterFrameOffset(reg_index), value); 134 RegisterFrameOffset(reg_index), value);
113 } 135 }
114 136
115 Node* InterpreterAssembler::NextRegister(Node* reg_index) { 137 Node* InterpreterAssembler::NextRegister(Node* reg_index) {
116 // Register indexes are negative, so the next index is minus one. 138 // Register indexes are negative, so the next index is minus one.
117 return IntPtrAdd(reg_index, IntPtrConstant(-1)); 139 return IntPtrAdd(reg_index, IntPtrConstant(-1));
118 } 140 }
119 141
120 Node* InterpreterAssembler::BytecodeOperand(int operand_index) { 142 Node* InterpreterAssembler::BytecodeOperand(int operand_index) {
121 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); 143 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
122 DCHECK_EQ(OperandSize::kByte, 144 DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
123 Bytecodes::GetOperandSize(bytecode_, operand_index)); 145 bytecode_, operand_index, operand_scale()));
124 return Load( 146 return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
125 MachineType::Uint8(), BytecodeArrayTaggedPointer(), 147 IntPtrAdd(BytecodeOffset(),
126 IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset( 148 IntPtrConstant(Bytecodes::GetOperandOffset(
127 bytecode_, operand_index)))); 149 bytecode_, operand_index, operand_scale()))));
128 } 150 }
129 151
130 Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) { 152 Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) {
131 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); 153 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
132 DCHECK_EQ(OperandSize::kByte, 154 DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
133 Bytecodes::GetOperandSize(bytecode_, operand_index)); 155 bytecode_, operand_index, operand_scale()));
134 Node* load = Load( 156 Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
135 MachineType::Int8(), BytecodeArrayTaggedPointer(), 157 IntPtrAdd(BytecodeOffset(),
136 IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset( 158 IntPtrConstant(Bytecodes::GetOperandOffset(
137 bytecode_, operand_index)))); 159 bytecode_, operand_index, operand_scale()))));
138 // Ensure that we sign extend to full pointer size 160 // Ensure that we sign extend to full pointer size
139 if (kPointerSize == 8) { 161 if (kPointerSize == 8) {
140 load = ChangeInt32ToInt64(load); 162 load = ChangeInt32ToInt64(load);
141 } 163 }
142 return load; 164 return load;
143 } 165 }
144 166
145 Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) { 167 Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) {
146 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); 168 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
147 DCHECK_EQ(OperandSize::kShort, 169 DCHECK_EQ(
148 Bytecodes::GetOperandSize(bytecode_, operand_index)); 170 OperandSize::kShort,
171 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
149 if (TargetSupportsUnalignedAccess()) { 172 if (TargetSupportsUnalignedAccess()) {
150 return Load( 173 return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
151 MachineType::Uint16(), BytecodeArrayTaggedPointer(), 174 IntPtrAdd(BytecodeOffset(),
152 IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset( 175 IntPtrConstant(Bytecodes::GetOperandOffset(
153 bytecode_, operand_index)))); 176 bytecode_, operand_index, operand_scale()))));
154 } else { 177 } else {
155 int offset = Bytecodes::GetOperandOffset(bytecode_, operand_index); 178 int offset =
179 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
156 Node* first_byte = 180 Node* first_byte =
157 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), 181 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
158 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset))); 182 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset)));
159 Node* second_byte = 183 Node* second_byte =
160 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), 184 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
161 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset + 1))); 185 IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset + 1)));
162 #if V8_TARGET_LITTLE_ENDIAN 186 #if V8_TARGET_LITTLE_ENDIAN
163 return WordOr(WordShl(second_byte, kBitsPerByte), first_byte); 187 return WordOr(WordShl(second_byte, IntPtrConstant(kBitsPerByte)),
188 first_byte);
164 #elif V8_TARGET_BIG_ENDIAN 189 #elif V8_TARGET_BIG_ENDIAN
165 return WordOr(WordShl(first_byte, kBitsPerByte), second_byte); 190 return WordOr(WordShl(first_byte, IntPtrConstant(kBitsPerByte)),
191 second_byte);
166 #else 192 #else
167 #error "Unknown Architecture" 193 #error "Unknown Architecture"
168 #endif 194 #endif
169 } 195 }
170 } 196 }
171 197
172 Node* InterpreterAssembler::BytecodeOperandShortSignExtended( 198 Node* InterpreterAssembler::BytecodeOperandShortSignExtended(
173 int operand_index) { 199 int operand_index) {
174 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_)); 200 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
175 DCHECK_EQ(OperandSize::kShort, 201 DCHECK_EQ(
176 Bytecodes::GetOperandSize(bytecode_, operand_index)); 202 OperandSize::kShort,
177 int operand_offset = Bytecodes::GetOperandOffset(bytecode_, operand_index); 203 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
204 int operand_offset =
205 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
178 Node* load; 206 Node* load;
179 if (TargetSupportsUnalignedAccess()) { 207 if (TargetSupportsUnalignedAccess()) {
180 load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(), 208 load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
181 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset))); 209 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
182 } else { 210 } else {
183 #if V8_TARGET_LITTLE_ENDIAN 211 #if V8_TARGET_LITTLE_ENDIAN
184 Node* hi_byte_offset = IntPtrConstant(operand_offset + 1); 212 Node* hi_byte_offset = IntPtrConstant(operand_offset + 1);
185 Node* lo_byte_offset = IntPtrConstant(operand_offset); 213 Node* lo_byte_offset = IntPtrConstant(operand_offset);
186 #elif V8_TARGET_BIG_ENDIAN 214 #elif V8_TARGET_BIG_ENDIAN
187 Node* hi_byte_offset = IntPtrConstant(operand_offset); 215 Node* hi_byte_offset = IntPtrConstant(operand_offset);
188 Node* lo_byte_offset = IntPtrConstant(operand_offset + 1); 216 Node* lo_byte_offset = IntPtrConstant(operand_offset + 1);
189 #else 217 #else
190 #error "Unknown Architecture" 218 #error "Unknown Architecture"
191 #endif 219 #endif
192 Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), 220 Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
193 IntPtrAdd(BytecodeOffset(), hi_byte_offset)); 221 IntPtrAdd(BytecodeOffset(), hi_byte_offset));
194 Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), 222 Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
195 IntPtrAdd(BytecodeOffset(), lo_byte_offset)); 223 IntPtrAdd(BytecodeOffset(), lo_byte_offset));
196 hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte)); 224 hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte));
197 load = Word32Or(hi_byte, lo_byte); 225 load = Word32Or(hi_byte, lo_byte);
198 } 226 }
199 227
200 // Ensure that we sign extend to full pointer size 228 // Ensure that we sign extend to full pointer size
201 if (kPointerSize == 8) { 229 if (kPointerSize == 8) {
202 load = ChangeInt32ToInt64(load); 230 load = ChangeInt32ToInt64(load);
203 } 231 }
204 return load; 232 return load;
205 } 233 }
206 234
235 Node* InterpreterAssembler::BytecodeOperandQuad(int operand_index) {
236 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
237 DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
238 bytecode_, operand_index, operand_scale()));
239 int operand_offset =
240 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
241 if (TargetSupportsUnalignedAccess()) {
242 return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
243 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
244 } else {
245 Node* bytes[4];
246 for (int i = 0; i < static_cast<int>(arraysize(bytes)); ++i) {
247 bytes[i] =
248 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
249 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset + i)));
250 }
251 #if V8_TARGET_LITTLE_ENDIAN
252 Node* upper =
253 WordOr(WordShl(bytes[3], IntPtrConstant(kBitsPerByte)), bytes[2]);
254 Node* lower =
255 WordOr(WordShl(bytes[1], IntPtrConstant(kBitsPerByte)), bytes[0]);
256 return WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
257 #elif V8_TARGET_BIG_ENDIAN
258 Node* upper =
259 WordOr(WordShl(bytes[0], IntPtrConstant(kBitsPerByte)), bytes[1]);
260 Node* lower =
261 WordOr(WordShl(bytes[2], IntPtrConstant(kBitsPerByte)), bytes[3]);
262 return WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
263 #else
264 #error "Unknown Architecture"
265 #endif
266 }
267 }
268
269 Node* InterpreterAssembler::BytecodeOperandQuadSignExtended(int operand_index) {
270 DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
271 DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
272 bytecode_, operand_index, operand_scale()));
273 int operand_offset =
274 Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
275 Node* load;
276 if (TargetSupportsUnalignedAccess()) {
277 load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
278 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
279 } else {
280 Node* bytes[4];
281 for (int i = 0; i < static_cast<int>(arraysize(bytes)); ++i) {
282 bytes[i] =
283 Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
284 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset + i)));
285 }
286 #if V8_TARGET_LITTLE_ENDIAN
287 Node* upper =
288 WordOr(WordShl(bytes[3], IntPtrConstant(kBitsPerByte)), bytes[2]);
289 Node* lower =
290 WordOr(WordShl(bytes[1], IntPtrConstant(kBitsPerByte)), bytes[0]);
291 load = WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
292 #elif V8_TARGET_BIG_ENDIAN
293 Node* upper =
294 WordOr(WordShl(bytes[0], IntPtrConstant(kBitsPerByte)), bytes[1]);
295 Node* lower =
296 WordOr(WordShl(bytes[2], IntPtrConstant(kBitsPerByte)), bytes[3]);
297 load = WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
298 #else
299 #error "Unknown Architecture"
300 #endif
301 }
302
303 // Ensure that we sign extend to full pointer size
304 if (kPointerSize == 8) {
305 load = ChangeInt32ToInt64(load);
306 }
307 return load;
308 }
309
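For reference, the two quad helpers above fetch a 32-bit operand either with a single unaligned Uint32/Int32 load or, on targets without unaligned access, by assembling it from four byte loads and then widening to pointer size for the signed case. The following plain C++ mirror of the little-endian path (the big-endian path simply reverses the byte order) is an illustration, not CodeStubAssembler code:

    #include <cstdint>
    #include <cstdio>

    // Assemble a little-endian 32-bit operand from four individually loaded
    // bytes, matching the WordShl/WordOr chain in BytecodeOperandQuad.
    uint32_t AssembleQuadLE(const uint8_t* operand_start) {
      uint32_t upper =
          (static_cast<uint32_t>(operand_start[3]) << 8) | operand_start[2];
      uint32_t lower =
          (static_cast<uint32_t>(operand_start[1]) << 8) | operand_start[0];
      return (upper << 16) | lower;
    }

    // Signed variant: reinterpret as int32 and widen, matching the
    // ChangeInt32ToInt64 step in BytecodeOperandQuadSignExtended.
    int64_t AssembleQuadSignedLE(const uint8_t* operand_start) {
      return static_cast<int32_t>(AssembleQuadLE(operand_start));
    }

    int main() {
      const uint8_t bytes[] = {0xFE, 0xFF, 0xFF, 0xFF};  // -2, little-endian
      std::printf("%u %lld\n", static_cast<unsigned>(AssembleQuadLE(bytes)),
                  static_cast<long long>(AssembleQuadSignedLE(bytes)));
      return 0;
    }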
207 Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) { 310 Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
208 switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) { 311 DCHECK_EQ(OperandType::kRegCount8,
312 Bytecodes::GetOperandType(bytecode_, operand_index));
313 OperandSize operand_size =
314 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
315 switch (operand_size) {
209 case OperandSize::kByte: 316 case OperandSize::kByte:
210 DCHECK_EQ(OperandType::kRegCount8,
211 Bytecodes::GetOperandType(bytecode_, operand_index));
212 return BytecodeOperand(operand_index); 317 return BytecodeOperand(operand_index);
213 case OperandSize::kShort: 318 case OperandSize::kShort:
214 DCHECK_EQ(OperandType::kRegCount16,
215 Bytecodes::GetOperandType(bytecode_, operand_index));
216 return BytecodeOperandShort(operand_index); 319 return BytecodeOperandShort(operand_index);
320 case OperandSize::kQuad:
321 return BytecodeOperandQuad(operand_index);
217 case OperandSize::kNone: 322 case OperandSize::kNone:
218 UNREACHABLE(); 323 UNREACHABLE();
219 } 324 }
325 return nullptr;
326 }
327
328 Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
329 DCHECK_EQ(OperandType::kFlag8,
330 Bytecodes::GetOperandType(bytecode_, operand_index));
331 OperandSize operand_size =
332 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
333 switch (operand_size) {
334 case OperandSize::kByte:
335 return BytecodeOperand(operand_index);
336 case OperandSize::kShort:
337 case OperandSize::kQuad:
338 case OperandSize::kNone:
339 UNREACHABLE();
340 }
220 return nullptr; 341 return nullptr;
221 } 342 }
222 343
223 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) { 344 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
224 DCHECK_EQ(OperandType::kImm8, 345 DCHECK_EQ(OperandType::kImm8,
225 Bytecodes::GetOperandType(bytecode_, operand_index)); 346 Bytecodes::GetOperandType(bytecode_, operand_index));
226 return BytecodeOperandSignExtended(operand_index); 347 OperandSize operand_size =
348 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
349 switch (operand_size) {
350 case OperandSize::kByte:
351 return BytecodeOperandSignExtended(operand_index);
352 case OperandSize::kShort:
353 return BytecodeOperandShortSignExtended(operand_index);
354 case OperandSize::kQuad:
355 return BytecodeOperandQuadSignExtended(operand_index);
356 case OperandSize::kNone:
357 UNREACHABLE();
358 }
359 return nullptr;
227 } 360 }
228 361
229 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) { 362 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
230 switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) { 363 DCHECK(OperandType::kIdx8 ==
364 Bytecodes::GetOperandType(bytecode_, operand_index) ||
365 OperandType::kRuntimeId ==
366 Bytecodes::GetOperandType(bytecode_, operand_index));
367 OperandSize operand_size =
368 Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
369 switch (operand_size) {
231 case OperandSize::kByte: 370 case OperandSize::kByte:
232 DCHECK_EQ(OperandType::kIdx8,
233 Bytecodes::GetOperandType(bytecode_, operand_index));
234 return BytecodeOperand(operand_index); 371 return BytecodeOperand(operand_index);
235 case OperandSize::kShort: 372 case OperandSize::kShort:
236 DCHECK_EQ(OperandType::kIdx16,
237 Bytecodes::GetOperandType(bytecode_, operand_index));
238 return BytecodeOperandShort(operand_index); 373 return BytecodeOperandShort(operand_index);
374 case OperandSize::kQuad:
375 return BytecodeOperandQuad(operand_index);
239 case OperandSize::kNone: 376 case OperandSize::kNone:
240 UNREACHABLE(); 377 UNREACHABLE();
241 } 378 }
242 return nullptr; 379 return nullptr;
243 } 380 }
244 381
245 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) { 382 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
246 OperandType operand_type = 383 OperandType operand_type =
247 Bytecodes::GetOperandType(bytecode_, operand_index); 384 Bytecodes::GetOperandType(bytecode_, operand_index);
248 if (Bytecodes::IsRegisterOperandType(operand_type)) { 385 DCHECK(Bytecodes::IsRegisterOperandType(operand_type));
249 OperandSize operand_size = Bytecodes::SizeOfOperand(operand_type); 386 switch (Bytecodes::SizeOfOperand(operand_type, operand_scale())) {
250 if (operand_size == OperandSize::kByte) { 387 case OperandSize::kByte:
251 return BytecodeOperandSignExtended(operand_index); 388 return BytecodeOperandSignExtended(operand_index);
252 } else if (operand_size == OperandSize::kShort) { 389 case OperandSize::kShort:
253 return BytecodeOperandShortSignExtended(operand_index); 390 return BytecodeOperandShortSignExtended(operand_index);
254 } 391 case OperandSize::kQuad:
392 return BytecodeOperandQuadSignExtended(operand_index);
393 case OperandSize::kNone:
394 UNREACHABLE();
255 } 395 }
256 UNREACHABLE();
257 return nullptr; 396 return nullptr;
258 } 397 }
259 398
260 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) { 399 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
261 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(), 400 Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
262 BytecodeArray::kConstantPoolOffset); 401 BytecodeArray::kConstantPoolOffset);
263 Node* entry_offset = 402 Node* entry_offset =
264 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag), 403 IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
265 WordShl(index, kPointerSizeLog2)); 404 WordShl(index, kPointerSizeLog2));
266 return Load(MachineType::AnyTagged(), constant_pool, entry_offset); 405 return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
(...skipping 158 matching lines...)
425 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) { 564 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
426 JumpConditional(WordEqual(lhs, rhs), delta); 565 JumpConditional(WordEqual(lhs, rhs), delta);
427 } 566 }
428 567
429 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs, 568 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
430 Node* delta) { 569 Node* delta) {
431 JumpConditional(WordNotEqual(lhs, rhs), delta); 570 JumpConditional(WordNotEqual(lhs, rhs), delta);
432 } 571 }
433 572
434 void InterpreterAssembler::Dispatch() { 573 void InterpreterAssembler::Dispatch() {
435 DispatchTo(Advance(Bytecodes::Size(bytecode_))); 574 DCHECK(base::bits::IsPowerOfTwo32(operand_scale_));
575 DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
436 } 576 }
437 577
438 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) { 578 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
439 Node* target_bytecode = Load( 579 Node* target_bytecode = Load(
440 MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset); 580 MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
441 if (kPointerSize == 8) { 581 if (kPointerSize == 8) {
442 target_bytecode = ChangeUint32ToUint64(target_bytecode); 582 target_bytecode = ChangeUint32ToUint64(target_bytecode);
443 } 583 }
444 584
445 // TODO(rmcilroy): Create a code target dispatch table to avoid conversion 585 // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
(...skipping 11 matching lines...)
457 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit); 597 TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
458 } 598 }
459 599
460 InterpreterDispatchDescriptor descriptor(isolate()); 600 InterpreterDispatchDescriptor descriptor(isolate());
461 Node* args[] = {GetAccumulator(), RegisterFileRawPointer(), 601 Node* args[] = {GetAccumulator(), RegisterFileRawPointer(),
462 bytecode_offset, BytecodeArrayTaggedPointer(), 602 bytecode_offset, BytecodeArrayTaggedPointer(),
463 DispatchTableRawPointer(), GetContext()}; 603 DispatchTableRawPointer(), GetContext()};
464 TailCall(descriptor, handler, args, 0); 604 TailCall(descriptor, handler, args, 0);
465 } 605 }
466 606
607 void InterpreterAssembler::RedispatchWide() {
608 STATIC_ASSERT(static_cast<int>(Bytecode::kExtraWide) ==
609 static_cast<int>(Bytecode::kWide) + 1);
610 Node* upper = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
611 BytecodeOffset());
612 upper =
613 IntPtrAdd(upper, Int32Constant(-static_cast<int>(Bytecode::kWide) + 1));
614 Node* lower = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
615 IntPtrAdd(BytecodeOffset(), Int32Constant(1)));
616 Node* target_bytecode =
617 Word32Or(Word32Shl(upper, Int32Constant(kBitsPerByte)), lower);
618 Node* target_code_object =
619 Load(MachineType::Pointer(), DispatchTableRawPointer(),
620 Word32Shl(target_bytecode, Int32Constant(kPointerSizeLog2)));
621 DispatchToBytecodeHandler(target_code_object, Advance(1));
622 }
623
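RedispatchWide above is reached from the Wide and ExtraWide prefix handlers: it reloads the prefix byte, maps kWide to block 1 and kExtraWide to block 2, and combines that with the following bytecode to index an enlarged dispatch table. Below is a small arithmetic sketch, assuming (as the shift by kBitsPerByte implies) 256 handler entries per operand-scale block; the prefix opcode values here are placeholders, only their adjacency matters:

    #include <cassert>

    constexpr int kWide = 0xf0;       // placeholder value; kExtraWide must be
    constexpr int kExtraWide = 0xf1;  // kWide + 1, as the STATIC_ASSERT requires

    // Dispatch-table index used after a scaling prefix: block * 256 + bytecode,
    // where block is 1 for Wide handlers and 2 for ExtraWide handlers.
    int WideDispatchIndex(int prefix_bytecode, int next_bytecode) {
      int block = prefix_bytecode - kWide + 1;
      return (block << 8) | next_bytecode;
    }

    int main() {
      assert(WideDispatchIndex(kWide, 0x0b) == 256 + 11);
      assert(WideDispatchIndex(kExtraWide, 0x0b) == 512 + 11);
      return 0;
    }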
467 void InterpreterAssembler::InterpreterReturn() { 624 void InterpreterAssembler::InterpreterReturn() {
468 // TODO(rmcilroy): Investigate whether it is worth supporting self 625 // TODO(rmcilroy): Investigate whether it is worth supporting self
469 // optimization of primitive functions like FullCodegen. 626 // optimization of primitive functions like FullCodegen.
470 627
471 // Update profiling count by -BytecodeOffset to simulate backedge to start of 628 // Update profiling count by -BytecodeOffset to simulate backedge to start of
472 // function. 629 // function.
473 Node* profiling_weight = 630 Node* profiling_weight =
474 Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize), 631 Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
475 BytecodeOffset()); 632 BytecodeOffset());
476 UpdateInterruptBudget(profiling_weight); 633 UpdateInterruptBudget(profiling_weight);
(...skipping 58 matching lines...)
535 V8_TARGET_ARCH_S390 692 V8_TARGET_ARCH_S390
536 return true; 693 return true;
537 #else 694 #else
538 #error "Unknown Architecture" 695 #error "Unknown Architecture"
539 #endif 696 #endif
540 } 697 }
541 698
542 } // namespace interpreter 699 } // namespace interpreter
543 } // namespace internal 700 } // namespace internal
544 } // namespace v8 701 } // namespace v8