Chromium Code Reviews

Unified Diff: src/interpreter/interpreter-assembler.cc

Issue 1783483002: [interpreter] Add support for scalable operands. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Additional test for debugger stepping and wider constant array builder test. Created 4 years, 9 months ago
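For readers new to the patch: "scalable operands" means each bytecode keeps its base operand sizes, and a wide prefix bytecode selects an operand scale (2x or 4x) that multiplies those sizes; the handlers below therefore thread operand_scale() through every operand-size and operand-offset computation. A minimal, self-contained sketch of that relationship (the enum values and helper below are illustrative assumptions, not V8's actual declarations; fixed-width operand types such as kFlag8 and kRuntimeId do not scale):

    #include <cstdint>

    // Illustrative stand-ins for the concepts used in this patch.
    enum class OperandSize : uint8_t { kNone = 0, kByte = 1, kShort = 2, kQuad = 4 };
    enum class OperandScale : uint8_t { kSingle = 1, k2X = 2, k4X = 4 };

    // A scalable operand occupies its base width multiplied by the scale chosen
    // by the wide prefix, e.g. a kByte operand dispatched at k4X is read as a
    // four-byte (kQuad) value.
    int ScaledOperandSize(OperandSize base, OperandScale scale) {
      return static_cast<int>(base) * static_cast<int>(scale);
    }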
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/interpreter/interpreter-assembler.h"

 #include <ostream>

 #include "src/code-factory.h"
 #include "src/frames.h"
 #include "src/interface-descriptors.h"
 #include "src/interpreter/bytecodes.h"
 #include "src/interpreter/interpreter.h"
 #include "src/machine-type.h"
 #include "src/macro-assembler.h"
 #include "src/zone.h"

 namespace v8 {
 namespace internal {
 namespace interpreter {

 using compiler::Node;

 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
-                                           Bytecode bytecode)
+                                           Bytecode bytecode,
+                                           OperandScale operand_scale)
     : compiler::CodeStubAssembler(isolate, zone,
                                   InterpreterDispatchDescriptor(isolate),
                                   Code::ComputeFlags(Code::BYTECODE_HANDLER),
                                   Bytecodes::ToString(bytecode), 0),
       bytecode_(bytecode),
+      operand_scale_(operand_scale),
       accumulator_(this, MachineRepresentation::kTagged),
       context_(this, MachineRepresentation::kTagged),
       bytecode_array_(this, MachineRepresentation::kTagged),
       disable_stack_check_across_call_(false),
       stack_pointer_before_call_(nullptr) {
   accumulator_.Bind(
       Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
   context_.Bind(Parameter(InterpreterDispatchDescriptor::kContextParameter));
   bytecode_array_.Bind(
       Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter));
(...skipping 36 matching lines...)
 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
   return IntPtrAdd(RegisterFileRawPointer(), RegisterFrameOffset(reg_index));
 }

 Node* InterpreterAssembler::LoadRegister(int offset) {
   return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
               IntPtrConstant(offset));
 }

 Node* InterpreterAssembler::LoadRegister(Register reg) {
-  return LoadRegister(reg.ToOperand() << kPointerSizeLog2);
+  return LoadRegister(IntPtrConstant(-reg.index()));
 }

 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
   return WordShl(index, kPointerSizeLog2);
 }

 Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
   return Load(MachineType::AnyTagged(), RegisterFileRawPointer(),
               RegisterFrameOffset(reg_index));
 }

 Node* InterpreterAssembler::StoreRegister(Node* value, int offset) {
   return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                              RegisterFileRawPointer(), IntPtrConstant(offset),
                              value);
 }

 Node* InterpreterAssembler::StoreRegister(Node* value, Register reg) {
-  return StoreRegister(value, reg.ToOperand() << kPointerSizeLog2);
+  return StoreRegister(value, IntPtrConstant(-reg.index()));
 }

 Node* InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
   return StoreNoWriteBarrier(MachineRepresentation::kTagged,
                              RegisterFileRawPointer(),
                              RegisterFrameOffset(reg_index), value);
 }

 Node* InterpreterAssembler::NextRegister(Node* reg_index) {
   // Register indexes are negative, so the next index is minus one.
   return IntPtrAdd(reg_index, IntPtrConstant(-1));
 }

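The register accessors above address interpreter registers relative to the register file pointer: register indexes count downwards (hence the -reg.index() in the new code and the "minus one" comment in NextRegister), and RegisterFrameOffset turns an index into a byte offset by shifting by kPointerSizeLog2. A tiny standalone sketch of that addressing, assuming a 64-bit build (names here are illustrative only):

    #include <cstdint>

    constexpr int kPointerSize = 8;  // assumption: 64-bit target

    // Register i lives at register_file_pointer + (-i) * kPointerSize, which is
    // what LoadRegister(IntPtrConstant(-reg.index())) combined with
    // RegisterFrameOffset's shift by kPointerSizeLog2 computes on Node values.
    intptr_t RegisterFrameOffsetBytes(int register_index) {
      return static_cast<intptr_t>(-register_index) * kPointerSize;
    }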
-Node* InterpreterAssembler::BytecodeOperand(int operand_index) {
+Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kByte,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  return Load(
-      MachineType::Uint8(), BytecodeArrayTaggedPointer(),
-      IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset(
-                                      bytecode_, operand_index))));
+  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
+              IntPtrAdd(BytecodeOffset(),
+                        IntPtrConstant(Bytecodes::GetOperandOffset(
+                            bytecode_, operand_index, operand_scale()))));
rmcilroy 2016/03/17 17:30:50 nit - could you pull out the IntPtrConstant as a v
oth 2016/03/21 09:16:54 Done.
 }

-Node* InterpreterAssembler::BytecodeOperandSignExtended(int operand_index) {
+Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kByte,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  Node* load = Load(
-      MachineType::Int8(), BytecodeArrayTaggedPointer(),
-      IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset(
-                                      bytecode_, operand_index))));
+  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
+                    IntPtrAdd(BytecodeOffset(),
+                              IntPtrConstant(Bytecodes::GetOperandOffset(
+                                  bytecode_, operand_index, operand_scale()))));
   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }

-Node* InterpreterAssembler::BytecodeOperandShort(int operand_index) {
+Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kShort,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
+  DCHECK_EQ(
+      OperandSize::kShort,
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
   if (TargetSupportsUnalignedAccess()) {
-    return Load(
-        MachineType::Uint16(), BytecodeArrayTaggedPointer(),
-        IntPtrAdd(BytecodeOffset(), IntPtrConstant(Bytecodes::GetOperandOffset(
-                                        bytecode_, operand_index))));
+    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(),
+                          IntPtrConstant(Bytecodes::GetOperandOffset(
+                              bytecode_, operand_index, operand_scale()))));
   } else {
-    int offset = Bytecodes::GetOperandOffset(bytecode_, operand_index);
+    int offset =
+        Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
     Node* first_byte =
         Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset)));
     Node* second_byte =
         Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), IntPtrConstant(offset + 1)));
 #if V8_TARGET_LITTLE_ENDIAN
-    return WordOr(WordShl(second_byte, kBitsPerByte), first_byte);
+    return WordOr(WordShl(second_byte, IntPtrConstant(kBitsPerByte)),
+                  first_byte);
 #elif V8_TARGET_BIG_ENDIAN
-    return WordOr(WordShl(first_byte, kBitsPerByte), second_byte);
+    return WordOr(WordShl(first_byte, IntPtrConstant(kBitsPerByte)),
+                  second_byte);
 #else
 #error "Unknown Architecture"
 #endif
   }
 }

-Node* InterpreterAssembler::BytecodeOperandShortSignExtended(
-    int operand_index) {
+Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
-  DCHECK_EQ(OperandSize::kShort,
-            Bytecodes::GetOperandSize(bytecode_, operand_index));
-  int operand_offset = Bytecodes::GetOperandOffset(bytecode_, operand_index);
+  DCHECK_EQ(
+      OperandSize::kShort,
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   Node* load;
   if (TargetSupportsUnalignedAccess()) {
     load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
   } else {
 #if V8_TARGET_LITTLE_ENDIAN
     Node* hi_byte_offset = IntPtrConstant(operand_offset + 1);
     Node* lo_byte_offset = IntPtrConstant(operand_offset);
 #elif V8_TARGET_BIG_ENDIAN
     Node* hi_byte_offset = IntPtrConstant(operand_offset);
     Node* lo_byte_offset = IntPtrConstant(operand_offset + 1);
 #else
 #error "Unknown Architecture"
 #endif
     Node* hi_byte = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
                          IntPtrAdd(BytecodeOffset(), hi_byte_offset));
     Node* lo_byte = Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                          IntPtrAdd(BytecodeOffset(), lo_byte_offset));
     hi_byte = Word32Shl(hi_byte, Int32Constant(kBitsPerByte));
     load = Word32Or(hi_byte, lo_byte);
   }

   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }

-Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
-  switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) {
+Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
+  if (TargetSupportsUnalignedAccess()) {
+    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+  } else {
+    Node* bytes[4];
+    for (int i = 0; i < static_cast<int>(arraysize(bytes)); ++i) {
+      bytes[i] =
+          Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
+               IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset + i)));
+    }
+#if V8_TARGET_LITTLE_ENDIAN
+    Node* upper =
rmcilroy 2016/03/17 17:30:50 Can we pull out the shared code here and below in
oth 2016/03/21 09:16:54 The latest revision tries to make more of the unal
rmcilroy 2016/03/21 12:41:36 Nice, looks good - couple of comments up above. Al
oth 2016/03/21 14:21:48 Ack. MIPS looks okay. IIRC PPC ran earlier, but bu
+        WordOr(WordShl(bytes[3], IntPtrConstant(kBitsPerByte)), bytes[2]);
+    Node* lower =
+        WordOr(WordShl(bytes[1], IntPtrConstant(kBitsPerByte)), bytes[0]);
+    return WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
+#elif V8_TARGET_BIG_ENDIAN
+    Node* upper =
+        WordOr(WordShl(bytes[0], IntPtrConstant(kBitsPerByte)), bytes[1]);
+    Node* lower =
+        WordOr(WordShl(bytes[2], IntPtrConstant(kBitsPerByte)), bytes[3]);
+    return WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
+#else
+#error "Unknown Architecture"
+#endif
+  }
+}
+
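The thread above asks for the repeated byte-assembly logic in these unaligned-access paths to be factored into shared code, and the author notes the later revision moves in that direction. As a rough, standalone illustration of the logic being shared (plain C++ rather than CodeStubAssembler nodes; the helper name and signature are hypothetical, not the code that actually landed):

    #include <cstdint>

    // Hypothetical helper: combine `count` individually loaded bytes into one
    // operand value, honouring the target's byte order, the way each unaligned
    // branch above does with WordShl/WordOr on Node values.
    uint32_t AssembleOperandBytes(const uint8_t* bytes, int count, bool big_endian) {
      uint32_t result = 0;
      for (int i = 0; i < count; ++i) {
        // Little-endian: byte i supplies bits [8*i, 8*i + 8).
        // Big-endian: byte i is the most significant of the remaining bytes.
        int shift = big_endian ? 8 * (count - 1 - i) : 8 * i;
        result |= static_cast<uint32_t>(bytes[i]) << shift;
      }
      return result;
    }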
+Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
+  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
+  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
+                                    bytecode_, operand_index, operand_scale()));
+  int operand_offset =
+      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
+  Node* load;
+  if (TargetSupportsUnalignedAccess()) {
+    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+  } else {
+    Node* bytes[4];
+    for (int i = 0; i < static_cast<int>(arraysize(bytes)); ++i) {
+      bytes[i] =
+          Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
+               IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset + i)));
+    }
+#if V8_TARGET_LITTLE_ENDIAN
+    Node* upper =
+        WordOr(WordShl(bytes[3], IntPtrConstant(kBitsPerByte)), bytes[2]);
+    Node* lower =
+        WordOr(WordShl(bytes[1], IntPtrConstant(kBitsPerByte)), bytes[0]);
+    load = WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
+#elif V8_TARGET_BIG_ENDIAN
+    Node* upper =
+        WordOr(WordShl(bytes[0], IntPtrConstant(kBitsPerByte)), bytes[1]);
+    Node* lower =
+        WordOr(WordShl(bytes[2], IntPtrConstant(kBitsPerByte)), bytes[3]);
+    load = WordOr(WordShl(upper, IntPtrConstant(2 * kBitsPerByte)), lower);
+#else
+#error "Unknown Architecture"
+#endif
+  }
+
+  // Ensure that we sign extend to full pointer size
+  if (kPointerSize == 8) {
+    load = ChangeInt32ToInt64(load);
+  }
+  return load;
+}
+
+Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
+                                                  OperandSize operand_size) {
+  DCHECK(!Bytecodes::IsUnsignedOperandType(
+      Bytecodes::GetOperandType(bytecode_, operand_index)));
+  switch (operand_size) {
     case OperandSize::kByte:
-      DCHECK_EQ(OperandType::kRegCount8,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperand(operand_index);
+      return BytecodeOperandSignedByte(operand_index);
     case OperandSize::kShort:
-      DCHECK_EQ(OperandType::kRegCount16,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperandShort(operand_index);
+      return BytecodeOperandSignedShort(operand_index);
+    case OperandSize::kQuad:
+      return BytecodeOperandSignedQuad(operand_index);
     case OperandSize::kNone:
       UNREACHABLE();
   }
   return nullptr;
 }

-Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
-  DCHECK_EQ(OperandType::kImm8,
-            Bytecodes::GetOperandType(bytecode_, operand_index));
-  return BytecodeOperandSignExtended(operand_index);
-}
-
-Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
-  switch (Bytecodes::GetOperandSize(bytecode_, operand_index)) {
+Node* InterpreterAssembler::BytecodeUnsignedOperand(int operand_index,
+                                                    OperandSize operand_size) {
+  DCHECK(Bytecodes::IsUnsignedOperandType(
+      Bytecodes::GetOperandType(bytecode_, operand_index)));
+  switch (operand_size) {
     case OperandSize::kByte:
-      DCHECK_EQ(OperandType::kIdx8,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperand(operand_index);
+      return BytecodeOperandUnsignedByte(operand_index);
     case OperandSize::kShort:
-      DCHECK_EQ(OperandType::kIdx16,
-                Bytecodes::GetOperandType(bytecode_, operand_index));
-      return BytecodeOperandShort(operand_index);
+      return BytecodeOperandUnsignedShort(operand_index);
+    case OperandSize::kQuad:
+      return BytecodeOperandUnsignedQuad(operand_index);
     case OperandSize::kNone:
       UNREACHABLE();
   }
   return nullptr;
 }

+Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
+  DCHECK_EQ(OperandType::kRegCount,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
+  DCHECK_EQ(OperandType::kFlag8,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  DCHECK_EQ(operand_size, OperandSize::kByte);
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
+  DCHECK_EQ(OperandType::kImm,
+            Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeSignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
+  DCHECK(OperandType::kIdx ==
+         Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeUnsignedOperand(operand_index, operand_size);
+}
+
 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
-  OperandType operand_type =
-      Bytecodes::GetOperandType(bytecode_, operand_index);
-  if (Bytecodes::IsRegisterOperandType(operand_type)) {
-    OperandSize operand_size = Bytecodes::SizeOfOperand(operand_type);
-    if (operand_size == OperandSize::kByte) {
-      return BytecodeOperandSignExtended(operand_index);
-    } else if (operand_size == OperandSize::kShort) {
-      return BytecodeOperandShortSignExtended(operand_index);
-    }
-  }
-  UNREACHABLE();
-  return nullptr;
+  DCHECK(Bytecodes::IsRegisterOperandType(
+      Bytecodes::GetOperandType(bytecode_, operand_index)));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  return BytecodeSignedOperand(operand_index, operand_size);
+}
+
+Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
+  DCHECK(OperandType::kRuntimeId ==
+         Bytecodes::GetOperandType(bytecode_, operand_index));
+  OperandSize operand_size =
+      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
+  DCHECK_EQ(operand_size, OperandSize::kShort);
+  return BytecodeUnsignedOperand(operand_index, operand_size);
 }

 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
   Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                         BytecodeArray::kConstantPoolOffset);
   Node* entry_offset =
       IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                 WordShl(index, kPointerSizeLog2));
   return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
 }
(...skipping 157 matching lines...)
 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
   JumpConditional(WordEqual(lhs, rhs), delta);
 }

 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                               Node* delta) {
   JumpConditional(WordNotEqual(lhs, rhs), delta);
 }

 void InterpreterAssembler::Dispatch() {
-  DispatchTo(Advance(Bytecodes::Size(bytecode_)));
+  DispatchTo(Advance(Bytecodes::Size(bytecode_, operand_scale_)));
 }

 void InterpreterAssembler::DispatchTo(Node* new_bytecode_offset) {
   Node* target_bytecode = Load(
       MachineType::Uint8(), BytecodeArrayTaggedPointer(), new_bytecode_offset);
   if (kPointerSize == 8) {
     target_bytecode = ChangeUint32ToUint64(target_bytecode);
   }

   // TODO(rmcilroy): Create a code target dispatch table to avoid conversion
(...skipping 11 matching lines...)
     TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
   }

   InterpreterDispatchDescriptor descriptor(isolate());
   Node* args[] = {GetAccumulator(), RegisterFileRawPointer(),
                   bytecode_offset, BytecodeArrayTaggedPointer(),
                   DispatchTableRawPointer(), GetContext()};
   TailCall(descriptor, handler, args, 0);
 }

+void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
+  DCHECK(operand_scale == OperandScale::k2X ||
+         operand_scale == OperandScale::k4X);
+  uint32_t scale = static_cast<uint32_t>(operand_scale);
+  DCHECK(base::bits::IsPowerOfTwo32(scale));
+  uint32_t log2_scale = base::bits::CountTrailingZeros32(scale);
+  Node* target_bytecode_hi = IntPtrConstant(log2_scale << kBitsPerByte);
+  Node* target_bytecode_lo_offset = Advance(1);
+  Node* target_bytecode_lo =
+      Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
+           target_bytecode_lo_offset);
+  if (kPointerSize == 8) {
+    target_bytecode_lo = ChangeUint32ToUint64(target_bytecode_lo);
+  }
+  Node* target_bytecode = WordOr(target_bytecode_hi, target_bytecode_lo);
rmcilroy 2016/03/17 17:30:50 This seems pretty complicated, can't we just do: i
oth 2016/03/21 09:16:54 Okay, have reworked to a middle ground and can a c
rmcilroy 2016/03/21 12:41:36 Looks good, thanks!
+  Node* target_code_object =
+      Load(MachineType::Pointer(), DispatchTableRawPointer(),
+           WordShl(target_bytecode, kPointerSizeLog2));
+  DispatchToBytecodeHandler(target_code_object, target_bytecode_lo_offset);
+}
+
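For the wide-dispatch logic above: the handler index is the next (unscaled) bytecode plus a per-scale block offset into the dispatch table, since log2_scale << kBitsPerByte is a multiple of 256 and the bytecode itself is below 256. A plain restatement of that arithmetic (the constant and function names here are illustrative only, not part of the patch):

    #include <cstdint>

    // One block of 256 handler entries per operand scale (1x, 2x, 4x).
    constexpr uint32_t kEntriesPerScale = 256;  // == 1 << kBitsPerByte

    uint32_t WideDispatchIndex(uint32_t log2_scale, uint8_t next_bytecode) {
      // log2_scale is 1 for the 2x prefix and 2 for the 4x prefix; it selects
      // the dispatch-table block, and the bytecode selects the entry within it.
      return log2_scale * kEntriesPerScale + next_bytecode;
    }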
 void InterpreterAssembler::InterpreterReturn() {
   // TODO(rmcilroy): Investigate whether it is worth supporting self
   // optimization of primitive functions like FullCodegen.

   // Update profiling count by -BytecodeOffset to simulate backedge to start of
   // function.
   Node* profiling_weight =
       Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
                BytecodeOffset());
   UpdateInterruptBudget(profiling_weight);
(...skipping 58 matching lines...)
        V8_TARGET_ARCH_S390
   return true;
 #else
 #error "Unknown Architecture"
 #endif
 }

 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8