Chromium Code Reviews

Unified Diff: src/interpreter/interpreter-assembler.cc

Issue 2092763002: [Interpreter] Maintain a pointer to the current position in the bytecode
Base URL: https://chromium.googlesource.com/v8/v8.git@int_cache_fp_and_bytecode
Patch Set: Rebase (created 4 years, 6 months ago)
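In outline, this patch stops recomputing the address of the current bytecode on every operand access and instead keeps a cached "inner pointer" into the bytecode array. The sketch below paraphrases the new helper from the diff and is illustrative only; the names (bytecode_inner_pointer_, made_call_, BytecodeOffset(), BytecodeArrayTaggedPointer()) are the ones introduced or already used in interpreter-assembler.cc.

    // Lazily bind bytecode_inner_pointer_ to the raw address of the current
    // bytecode: BytecodeArrayTaggedPointer() + BytecodeOffset(). Rebind after
    // any call, because the bytecode array pointer may have changed in the
    // meantime (e.g. the debugger swapping in a patched bytecode array, as
    // noted in BytecodeArrayTaggedPointer()).
    Node* InterpreterAssembler::BytecodeInnerPointer() {
      if (!bytecode_inner_pointer_.IsBound() || made_call_) {
        bytecode_inner_pointer_.Bind(
            IntPtrAdd(BytecodeArrayTaggedPointer(), BytecodeOffset()));
      }
      return bytecode_inner_pointer_.value();
    }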
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/interpreter/interpreter-assembler.h"

 #include <limits>
 #include <ostream>

 #include "src/code-factory.h"
(...skipping 14 matching lines...)
 InterpreterAssembler::InterpreterAssembler(Isolate* isolate, Zone* zone,
                                            Bytecode bytecode,
                                            OperandScale operand_scale)
     : CodeStubAssembler(isolate, zone, InterpreterDispatchDescriptor(isolate),
                         Code::ComputeFlags(Code::BYTECODE_HANDLER),
                         Bytecodes::ToString(bytecode),
                         Bytecodes::ReturnCount(bytecode)),
       bytecode_(bytecode),
       operand_scale_(operand_scale),
       interpreted_frame_pointer_(this, MachineType::PointerRepresentation()),
+      bytecode_inner_pointer_(this, MachineType::PointerRepresentation()),
       accumulator_(this, MachineRepresentation::kTagged),
       accumulator_use_(AccumulatorUse::kNone),
       made_call_(false),
       disable_stack_check_across_call_(false),
       stack_pointer_before_call_(nullptr) {
   accumulator_.Bind(
       Parameter(InterpreterDispatchDescriptor::kAccumulatorParameter));
   if (FLAG_trace_ignition) {
     TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
   }
(...skipping 44 matching lines...)
 Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
   if (made_call_) {
     // If we have made a call, restore bytecode array from stack frame in case
     // the debugger has swapped us to the patched debugger bytecode array.
     return LoadRegister(Register::bytecode_array());
   } else {
     return Parameter(InterpreterDispatchDescriptor::kBytecodeArrayParameter);
   }
 }

+Node* InterpreterAssembler::BytecodeInnerPointer() {
+  if (!bytecode_inner_pointer_.IsBound() || made_call_) {
+    bytecode_inner_pointer_.Bind(
+        IntPtrAdd(BytecodeArrayTaggedPointer(), BytecodeOffset()));
+  }
+  return bytecode_inner_pointer_.value();
+}
+
 Node* InterpreterAssembler::DispatchTableRawPointer() {
   return Parameter(InterpreterDispatchDescriptor::kDispatchTableParameter);
 }

 Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
   return IntPtrAdd(GetInterpretedFramePointer(),
                    RegisterFrameOffset(reg_index));
 }

 Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
(...skipping 30 matching lines...)
 Node* InterpreterAssembler::OperandOffset(int operand_index) {
   return IntPtrConstant(
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
 }

 Node* InterpreterAssembler::BytecodeOperandUnsignedByte(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   Node* operand_offset = OperandOffset(operand_index);
-  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
-              IntPtrAdd(BytecodeOffset(), operand_offset));
+  return Load(MachineType::Uint8(), BytecodeInnerPointer(), operand_offset);
 }

 Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   Node* operand_offset = OperandOffset(operand_index);
-  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
-                    IntPtrAdd(BytecodeOffset(), operand_offset));
+  Node* load =
+      Load(MachineType::Int8(), BytecodeInnerPointer(), operand_offset);

   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }

 compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
     int relative_offset, MachineType result_type) {
(...skipping 47 matching lines...)
 }

 Node* InterpreterAssembler::BytecodeOperandUnsignedShort(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(
       OperandSize::kShort,
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   if (TargetSupportsUnalignedAccess()) {
-    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
-                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+    return Load(MachineType::Uint16(), BytecodeInnerPointer(),
+                IntPtrConstant(operand_offset));
   } else {
     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16());
   }
 }

 Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(
       OperandSize::kShort,
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   Node* load;
   if (TargetSupportsUnalignedAccess()) {
-    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
-                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+    load = Load(MachineType::Int16(), BytecodeInnerPointer(),
+                IntPtrConstant(operand_offset));
   } else {
     load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
   }

   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }

 Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   if (TargetSupportsUnalignedAccess()) {
-    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
-                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+    return Load(MachineType::Uint32(), BytecodeInnerPointer(),
+                IntPtrConstant(operand_offset));
   } else {
     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
   }
 }

 Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   Node* load;
   if (TargetSupportsUnalignedAccess()) {
-    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
-                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+    load = Load(MachineType::Int32(), BytecodeInnerPointer(),
+                IntPtrConstant(operand_offset));
   } else {
     load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
   }

   // Ensure that we sign extend to full pointer size
   if (kPointerSize == 8) {
     load = ChangeInt32ToInt64(load);
   }
   return load;
 }
(...skipping 489 matching lines...)
     Goto(&loop);
   }
   Bind(&done_loop);

   return array;
 }

 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
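The operand accessors above all apply the same rewrite. As a before/after sketch, taken from the unsigned-byte case in this file (the short and quad variants differ only in the MachineType, in passing the offset as an IntPtrConstant, and in falling back to BytecodeOperandReadUnaligned when the target does not support unaligned access):

    // Before: each operand load adds the bytecode offset to the array pointer.
    return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), operand_offset));

    // After: load relative to the cached inner pointer, so the IntPtrAdd of
    // array pointer and bytecode offset happens when the pointer is (re)bound
    // rather than on every operand access.
    return Load(MachineType::Uint8(), BytecodeInnerPointer(), operand_offset);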
