| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter.h" | 5 #include "src/interpreter/interpreter.h" |
| 6 | 6 |
| 7 #include "src/ast/prettyprinter.h" | 7 #include "src/ast/prettyprinter.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/compiler.h" | 9 #include "src/compiler.h" |
| 10 #include "src/compiler/interpreter-assembler.h" | |
| 11 #include "src/factory.h" | 10 #include "src/factory.h" |
| 12 #include "src/interpreter/bytecode-generator.h" | 11 #include "src/interpreter/bytecode-generator.h" |
| 13 #include "src/interpreter/bytecodes.h" | 12 #include "src/interpreter/bytecodes.h" |
| | 13 #include "src/interpreter/interpreter-assembler.h" |
| 14 #include "src/zone.h" | 14 #include "src/zone.h" |
| 15 | 15 |
| 16 namespace v8 { | 16 namespace v8 { |
| 17 namespace internal { | 17 namespace internal { |
| 18 namespace interpreter { | 18 namespace interpreter { |
| 19 | 19 |
| 20 using compiler::Node; | 20 using compiler::Node; |
| 21 | 21 |
| 22 #define __ assembler-> | 22 #define __ assembler-> |
| 23 | 23 |
| 24 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { | 24 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { |
| 25 memset(&dispatch_table_, 0, sizeof(dispatch_table_)); | 25 memset(&dispatch_table_, 0, sizeof(dispatch_table_)); |
| 26 } | 26 } |
| 27 | 27 |
| 28 | |
| 29 void Interpreter::Initialize() { | 28 void Interpreter::Initialize() { |
| 30 DCHECK(FLAG_ignition); | 29 DCHECK(FLAG_ignition); |
| 31 if (IsDispatchTableInitialized()) return; | 30 if (IsDispatchTableInitialized()) return; |
| 32 Zone zone; | 31 Zone zone; |
| 33 HandleScope scope(isolate_); | 32 HandleScope scope(isolate_); |
| 34 | 33 |
| 35 #define GENERATE_CODE(Name, ...) \ | 34 #define GENERATE_CODE(Name, ...) \ |
| 36 { \ | 35 { \ |
| 37 compiler::InterpreterAssembler assembler(isolate_, &zone, \ | 36 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name); \ |
| 38 Bytecode::k##Name); \ | 37 Do##Name(&assembler); \ |
| 39 Do##Name(&assembler); \ | 38 Handle<Code> code = assembler.GenerateCode(); \ |
| 40 Handle<Code> code = assembler.GenerateCode(); \ | 39 TraceCodegen(code, #Name); \ |
| 41 int index = static_cast<int>(Bytecode::k##Name); \ | 40 int index = static_cast<int>(Bytecode::k##Name); \ |
| 42 dispatch_table_[index] = *code; \ | 41 dispatch_table_[index] = *code; \ |
| 43 } | 42 } |
| 44 BYTECODE_LIST(GENERATE_CODE) | 43 BYTECODE_LIST(GENERATE_CODE) |
| 45 #undef GENERATE_CODE | 44 #undef GENERATE_CODE |
| 46 } | 45 } |
| 47 | 46 |
| 48 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { | 47 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { |
| 49 v->VisitPointers(&dispatch_table_[0], | 48 v->VisitPointers(&dispatch_table_[0], |
| 50 &dispatch_table_[0] + kDispatchTableSize); | 49 &dispatch_table_[0] + kDispatchTableSize); |
| 51 } | 50 } |
| 52 | 51 |
| 53 | |
| 54 bool Interpreter::MakeBytecode(CompilationInfo* info) { | 52 bool Interpreter::MakeBytecode(CompilationInfo* info) { |
| 55 if (FLAG_print_bytecode || FLAG_print_source || FLAG_print_ast) { | 53 if (FLAG_print_bytecode || FLAG_print_source || FLAG_print_ast) { |
| 56 OFStream os(stdout); | 54 OFStream os(stdout); |
| 57 base::SmartArrayPointer<char> name = info->GetDebugName(); | 55 base::SmartArrayPointer<char> name = info->GetDebugName(); |
| 58 os << "[generating bytecode for function: " << info->GetDebugName().get() | 56 os << "[generating bytecode for function: " << info->GetDebugName().get() |
| 59 << "]" << std::endl | 57 << "]" << std::endl |
| 60 << std::flush; | 58 << std::flush; |
| 61 } | 59 } |
| 62 | 60 |
| 63 #ifdef DEBUG | 61 #ifdef DEBUG |
| (...skipping 28 matching lines...) |
| 92 } | 90 } |
| 93 | 91 |
| 94 bool Interpreter::IsDispatchTableInitialized() { | 92 bool Interpreter::IsDispatchTableInitialized() { |
| 95 if (FLAG_trace_ignition) { | 93 if (FLAG_trace_ignition) { |
| 96 // Regenerate table to add bytecode tracing operations. | 94 // Regenerate table to add bytecode tracing operations. |
| 97 return false; | 95 return false; |
| 98 } | 96 } |
| 99 return dispatch_table_[0] != nullptr; | 97 return dispatch_table_[0] != nullptr; |
| 100 } | 98 } |
| 101 | 99 |
| | 100 void Interpreter::TraceCodegen(Handle<Code> code, const char* name) { |
| | 101 #ifdef ENABLE_DISASSEMBLER |
| | 102 if (FLAG_trace_ignition_codegen) { |
| | 103 OFStream os(stdout); |
| | 104 code->Disassemble(name, os); |
| | 105 os << std::flush; |
| | 106 } |
| | 107 #endif // ENABLE_DISASSEMBLER |
| | 108 } |
| | 109 |
| 102 // LdaZero | 110 // LdaZero |
| 103 // | 111 // |
| 104 // Load literal '0' into the accumulator. | 112 // Load literal '0' into the accumulator. |
| 105 void Interpreter::DoLdaZero(compiler::InterpreterAssembler* assembler) { | 113 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { |
| 106 Node* zero_value = __ NumberConstant(0.0); | 114 Node* zero_value = __ NumberConstant(0.0); |
| 107 __ SetAccumulator(zero_value); | 115 __ SetAccumulator(zero_value); |
| 108 __ Dispatch(); | 116 __ Dispatch(); |
| 109 } | 117 } |
| 110 | 118 |
| 111 | 119 |
| 112 // LdaSmi8 <imm8> | 120 // LdaSmi8 <imm8> |
| 113 // | 121 // |
| 114 // Load an 8-bit integer literal into the accumulator as a Smi. | 122 // Load an 8-bit integer literal into the accumulator as a Smi. |
| 115 void Interpreter::DoLdaSmi8(compiler::InterpreterAssembler* assembler) { | 123 void Interpreter::DoLdaSmi8(InterpreterAssembler* assembler) { |
| 116 Node* raw_int = __ BytecodeOperandImm(0); | 124 Node* raw_int = __ BytecodeOperandImm(0); |
| 117 Node* smi_int = __ SmiTag(raw_int); | 125 Node* smi_int = __ SmiTag(raw_int); |
| 118 __ SetAccumulator(smi_int); | 126 __ SetAccumulator(smi_int); |
| 119 __ Dispatch(); | 127 __ Dispatch(); |
| 120 } | 128 } |
| 121 | 129 |
| 122 | 130 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) { |
| 123 void Interpreter::DoLoadConstant(compiler::InterpreterAssembler* assembler) { | |
| 124 Node* index = __ BytecodeOperandIdx(0); | 131 Node* index = __ BytecodeOperandIdx(0); |
| 125 Node* constant = __ LoadConstantPoolEntry(index); | 132 Node* constant = __ LoadConstantPoolEntry(index); |
| 126 __ SetAccumulator(constant); | 133 __ SetAccumulator(constant); |
| 127 __ Dispatch(); | 134 __ Dispatch(); |
| 128 } | 135 } |
| 129 | 136 |
| 130 | 137 |
| 131 // LdaConstant <idx> | 138 // LdaConstant <idx> |
| 132 // | 139 // |
| 133 // Load constant literal at |idx| in the constant pool into the accumulator. | 140 // Load constant literal at |idx| in the constant pool into the accumulator. |
| 134 void Interpreter::DoLdaConstant(compiler::InterpreterAssembler* assembler) { | 141 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { |
| 135 DoLoadConstant(assembler); | 142 DoLoadConstant(assembler); |
| 136 } | 143 } |
| 137 | 144 |
| 138 | 145 |
| 139 // LdaConstantWide <idx> | 146 // LdaConstantWide <idx> |
| 140 // | 147 // |
| 141 // Load constant literal at |idx| in the constant pool into the accumulator. | 148 // Load constant literal at |idx| in the constant pool into the accumulator. |
| 142 void Interpreter::DoLdaConstantWide(compiler::InterpreterAssembler* assembler) { | 149 void Interpreter::DoLdaConstantWide(InterpreterAssembler* assembler) { |
| 143 DoLoadConstant(assembler); | 150 DoLoadConstant(assembler); |
| 144 } | 151 } |
| 145 | 152 |
| 146 | 153 |
| 147 // LdaUndefined | 154 // LdaUndefined |
| 148 // | 155 // |
| 149 // Load Undefined into the accumulator. | 156 // Load Undefined into the accumulator. |
| 150 void Interpreter::DoLdaUndefined(compiler::InterpreterAssembler* assembler) { | 157 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { |
| 151 Node* undefined_value = | 158 Node* undefined_value = |
| 152 __ HeapConstant(isolate_->factory()->undefined_value()); | 159 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 153 __ SetAccumulator(undefined_value); | 160 __ SetAccumulator(undefined_value); |
| 154 __ Dispatch(); | 161 __ Dispatch(); |
| 155 } | 162 } |
| 156 | 163 |
| 157 | 164 |
| 158 // LdaNull | 165 // LdaNull |
| 159 // | 166 // |
| 160 // Load Null into the accumulator. | 167 // Load Null into the accumulator. |
| 161 void Interpreter::DoLdaNull(compiler::InterpreterAssembler* assembler) { | 168 void Interpreter::DoLdaNull(InterpreterAssembler* assembler) { |
| 162 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 169 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 163 __ SetAccumulator(null_value); | 170 __ SetAccumulator(null_value); |
| 164 __ Dispatch(); | 171 __ Dispatch(); |
| 165 } | 172 } |
| 166 | 173 |
| 167 | 174 |
| 168 // LdaTheHole | 175 // LdaTheHole |
| 169 // | 176 // |
| 170 // Load TheHole into the accumulator. | 177 // Load TheHole into the accumulator. |
| 171 void Interpreter::DoLdaTheHole(compiler::InterpreterAssembler* assembler) { | 178 void Interpreter::DoLdaTheHole(InterpreterAssembler* assembler) { |
| 172 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 179 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 173 __ SetAccumulator(the_hole_value); | 180 __ SetAccumulator(the_hole_value); |
| 174 __ Dispatch(); | 181 __ Dispatch(); |
| 175 } | 182 } |
| 176 | 183 |
| 177 | 184 |
| 178 // LdaTrue | 185 // LdaTrue |
| 179 // | 186 // |
| 180 // Load True into the accumulator. | 187 // Load True into the accumulator. |
| 181 void Interpreter::DoLdaTrue(compiler::InterpreterAssembler* assembler) { | 188 void Interpreter::DoLdaTrue(InterpreterAssembler* assembler) { |
| 182 Node* true_value = __ HeapConstant(isolate_->factory()->true_value()); | 189 Node* true_value = __ HeapConstant(isolate_->factory()->true_value()); |
| 183 __ SetAccumulator(true_value); | 190 __ SetAccumulator(true_value); |
| 184 __ Dispatch(); | 191 __ Dispatch(); |
| 185 } | 192 } |
| 186 | 193 |
| 187 | 194 |
| 188 // LdaFalse | 195 // LdaFalse |
| 189 // | 196 // |
| 190 // Load False into the accumulator. | 197 // Load False into the accumulator. |
| 191 void Interpreter::DoLdaFalse(compiler::InterpreterAssembler* assembler) { | 198 void Interpreter::DoLdaFalse(InterpreterAssembler* assembler) { |
| 192 Node* false_value = __ HeapConstant(isolate_->factory()->false_value()); | 199 Node* false_value = __ HeapConstant(isolate_->factory()->false_value()); |
| 193 __ SetAccumulator(false_value); | 200 __ SetAccumulator(false_value); |
| 194 __ Dispatch(); | 201 __ Dispatch(); |
| 195 } | 202 } |
| 196 | 203 |
| 197 | 204 |
| 198 // Ldar <src> | 205 // Ldar <src> |
| 199 // | 206 // |
| 200 // Load accumulator with value from register <src>. | 207 // Load accumulator with value from register <src>. |
| 201 void Interpreter::DoLdar(compiler::InterpreterAssembler* assembler) { | 208 void Interpreter::DoLdar(InterpreterAssembler* assembler) { |
| 202 Node* reg_index = __ BytecodeOperandReg(0); | 209 Node* reg_index = __ BytecodeOperandReg(0); |
| 203 Node* value = __ LoadRegister(reg_index); | 210 Node* value = __ LoadRegister(reg_index); |
| 204 __ SetAccumulator(value); | 211 __ SetAccumulator(value); |
| 205 __ Dispatch(); | 212 __ Dispatch(); |
| 206 } | 213 } |
| 207 | 214 |
| 208 | 215 |
| 209 // Star <dst> | 216 // Star <dst> |
| 210 // | 217 // |
| 211 // Store accumulator to register <dst>. | 218 // Store accumulator to register <dst>. |
| 212 void Interpreter::DoStar(compiler::InterpreterAssembler* assembler) { | 219 void Interpreter::DoStar(InterpreterAssembler* assembler) { |
| 213 Node* reg_index = __ BytecodeOperandReg(0); | 220 Node* reg_index = __ BytecodeOperandReg(0); |
| 214 Node* accumulator = __ GetAccumulator(); | 221 Node* accumulator = __ GetAccumulator(); |
| 215 __ StoreRegister(accumulator, reg_index); | 222 __ StoreRegister(accumulator, reg_index); |
| 216 __ Dispatch(); | 223 __ Dispatch(); |
| 217 } | 224 } |
| 218 | 225 |
| 219 | 226 |
| 220 // Mov <src> <dst> | 227 // Mov <src> <dst> |
| 221 // | 228 // |
| 222 // Stores the value of register <src> to register <dst>. | 229 // Stores the value of register <src> to register <dst>. |
| 223 void Interpreter::DoMov(compiler::InterpreterAssembler* assembler) { | 230 void Interpreter::DoMov(InterpreterAssembler* assembler) { |
| 224 Node* src_index = __ BytecodeOperandReg(0); | 231 Node* src_index = __ BytecodeOperandReg(0); |
| 225 Node* src_value = __ LoadRegister(src_index); | 232 Node* src_value = __ LoadRegister(src_index); |
| 226 Node* dst_index = __ BytecodeOperandReg(1); | 233 Node* dst_index = __ BytecodeOperandReg(1); |
| 227 __ StoreRegister(src_value, dst_index); | 234 __ StoreRegister(src_value, dst_index); |
| 228 __ Dispatch(); | 235 __ Dispatch(); |
| 229 } | 236 } |
| 230 | 237 |
| 231 | 238 |
| 232 // MovWide <src> <dst> | 239 // MovWide <src> <dst> |
| 233 // | 240 // |
| 234 // Stores the value of register <src> to register <dst>. | 241 // Stores the value of register <src> to register <dst>. |
| 235 void Interpreter::DoMovWide(compiler::InterpreterAssembler* assembler) { | 242 void Interpreter::DoMovWide(InterpreterAssembler* assembler) { |
| 236 DoMov(assembler); | 243 DoMov(assembler); |
| 237 } | 244 } |
| 238 | 245 |
| 239 | 246 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { |
| 240 void Interpreter::DoLoadGlobal(Callable ic, | |
| 241 compiler::InterpreterAssembler* assembler) { | |
| 242 // Get the global object. | 247 // Get the global object. |
| 243 Node* context = __ GetContext(); | 248 Node* context = __ GetContext(); |
| 244 Node* native_context = | 249 Node* native_context = |
| 245 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 250 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 246 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); | 251 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); |
| 247 | 252 |
| 248 // Load the global via the LoadIC. | 253 // Load the global via the LoadIC. |
| 249 Node* code_target = __ HeapConstant(ic.code()); | 254 Node* code_target = __ HeapConstant(ic.code()); |
| 250 Node* constant_index = __ BytecodeOperandIdx(0); | 255 Node* constant_index = __ BytecodeOperandIdx(0); |
| 251 Node* name = __ LoadConstantPoolEntry(constant_index); | 256 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 252 Node* raw_slot = __ BytecodeOperandIdx(1); | 257 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 253 Node* smi_slot = __ SmiTag(raw_slot); | 258 Node* smi_slot = __ SmiTag(raw_slot); |
| 254 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 259 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 255 Node* result = __ CallIC(ic.descriptor(), code_target, global, name, smi_slot, | 260 Node* result = __ CallStub(ic.descriptor(), code_target, context, global, |
| 256 type_feedback_vector); | 261 name, smi_slot, type_feedback_vector); |
| 257 __ SetAccumulator(result); | 262 __ SetAccumulator(result); |
| 258 __ Dispatch(); | 263 __ Dispatch(); |
| 259 } | 264 } |
| 260 | 265 |
| 261 | 266 |
| 262 // LdaGlobalSloppy <name_index> <slot> | 267 // LdaGlobalSloppy <name_index> <slot> |
| 263 // | 268 // |
| 264 // Load the global with name in constant pool entry <name_index> into the | 269 // Load the global with name in constant pool entry <name_index> into the |
| 265 // accumulator using FeedBackVector slot <slot> in sloppy mode. | 270 // accumulator using FeedBackVector slot <slot> in sloppy mode. |
| 266 void Interpreter::DoLdaGlobalSloppy(compiler::InterpreterAssembler* assembler) { | 271 void Interpreter::DoLdaGlobalSloppy(InterpreterAssembler* assembler) { |
| 267 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 272 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 268 SLOPPY, UNINITIALIZED); | 273 SLOPPY, UNINITIALIZED); |
| 269 DoLoadGlobal(ic, assembler); | 274 DoLoadGlobal(ic, assembler); |
| 270 } | 275 } |
| 271 | 276 |
| 272 | 277 |
| 273 // LdaGlobalSloppy <name_index> <slot> | 278 // LdaGlobalSloppy <name_index> <slot> |
| 274 // | 279 // |
| 275 // Load the global with name in constant pool entry <name_index> into the | 280 // Load the global with name in constant pool entry <name_index> into the |
| 276 // accumulator using FeedBackVector slot <slot> in strict mode. | 281 // accumulator using FeedBackVector slot <slot> in strict mode. |
| 277 void Interpreter::DoLdaGlobalStrict(compiler::InterpreterAssembler* assembler) { | 282 void Interpreter::DoLdaGlobalStrict(InterpreterAssembler* assembler) { |
| 278 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 283 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 279 STRICT, UNINITIALIZED); | 284 STRICT, UNINITIALIZED); |
| 280 DoLoadGlobal(ic, assembler); | 285 DoLoadGlobal(ic, assembler); |
| 281 } | 286 } |
| 282 | 287 |
| 283 | 288 |
| 284 // LdaGlobalInsideTypeofSloppy <name_index> <slot> | 289 // LdaGlobalInsideTypeofSloppy <name_index> <slot> |
| 285 // | 290 // |
| 286 // Load the global with name in constant pool entry <name_index> into the | 291 // Load the global with name in constant pool entry <name_index> into the |
| 287 // accumulator using FeedBackVector slot <slot> in sloppy mode. | 292 // accumulator using FeedBackVector slot <slot> in sloppy mode. |
| 288 void Interpreter::DoLdaGlobalInsideTypeofSloppy( | 293 void Interpreter::DoLdaGlobalInsideTypeofSloppy( |
| 289 compiler::InterpreterAssembler* assembler) { | 294 InterpreterAssembler* assembler) { |
| 290 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | 295 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, |
| 291 SLOPPY, UNINITIALIZED); | 296 SLOPPY, UNINITIALIZED); |
| 292 DoLoadGlobal(ic, assembler); | 297 DoLoadGlobal(ic, assembler); |
| 293 } | 298 } |
| 294 | 299 |
| 295 | 300 |
| 296 // LdaGlobalInsideTypeofStrict <name_index> <slot> | 301 // LdaGlobalInsideTypeofStrict <name_index> <slot> |
| 297 // | 302 // |
| 298 // Load the global with name in constant pool entry <name_index> into the | 303 // Load the global with name in constant pool entry <name_index> into the |
| 299 // accumulator using FeedBackVector slot <slot> in strict mode. | 304 // accumulator using FeedBackVector slot <slot> in strict mode. |
| 300 void Interpreter::DoLdaGlobalInsideTypeofStrict( | 305 void Interpreter::DoLdaGlobalInsideTypeofStrict( |
| 301 compiler::InterpreterAssembler* assembler) { | 306 InterpreterAssembler* assembler) { |
| 302 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | 307 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, |
| 303 STRICT, UNINITIALIZED); | 308 STRICT, UNINITIALIZED); |
| 304 DoLoadGlobal(ic, assembler); | 309 DoLoadGlobal(ic, assembler); |
| 305 } | 310 } |
| 306 | 311 |
| 307 | 312 |
| 308 // LdaGlobalSloppyWide <name_index> <slot> | 313 // LdaGlobalSloppyWide <name_index> <slot> |
| 309 // | 314 // |
| 310 // Load the global with name in constant pool entry <name_index> into the | 315 // Load the global with name in constant pool entry <name_index> into the |
| 311 // accumulator using FeedBackVector slot <slot> in sloppy mode. | 316 // accumulator using FeedBackVector slot <slot> in sloppy mode. |
| 312 void Interpreter::DoLdaGlobalSloppyWide( | 317 void Interpreter::DoLdaGlobalSloppyWide(InterpreterAssembler* assembler) { |
| 313 compiler::InterpreterAssembler* assembler) { | |
| 314 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 318 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 315 SLOPPY, UNINITIALIZED); | 319 SLOPPY, UNINITIALIZED); |
| 316 DoLoadGlobal(ic, assembler); | 320 DoLoadGlobal(ic, assembler); |
| 317 } | 321 } |
| 318 | 322 |
| 319 | 323 |
| 320 // LdaGlobalSloppyWide <name_index> <slot> | 324 // LdaGlobalSloppyWide <name_index> <slot> |
| 321 // | 325 // |
| 322 // Load the global with name in constant pool entry <name_index> into the | 326 // Load the global with name in constant pool entry <name_index> into the |
| 323 // accumulator using FeedBackVector slot <slot> in strict mode. | 327 // accumulator using FeedBackVector slot <slot> in strict mode. |
| 324 void Interpreter::DoLdaGlobalStrictWide( | 328 void Interpreter::DoLdaGlobalStrictWide(InterpreterAssembler* assembler) { |
| 325 compiler::InterpreterAssembler* assembler) { | |
| 326 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 329 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 327 STRICT, UNINITIALIZED); | 330 STRICT, UNINITIALIZED); |
| 328 DoLoadGlobal(ic, assembler); | 331 DoLoadGlobal(ic, assembler); |
| 329 } | 332 } |
| 330 | 333 |
| 331 | 334 |
| 332 // LdaGlobalInsideTypeofSloppyWide <name_index> <slot> | 335 // LdaGlobalInsideTypeofSloppyWide <name_index> <slot> |
| 333 // | 336 // |
| 334 // Load the global with name in constant pool entry <name_index> into the | 337 // Load the global with name in constant pool entry <name_index> into the |
| 335 // accumulator using FeedBackVector slot <slot> in sloppy mode. | 338 // accumulator using FeedBackVector slot <slot> in sloppy mode. |
| 336 void Interpreter::DoLdaGlobalInsideTypeofSloppyWide( | 339 void Interpreter::DoLdaGlobalInsideTypeofSloppyWide( |
| 337 compiler::InterpreterAssembler* assembler) { | 340 InterpreterAssembler* assembler) { |
| 338 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | 341 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, |
| 339 SLOPPY, UNINITIALIZED); | 342 SLOPPY, UNINITIALIZED); |
| 340 DoLoadGlobal(ic, assembler); | 343 DoLoadGlobal(ic, assembler); |
| 341 } | 344 } |
| 342 | 345 |
| 343 | 346 |
| 344 // LdaGlobalInsideTypeofSloppyWide <name_index> <slot> | 347 // LdaGlobalInsideTypeofSloppyWide <name_index> <slot> |
| 345 // | 348 // |
| 346 // Load the global with name in constant pool entry <name_index> into the | 349 // Load the global with name in constant pool entry <name_index> into the |
| 347 // accumulator using FeedBackVector slot <slot> in strict mode. | 350 // accumulator using FeedBackVector slot <slot> in strict mode. |
| 348 void Interpreter::DoLdaGlobalInsideTypeofStrictWide( | 351 void Interpreter::DoLdaGlobalInsideTypeofStrictWide( |
| 349 compiler::InterpreterAssembler* assembler) { | 352 InterpreterAssembler* assembler) { |
| 350 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | 353 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, |
| 351 STRICT, UNINITIALIZED); | 354 STRICT, UNINITIALIZED); |
| 352 DoLoadGlobal(ic, assembler); | 355 DoLoadGlobal(ic, assembler); |
| 353 } | 356 } |
| 354 | 357 |
| 355 | 358 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) { |
| 356 void Interpreter::DoStoreGlobal(Callable ic, | |
| 357 compiler::InterpreterAssembler* assembler) { | |
| 358 // Get the global object. | 359 // Get the global object. |
| 359 Node* context = __ GetContext(); | 360 Node* context = __ GetContext(); |
| 360 Node* native_context = | 361 Node* native_context = |
| 361 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 362 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 362 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); | 363 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); |
| 363 | 364 |
| 364 // Store the global via the StoreIC. | 365 // Store the global via the StoreIC. |
| 365 Node* code_target = __ HeapConstant(ic.code()); | 366 Node* code_target = __ HeapConstant(ic.code()); |
| 366 Node* constant_index = __ BytecodeOperandIdx(0); | 367 Node* constant_index = __ BytecodeOperandIdx(0); |
| 367 Node* name = __ LoadConstantPoolEntry(constant_index); | 368 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 368 Node* value = __ GetAccumulator(); | 369 Node* value = __ GetAccumulator(); |
| 369 Node* raw_slot = __ BytecodeOperandIdx(1); | 370 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 370 Node* smi_slot = __ SmiTag(raw_slot); | 371 Node* smi_slot = __ SmiTag(raw_slot); |
| 371 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 372 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 372 __ CallIC(ic.descriptor(), code_target, global, name, value, smi_slot, | 373 __ CallStub(ic.descriptor(), code_target, context, global, name, value, |
| 373 type_feedback_vector); | 374 smi_slot, type_feedback_vector); |
| 374 | 375 |
| 375 __ Dispatch(); | 376 __ Dispatch(); |
| 376 } | 377 } |
| 377 | 378 |
| 378 | 379 |
| 379 // StaGlobalSloppy <name_index> <slot> | 380 // StaGlobalSloppy <name_index> <slot> |
| 380 // | 381 // |
| 381 // Store the value in the accumulator into the global with name in constant pool | 382 // Store the value in the accumulator into the global with name in constant pool |
| 382 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | 383 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. |
| 383 void Interpreter::DoStaGlobalSloppy(compiler::InterpreterAssembler* assembler) { | 384 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { |
| 384 Callable ic = | 385 Callable ic = |
| 385 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 386 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 386 DoStoreGlobal(ic, assembler); | 387 DoStoreGlobal(ic, assembler); |
| 387 } | 388 } |
| 388 | 389 |
| 389 | 390 |
| 390 // StaGlobalStrict <name_index> <slot> | 391 // StaGlobalStrict <name_index> <slot> |
| 391 // | 392 // |
| 392 // Store the value in the accumulator into the global with name in constant pool | 393 // Store the value in the accumulator into the global with name in constant pool |
| 393 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | 394 // entry <name_index> using FeedBackVector slot <slot> in strict mode. |
| 394 void Interpreter::DoStaGlobalStrict(compiler::InterpreterAssembler* assembler) { | 395 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { |
| 395 Callable ic = | 396 Callable ic = |
| 396 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 397 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 397 DoStoreGlobal(ic, assembler); | 398 DoStoreGlobal(ic, assembler); |
| 398 } | 399 } |
| 399 | 400 |
| 400 | 401 |
| 401 // StaGlobalSloppyWide <name_index> <slot> | 402 // StaGlobalSloppyWide <name_index> <slot> |
| 402 // | 403 // |
| 403 // Store the value in the accumulator into the global with name in constant pool | 404 // Store the value in the accumulator into the global with name in constant pool |
| 404 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | 405 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. |
| 405 void Interpreter::DoStaGlobalSloppyWide( | 406 void Interpreter::DoStaGlobalSloppyWide(InterpreterAssembler* assembler) { |
| 406 compiler::InterpreterAssembler* assembler) { | |
| 407 Callable ic = | 407 Callable ic = |
| 408 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 408 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 409 DoStoreGlobal(ic, assembler); | 409 DoStoreGlobal(ic, assembler); |
| 410 } | 410 } |
| 411 | 411 |
| 412 | 412 |
| 413 // StaGlobalStrictWide <name_index> <slot> | 413 // StaGlobalStrictWide <name_index> <slot> |
| 414 // | 414 // |
| 415 // Store the value in the accumulator into the global with name in constant pool | 415 // Store the value in the accumulator into the global with name in constant pool |
| 416 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | 416 // entry <name_index> using FeedBackVector slot <slot> in strict mode. |
| 417 void Interpreter::DoStaGlobalStrictWide( | 417 void Interpreter::DoStaGlobalStrictWide(InterpreterAssembler* assembler) { |
| 418 compiler::InterpreterAssembler* assembler) { | |
| 419 Callable ic = | 418 Callable ic = |
| 420 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 419 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 421 DoStoreGlobal(ic, assembler); | 420 DoStoreGlobal(ic, assembler); |
| 422 } | 421 } |
| 423 | 422 |
| 424 | 423 |
| 425 // LdaContextSlot <context> <slot_index> | 424 // LdaContextSlot <context> <slot_index> |
| 426 // | 425 // |
| 427 // Load the object in |slot_index| of |context| into the accumulator. | 426 // Load the object in |slot_index| of |context| into the accumulator. |
| 428 void Interpreter::DoLdaContextSlot(compiler::InterpreterAssembler* assembler) { | 427 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { |
| 429 Node* reg_index = __ BytecodeOperandReg(0); | 428 Node* reg_index = __ BytecodeOperandReg(0); |
| 430 Node* context = __ LoadRegister(reg_index); | 429 Node* context = __ LoadRegister(reg_index); |
| 431 Node* slot_index = __ BytecodeOperandIdx(1); | 430 Node* slot_index = __ BytecodeOperandIdx(1); |
| 432 Node* result = __ LoadContextSlot(context, slot_index); | 431 Node* result = __ LoadContextSlot(context, slot_index); |
| 433 __ SetAccumulator(result); | 432 __ SetAccumulator(result); |
| 434 __ Dispatch(); | 433 __ Dispatch(); |
| 435 } | 434 } |
| 436 | 435 |
| 437 | 436 |
| 438 // LdaContextSlotWide <context> <slot_index> | 437 // LdaContextSlotWide <context> <slot_index> |
| 439 // | 438 // |
| 440 // Load the object in |slot_index| of |context| into the accumulator. | 439 // Load the object in |slot_index| of |context| into the accumulator. |
| 441 void Interpreter::DoLdaContextSlotWide( | 440 void Interpreter::DoLdaContextSlotWide(InterpreterAssembler* assembler) { |
| 442 compiler::InterpreterAssembler* assembler) { | |
| 443 DoLdaContextSlot(assembler); | 441 DoLdaContextSlot(assembler); |
| 444 } | 442 } |
| 445 | 443 |
| 446 | 444 |
| 447 // StaContextSlot <context> <slot_index> | 445 // StaContextSlot <context> <slot_index> |
| 448 // | 446 // |
| 449 // Stores the object in the accumulator into |slot_index| of |context|. | 447 // Stores the object in the accumulator into |slot_index| of |context|. |
| 450 void Interpreter::DoStaContextSlot(compiler::InterpreterAssembler* assembler) { | 448 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { |
| 451 Node* value = __ GetAccumulator(); | 449 Node* value = __ GetAccumulator(); |
| 452 Node* reg_index = __ BytecodeOperandReg(0); | 450 Node* reg_index = __ BytecodeOperandReg(0); |
| 453 Node* context = __ LoadRegister(reg_index); | 451 Node* context = __ LoadRegister(reg_index); |
| 454 Node* slot_index = __ BytecodeOperandIdx(1); | 452 Node* slot_index = __ BytecodeOperandIdx(1); |
| 455 __ StoreContextSlot(context, slot_index, value); | 453 __ StoreContextSlot(context, slot_index, value); |
| 456 __ Dispatch(); | 454 __ Dispatch(); |
| 457 } | 455 } |
| 458 | 456 |
| 459 | 457 |
| 460 // StaContextSlot <context> <slot_index> | 458 // StaContextSlot <context> <slot_index> |
| 461 // | 459 // |
| 462 // Stores the object in the accumulator into |slot_index| of |context|. | 460 // Stores the object in the accumulator into |slot_index| of |context|. |
| 463 void Interpreter::DoStaContextSlotWide( | 461 void Interpreter::DoStaContextSlotWide(InterpreterAssembler* assembler) { |
| 464 compiler::InterpreterAssembler* assembler) { | |
| 465 DoStaContextSlot(assembler); | 462 DoStaContextSlot(assembler); |
| 466 } | 463 } |
| 467 | 464 |
| 468 | |
| 469 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, | 465 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, |
| 470 compiler::InterpreterAssembler* assembler) { | 466 InterpreterAssembler* assembler) { |
| 471 Node* index = __ BytecodeOperandIdx(0); | 467 Node* index = __ BytecodeOperandIdx(0); |
| 472 Node* name = __ LoadConstantPoolEntry(index); | 468 Node* name = __ LoadConstantPoolEntry(index); |
| 473 Node* context = __ GetContext(); | 469 Node* context = __ GetContext(); |
| 474 Node* result_pair = __ CallRuntime(function_id, context, name); | 470 Node* result_pair = __ CallRuntime(function_id, context, context, name); |
| 475 Node* result = __ Projection(0, result_pair); | 471 Node* result = __ Projection(0, result_pair); |
| 476 __ SetAccumulator(result); | 472 __ SetAccumulator(result); |
| 477 __ Dispatch(); | 473 __ Dispatch(); |
| 478 } | 474 } |
| 479 | 475 |
| 480 | 476 |
| 481 // LdaLookupSlot <name_index> | 477 // LdaLookupSlot <name_index> |
| 482 // | 478 // |
| 483 // Lookup the object with the name in constant pool entry |name_index| | 479 // Lookup the object with the name in constant pool entry |name_index| |
| 484 // dynamically. | 480 // dynamically. |
| 485 void Interpreter::DoLdaLookupSlot(compiler::InterpreterAssembler* assembler) { | 481 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { |
| 486 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); | 482 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); |
| 487 } | 483 } |
| 488 | 484 |
| 489 | 485 |
| 490 // LdaLookupSlotInsideTypeof <name_index> | 486 // LdaLookupSlotInsideTypeof <name_index> |
| 491 // | 487 // |
| 492 // Lookup the object with the name in constant pool entry |name_index| | 488 // Lookup the object with the name in constant pool entry |name_index| |
| 493 // dynamically without causing a NoReferenceError. | 489 // dynamically without causing a NoReferenceError. |
| 494 void Interpreter::DoLdaLookupSlotInsideTypeof( | 490 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { |
| 495 compiler::InterpreterAssembler* assembler) { | |
| 496 DoLoadLookupSlot(Runtime::kLoadLookupSlotNoReferenceError, assembler); | 491 DoLoadLookupSlot(Runtime::kLoadLookupSlotNoReferenceError, assembler); |
| 497 } | 492 } |
| 498 | 493 |
| 499 | 494 |
| 500 // LdaLookupSlotWide <name_index> | 495 // LdaLookupSlotWide <name_index> |
| 501 // | 496 // |
| 502 // Lookup the object with the name in constant pool entry |name_index| | 497 // Lookup the object with the name in constant pool entry |name_index| |
| 503 // dynamically. | 498 // dynamically. |
| 504 void Interpreter::DoLdaLookupSlotWide( | 499 void Interpreter::DoLdaLookupSlotWide(InterpreterAssembler* assembler) { |
| 505 compiler::InterpreterAssembler* assembler) { | |
| 506 DoLdaLookupSlot(assembler); | 500 DoLdaLookupSlot(assembler); |
| 507 } | 501 } |
| 508 | 502 |
| 509 | 503 |
| 510 // LdaLookupSlotInsideTypeofWide <name_index> | 504 // LdaLookupSlotInsideTypeofWide <name_index> |
| 511 // | 505 // |
| 512 // Lookup the object with the name in constant pool entry |name_index| | 506 // Lookup the object with the name in constant pool entry |name_index| |
| 513 // dynamically without causing a NoReferenceError. | 507 // dynamically without causing a NoReferenceError. |
| 514 void Interpreter::DoLdaLookupSlotInsideTypeofWide( | 508 void Interpreter::DoLdaLookupSlotInsideTypeofWide( |
| 515 compiler::InterpreterAssembler* assembler) { | 509 InterpreterAssembler* assembler) { |
| 516 DoLdaLookupSlotInsideTypeof(assembler); | 510 DoLdaLookupSlotInsideTypeof(assembler); |
| 517 } | 511 } |
| 518 | 512 |
| 519 | |
| 520 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, | 513 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, |
| 521 compiler::InterpreterAssembler* assembler) { | 514 InterpreterAssembler* assembler) { |
| 522 Node* value = __ GetAccumulator(); | 515 Node* value = __ GetAccumulator(); |
| 523 Node* index = __ BytecodeOperandIdx(0); | 516 Node* index = __ BytecodeOperandIdx(0); |
| 524 Node* name = __ LoadConstantPoolEntry(index); | 517 Node* name = __ LoadConstantPoolEntry(index); |
| 525 Node* context = __ GetContext(); | 518 Node* context = __ GetContext(); |
| 526 Node* language_mode_node = __ NumberConstant(language_mode); | 519 Node* language_mode_node = __ NumberConstant(language_mode); |
| 527 Node* result = __ CallRuntime(Runtime::kStoreLookupSlot, value, context, name, | 520 Node* result = __ CallRuntime(Runtime::kStoreLookupSlot, context, value, |
| 528 language_mode_node); | 521 context, name, language_mode_node); |
| 529 __ SetAccumulator(result); | 522 __ SetAccumulator(result); |
| 530 __ Dispatch(); | 523 __ Dispatch(); |
| 531 } | 524 } |
| 532 | 525 |
| 533 | 526 |
| 534 // StaLookupSlotSloppy <name_index> | 527 // StaLookupSlotSloppy <name_index> |
| 535 // | 528 // |
| 536 // Store the object in accumulator to the object with the name in constant | 529 // Store the object in accumulator to the object with the name in constant |
| 537 // pool entry |name_index| in sloppy mode. | 530 // pool entry |name_index| in sloppy mode. |
| 538 void Interpreter::DoStaLookupSlotSloppy( | 531 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { |
| 539 compiler::InterpreterAssembler* assembler) { | |
| 540 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); | 532 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); |
| 541 } | 533 } |
| 542 | 534 |
| 543 | 535 |
| 544 // StaLookupSlotStrict <name_index> | 536 // StaLookupSlotStrict <name_index> |
| 545 // | 537 // |
| 546 // Store the object in accumulator to the object with the name in constant | 538 // Store the object in accumulator to the object with the name in constant |
| 547 // pool entry |name_index| in strict mode. | 539 // pool entry |name_index| in strict mode. |
| 548 void Interpreter::DoStaLookupSlotStrict( | 540 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { |
| 549 compiler::InterpreterAssembler* assembler) { | |
| 550 DoStoreLookupSlot(LanguageMode::STRICT, assembler); | 541 DoStoreLookupSlot(LanguageMode::STRICT, assembler); |
| 551 } | 542 } |
| 552 | 543 |
| 553 | 544 |
| 554 // StaLookupSlotSloppyWide <name_index> | 545 // StaLookupSlotSloppyWide <name_index> |
| 555 // | 546 // |
| 556 // Store the object in accumulator to the object with the name in constant | 547 // Store the object in accumulator to the object with the name in constant |
| 557 // pool entry |name_index| in sloppy mode. | 548 // pool entry |name_index| in sloppy mode. |
| 558 void Interpreter::DoStaLookupSlotSloppyWide( | 549 void Interpreter::DoStaLookupSlotSloppyWide(InterpreterAssembler* assembler) { |
| 559 compiler::InterpreterAssembler* assembler) { | |
| 560 DoStaLookupSlotSloppy(assembler); | 550 DoStaLookupSlotSloppy(assembler); |
| 561 } | 551 } |
| 562 | 552 |
| 563 | 553 |
| 564 // StaLookupSlotStrictWide <name_index> | 554 // StaLookupSlotStrictWide <name_index> |
| 565 // | 555 // |
| 566 // Store the object in accumulator to the object with the name in constant | 556 // Store the object in accumulator to the object with the name in constant |
| 567 // pool entry |name_index| in strict mode. | 557 // pool entry |name_index| in strict mode. |
| 568 void Interpreter::DoStaLookupSlotStrictWide( | 558 void Interpreter::DoStaLookupSlotStrictWide(InterpreterAssembler* assembler) { |
| 569 compiler::InterpreterAssembler* assembler) { | |
| 570 DoStaLookupSlotStrict(assembler); | 559 DoStaLookupSlotStrict(assembler); |
| 571 } | 560 } |
| 572 | 561 |
| 573 | 562 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) { |
| 574 void Interpreter::DoLoadIC(Callable ic, | |
| 575 compiler::InterpreterAssembler* assembler) { | |
| 576 Node* code_target = __ HeapConstant(ic.code()); | 563 Node* code_target = __ HeapConstant(ic.code()); |
| 577 Node* register_index = __ BytecodeOperandReg(0); | 564 Node* register_index = __ BytecodeOperandReg(0); |
| 578 Node* object = __ LoadRegister(register_index); | 565 Node* object = __ LoadRegister(register_index); |
| 579 Node* constant_index = __ BytecodeOperandIdx(1); | 566 Node* constant_index = __ BytecodeOperandIdx(1); |
| 580 Node* name = __ LoadConstantPoolEntry(constant_index); | 567 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 581 Node* raw_slot = __ BytecodeOperandIdx(2); | 568 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 582 Node* smi_slot = __ SmiTag(raw_slot); | 569 Node* smi_slot = __ SmiTag(raw_slot); |
| 583 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 570 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 584 Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot, | 571 Node* context = __ GetContext(); |
| 585 type_feedback_vector); | 572 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| | 573 name, smi_slot, type_feedback_vector); |
| 586 __ SetAccumulator(result); | 574 __ SetAccumulator(result); |
| 587 __ Dispatch(); | 575 __ Dispatch(); |
| 588 } | 576 } |
| 589 | 577 |
| 590 | 578 |
| 591 // LoadICSloppy <object> <name_index> <slot> | 579 // LoadICSloppy <object> <name_index> <slot> |
| 592 // | 580 // |
| 593 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and | 581 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and |
| 594 // the name at constant pool entry <name_index>. | 582 // the name at constant pool entry <name_index>. |
| 595 void Interpreter::DoLoadICSloppy(compiler::InterpreterAssembler* assembler) { | 583 void Interpreter::DoLoadICSloppy(InterpreterAssembler* assembler) { |
| 596 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 584 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 597 SLOPPY, UNINITIALIZED); | 585 SLOPPY, UNINITIALIZED); |
| 598 DoLoadIC(ic, assembler); | 586 DoLoadIC(ic, assembler); |
| 599 } | 587 } |
| 600 | 588 |
| 601 | 589 |
| 602 // LoadICStrict <object> <name_index> <slot> | 590 // LoadICStrict <object> <name_index> <slot> |
| 603 // | 591 // |
| 604 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and | 592 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and |
| 605 // the name at constant pool entry <name_index>. | 593 // the name at constant pool entry <name_index>. |
| 606 void Interpreter::DoLoadICStrict(compiler::InterpreterAssembler* assembler) { | 594 void Interpreter::DoLoadICStrict(InterpreterAssembler* assembler) { |
| 607 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 595 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 608 STRICT, UNINITIALIZED); | 596 STRICT, UNINITIALIZED); |
| 609 DoLoadIC(ic, assembler); | 597 DoLoadIC(ic, assembler); |
| 610 } | 598 } |
| 611 | 599 |
| 612 | 600 |
| 613 // LoadICSloppyWide <object> <name_index> <slot> | 601 // LoadICSloppyWide <object> <name_index> <slot> |
| 614 // | 602 // |
| 615 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and | 603 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and |
| 616 // the name at constant pool entry <name_index>. | 604 // the name at constant pool entry <name_index>. |
| 617 void Interpreter::DoLoadICSloppyWide( | 605 void Interpreter::DoLoadICSloppyWide(InterpreterAssembler* assembler) { |
| 618 compiler::InterpreterAssembler* assembler) { | |
| 619 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 606 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 620 SLOPPY, UNINITIALIZED); | 607 SLOPPY, UNINITIALIZED); |
| 621 DoLoadIC(ic, assembler); | 608 DoLoadIC(ic, assembler); |
| 622 } | 609 } |
| 623 | 610 |
| 624 | 611 |
| 625 // LoadICStrictWide <object> <name_index> <slot> | 612 // LoadICStrictWide <object> <name_index> <slot> |
| 626 // | 613 // |
| 627 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and | 614 // Calls the sloppy mode LoadIC at FeedBackVector slot <slot> for <object> and |
| 628 // the name at constant pool entry <name_index>. | 615 // the name at constant pool entry <name_index>. |
| 629 void Interpreter::DoLoadICStrictWide( | 616 void Interpreter::DoLoadICStrictWide(InterpreterAssembler* assembler) { |
| 630 compiler::InterpreterAssembler* assembler) { | |
| 631 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 617 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 632 STRICT, UNINITIALIZED); | 618 STRICT, UNINITIALIZED); |
| 633 DoLoadIC(ic, assembler); | 619 DoLoadIC(ic, assembler); |
| 634 } | 620 } |
| 635 | 621 |
| 636 | 622 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) { |
| 637 void Interpreter::DoKeyedLoadIC(Callable ic, | |
| 638 compiler::InterpreterAssembler* assembler) { | |
| 639 Node* code_target = __ HeapConstant(ic.code()); | 623 Node* code_target = __ HeapConstant(ic.code()); |
| 640 Node* reg_index = __ BytecodeOperandReg(0); | 624 Node* reg_index = __ BytecodeOperandReg(0); |
| 641 Node* object = __ LoadRegister(reg_index); | 625 Node* object = __ LoadRegister(reg_index); |
| 642 Node* name = __ GetAccumulator(); | 626 Node* name = __ GetAccumulator(); |
| 643 Node* raw_slot = __ BytecodeOperandIdx(1); | 627 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 644 Node* smi_slot = __ SmiTag(raw_slot); | 628 Node* smi_slot = __ SmiTag(raw_slot); |
| 645 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 629 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 646 Node* result = __ CallIC(ic.descriptor(), code_target, object, name, smi_slot, | 630 Node* context = __ GetContext(); |
| 647 type_feedback_vector); | 631 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| | 632 name, smi_slot, type_feedback_vector); |
| 648 __ SetAccumulator(result); | 633 __ SetAccumulator(result); |
| 649 __ Dispatch(); | 634 __ Dispatch(); |
| 650 } | 635 } |
| 651 | 636 |
| 652 | 637 |
| 653 // KeyedLoadICSloppy <object> <slot> | 638 // KeyedLoadICSloppy <object> <slot> |
| 654 // | 639 // |
| 655 // Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object> | 640 // Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object> |
| 656 // and the key in the accumulator. | 641 // and the key in the accumulator. |
| 657 void Interpreter::DoKeyedLoadICSloppy( | 642 void Interpreter::DoKeyedLoadICSloppy(InterpreterAssembler* assembler) { |
| 658 compiler::InterpreterAssembler* assembler) { | |
| 659 Callable ic = | 643 Callable ic = |
| 660 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 644 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 661 DoKeyedLoadIC(ic, assembler); | 645 DoKeyedLoadIC(ic, assembler); |
| 662 } | 646 } |
| 663 | 647 |
| 664 | 648 |
| 665 // KeyedLoadICStrict <object> <slot> | 649 // KeyedLoadICStrict <object> <slot> |
| 666 // | 650 // |
| 667 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object> | 651 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object> |
| 668 // and the key in the accumulator. | 652 // and the key in the accumulator. |
| 669 void Interpreter::DoKeyedLoadICStrict( | 653 void Interpreter::DoKeyedLoadICStrict(InterpreterAssembler* assembler) { |
| 670 compiler::InterpreterAssembler* assembler) { | |
| 671 Callable ic = | 654 Callable ic = |
| 672 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 655 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 673 DoKeyedLoadIC(ic, assembler); | 656 DoKeyedLoadIC(ic, assembler); |
| 674 } | 657 } |
| 675 | 658 |
| 676 | 659 |
| 677 // KeyedLoadICSloppyWide <object> <slot> | 660 // KeyedLoadICSloppyWide <object> <slot> |
| 678 // | 661 // |
| 679 // Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object> | 662 // Calls the sloppy mode KeyedLoadIC at FeedBackVector slot <slot> for <object> |
| 680 // and the key in the accumulator. | 663 // and the key in the accumulator. |
| 681 void Interpreter::DoKeyedLoadICSloppyWide( | 664 void Interpreter::DoKeyedLoadICSloppyWide(InterpreterAssembler* assembler) { |
| 682 compiler::InterpreterAssembler* assembler) { | |
| 683 Callable ic = | 665 Callable ic = |
| 684 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 666 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 685 DoKeyedLoadIC(ic, assembler); | 667 DoKeyedLoadIC(ic, assembler); |
| 686 } | 668 } |
| 687 | 669 |
| 688 | 670 |
| 689 // KeyedLoadICStrictWide <object> <slot> | 671 // KeyedLoadICStrictWide <object> <slot> |
| 690 // | 672 // |
| 691 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object> | 673 // Calls the strict mode KeyedLoadIC at FeedBackVector slot <slot> for <object> |
| 692 // and the key in the accumulator. | 674 // and the key in the accumulator. |
| 693 void Interpreter::DoKeyedLoadICStrictWide( | 675 void Interpreter::DoKeyedLoadICStrictWide(InterpreterAssembler* assembler) { |
| 694 compiler::InterpreterAssembler* assembler) { | |
| 695 Callable ic = | 676 Callable ic = |
| 696 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 677 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 697 DoKeyedLoadIC(ic, assembler); | 678 DoKeyedLoadIC(ic, assembler); |
| 698 } | 679 } |
| 699 | 680 |
| 700 | 681 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { |
| 701 void Interpreter::DoStoreIC(Callable ic, | |
| 702 compiler::InterpreterAssembler* assembler) { | |
| 703 Node* code_target = __ HeapConstant(ic.code()); | 682 Node* code_target = __ HeapConstant(ic.code()); |
| 704 Node* object_reg_index = __ BytecodeOperandReg(0); | 683 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 705 Node* object = __ LoadRegister(object_reg_index); | 684 Node* object = __ LoadRegister(object_reg_index); |
| 706 Node* constant_index = __ BytecodeOperandIdx(1); | 685 Node* constant_index = __ BytecodeOperandIdx(1); |
| 707 Node* name = __ LoadConstantPoolEntry(constant_index); | 686 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 708 Node* value = __ GetAccumulator(); | 687 Node* value = __ GetAccumulator(); |
| 709 Node* raw_slot = __ BytecodeOperandIdx(2); | 688 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 710 Node* smi_slot = __ SmiTag(raw_slot); | 689 Node* smi_slot = __ SmiTag(raw_slot); |
| 711 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 690 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 712 __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot, | 691 Node* context = __ GetContext(); |
| 713 type_feedback_vector); | 692 __ CallStub(ic.descriptor(), code_target, context, object, name, value, |
| | 693 smi_slot, type_feedback_vector); |
| 714 __ Dispatch(); | 694 __ Dispatch(); |
| 715 } | 695 } |
| 716 | 696 |
| 717 | 697 |
| 718 // StoreICSloppy <object> <name_index> <slot> | 698 // StoreICSloppy <object> <name_index> <slot> |
| 719 // | 699 // |
| 720 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and | 700 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and |
| 721 // the name in constant pool entry <name_index> with the value in the | 701 // the name in constant pool entry <name_index> with the value in the |
| 722 // accumulator. | 702 // accumulator. |
| 723 void Interpreter::DoStoreICSloppy(compiler::InterpreterAssembler* assembler) { | 703 void Interpreter::DoStoreICSloppy(InterpreterAssembler* assembler) { |
| 724 Callable ic = | 704 Callable ic = |
| 725 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 705 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 726 DoStoreIC(ic, assembler); | 706 DoStoreIC(ic, assembler); |
| 727 } | 707 } |
| 728 | 708 |
| 729 | 709 |
| 730 // StoreICStrict <object> <name_index> <slot> | 710 // StoreICStrict <object> <name_index> <slot> |
| 731 // | 711 // |
| 732 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and | 712 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and |
| 733 // the name in constant pool entry <name_index> with the value in the | 713 // the name in constant pool entry <name_index> with the value in the |
| 734 // accumulator. | 714 // accumulator. |
| 735 void Interpreter::DoStoreICStrict(compiler::InterpreterAssembler* assembler) { | 715 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) { |
| 736 Callable ic = | 716 Callable ic = |
| 737 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 717 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 738 DoStoreIC(ic, assembler); | 718 DoStoreIC(ic, assembler); |
| 739 } | 719 } |
| 740 | 720 |
| 741 | 721 |
| 742 // StoreICSloppyWide <object> <name_index> <slot> | 722 // StoreICSloppyWide <object> <name_index> <slot> |
| 743 // | 723 // |
| 744 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and | 724 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and |
| 745 // the name in constant pool entry <name_index> with the value in the | 725 // the name in constant pool entry <name_index> with the value in the |
| 746 // accumulator. | 726 // accumulator. |
| 747 void Interpreter::DoStoreICSloppyWide( | 727 void Interpreter::DoStoreICSloppyWide(InterpreterAssembler* assembler) { |
| 748 compiler::InterpreterAssembler* assembler) { | |
| 749 Callable ic = | 728 Callable ic = |
| 750 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 729 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 751 DoStoreIC(ic, assembler); | 730 DoStoreIC(ic, assembler); |
| 752 } | 731 } |
| 753 | 732 |
| 754 | 733 |
| 755 // StoreICStrictWide <object> <name_index> <slot> | 734 // StoreICStrictWide <object> <name_index> <slot> |
| 756 // | 735 // |
| 757 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and | 736 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and |
| 758 // the name in constant pool entry <name_index> with the value in the | 737 // the name in constant pool entry <name_index> with the value in the |
| 759 // accumulator. | 738 // accumulator. |
| 760 void Interpreter::DoStoreICStrictWide( | 739 void Interpreter::DoStoreICStrictWide(InterpreterAssembler* assembler) { |
| 761 compiler::InterpreterAssembler* assembler) { | |
| 762 Callable ic = | 740 Callable ic = |
| 763 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 741 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 764 DoStoreIC(ic, assembler); | 742 DoStoreIC(ic, assembler); |
| 765 } | 743 } |
| 766 | 744 |
| 767 | 745 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { |
| 768 void Interpreter::DoKeyedStoreIC(Callable ic, | |
| 769 compiler::InterpreterAssembler* assembler) { | |
| 770 Node* code_target = __ HeapConstant(ic.code()); | 746 Node* code_target = __ HeapConstant(ic.code()); |
| 771 Node* object_reg_index = __ BytecodeOperandReg(0); | 747 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 772 Node* object = __ LoadRegister(object_reg_index); | 748 Node* object = __ LoadRegister(object_reg_index); |
| 773 Node* name_reg_index = __ BytecodeOperandReg(1); | 749 Node* name_reg_index = __ BytecodeOperandReg(1); |
| 774 Node* name = __ LoadRegister(name_reg_index); | 750 Node* name = __ LoadRegister(name_reg_index); |
| 775 Node* value = __ GetAccumulator(); | 751 Node* value = __ GetAccumulator(); |
| 776 Node* raw_slot = __ BytecodeOperandIdx(2); | 752 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 777 Node* smi_slot = __ SmiTag(raw_slot); | 753 Node* smi_slot = __ SmiTag(raw_slot); |
| 778 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 754 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 779 __ CallIC(ic.descriptor(), code_target, object, name, value, smi_slot, | 755 Node* context = __ GetContext(); |
| 780 type_feedback_vector); | 756 __ CallStub(ic.descriptor(), code_target, context, object, name, value, |
| | 757 smi_slot, type_feedback_vector); |
| 781 __ Dispatch(); | 758 __ Dispatch(); |
| 782 } | 759 } |
| 783 | 760 |
| 784 | 761 |
| 785 // KeyedStoreICSloppy <object> <key> <slot> | 762 // KeyedStoreICSloppy <object> <key> <slot> |
| 786 // | 763 // |
| 787 // Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object> | 764 // Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object> |
| 788 // and the key <key> with the value in the accumulator. | 765 // and the key <key> with the value in the accumulator. |
| 789 void Interpreter::DoKeyedStoreICSloppy( | 766 void Interpreter::DoKeyedStoreICSloppy(InterpreterAssembler* assembler) { |
| 790 compiler::InterpreterAssembler* assembler) { | |
| 791 Callable ic = | 767 Callable ic = |
| 792 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 768 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 793 DoKeyedStoreIC(ic, assembler); | 769 DoKeyedStoreIC(ic, assembler); |
| 794 } | 770 } |
| 795 | 771 |
| 796 | 772 |
| 797 // KeyedStoreICStrict <object> <key> <slot> | 773 // KeyedStoreICStrict <object> <key> <slot> |
| 798 // | 774 // |
| 799 // Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object> | 775 // Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object> |
| 800 // and the key <key> with the value in the accumulator. | 776 // and the key <key> with the value in the accumulator. |
| 801 void Interpreter::DoKeyedStoreICStrict( | 777 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) { |
| 802 compiler::InterpreterAssembler* assembler) { | |
| 803 Callable ic = | 778 Callable ic = |
| 804 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 779 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 805 DoKeyedStoreIC(ic, assembler); | 780 DoKeyedStoreIC(ic, assembler); |
| 806 } | 781 } |
| 807 | 782 |
| 808 | 783 |
| 809 // KeyedStoreICSloppyWide <object> <key> <slot> | 784 // KeyedStoreICSloppyWide <object> <key> <slot> |
| 810 // | 785 // |
| 811 // Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object> | 786 // Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object> |
| 812 // and the key <key> with the value in the accumulator. | 787 // and the key <key> with the value in the accumulator. |
| 813 void Interpreter::DoKeyedStoreICSloppyWide( | 788 void Interpreter::DoKeyedStoreICSloppyWide(InterpreterAssembler* assembler) { |
| 814 compiler::InterpreterAssembler* assembler) { | |
| 815 Callable ic = | 789 Callable ic = |
| 816 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 790 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 817 DoKeyedStoreIC(ic, assembler); | 791 DoKeyedStoreIC(ic, assembler); |
| 818 } | 792 } |
| 819 | 793 |
| 820 | 794 |
| 821 // KeyedStoreICStrictWide <object> <key> <slot> | 795 // KeyedStoreICStrictWide <object> <key> <slot> |
| 822 // | 796 // |
| 823 // Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object> | 797 // Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object> |
| 824 // and the key <key> with the value in the accumulator. | 798 // and the key <key> with the value in the accumulator. |
| 825 void Interpreter::DoKeyedStoreICStrictWide( | 799 void Interpreter::DoKeyedStoreICStrictWide(InterpreterAssembler* assembler) { |
| 826 compiler::InterpreterAssembler* assembler) { | |
| 827 Callable ic = | 800 Callable ic = |
| 828 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 801 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 829 DoKeyedStoreIC(ic, assembler); | 802 DoKeyedStoreIC(ic, assembler); |
| 830 } | 803 } |
| 831 | 804 |
| 832 // PushContext <context> | 805 // PushContext <context> |
| 833 // | 806 // |
| 834 // Saves the current context in <context>, and pushes the accumulator as the | 807 // Saves the current context in <context>, and pushes the accumulator as the |
| 835 // new current context. | 808 // new current context. |
| 836 void Interpreter::DoPushContext(compiler::InterpreterAssembler* assembler) { | 809 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { |
| 837 Node* reg_index = __ BytecodeOperandReg(0); | 810 Node* reg_index = __ BytecodeOperandReg(0); |
| 838 Node* new_context = __ GetAccumulator(); | 811 Node* new_context = __ GetAccumulator(); |
| 839 Node* old_context = __ GetContext(); | 812 Node* old_context = __ GetContext(); |
| 840 __ StoreRegister(old_context, reg_index); | 813 __ StoreRegister(old_context, reg_index); |
| 841 __ SetContext(new_context); | 814 __ SetContext(new_context); |
| 842 __ Dispatch(); | 815 __ Dispatch(); |
| 843 } | 816 } |
| 844 | 817 |
| 845 | 818 |
| 846 // PopContext <context> | 819 // PopContext <context> |
| 847 // | 820 // |
| 848 // Pops the current context and sets <context> as the new context. | 821 // Pops the current context and sets <context> as the new context. |
| 849 void Interpreter::DoPopContext(compiler::InterpreterAssembler* assembler) { | 822 void Interpreter::DoPopContext(InterpreterAssembler* assembler) { |
| 850 Node* reg_index = __ BytecodeOperandReg(0); | 823 Node* reg_index = __ BytecodeOperandReg(0); |
| 851 Node* context = __ LoadRegister(reg_index); | 824 Node* context = __ LoadRegister(reg_index); |
| 852 __ SetContext(context); | 825 __ SetContext(context); |
| 853 __ Dispatch(); | 826 __ Dispatch(); |
| 854 } | 827 } |
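For readers new to the context bytecodes, a minimal sketch of the save/restore pattern that PushContext and PopContext implement (plain C++ with invented stand-in types, not V8 API): the old context is parked in an interpreter register on scope entry and reinstated from that register on scope exit.

```cpp
#include <cassert>
#include <vector>

struct Context { int depth; };

struct Frame {
  std::vector<Context*> registers;  // stand-in for the interpreter register file
  Context* current;                 // the "current context" pointer
};

// PushContext <reg>: save the current context in <reg> and make the new
// context (the accumulator value, passed directly here) current.
void PushContext(Frame& f, int reg, Context* new_context) {
  f.registers[reg] = f.current;
  f.current = new_context;
}

// PopContext <reg>: restore the context previously saved in <reg>.
void PopContext(Frame& f, int reg) { f.current = f.registers[reg]; }

int main() {
  Context outer{0}, inner{1};
  Frame f{std::vector<Context*>(4, nullptr), &outer};
  PushContext(f, 2, &inner);  // enter a block scope
  assert(f.current == &inner);
  PopContext(f, 2);           // leave the block scope
  assert(f.current == &outer);
}
```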
| 855 | 828 |
| 856 | |
| 857 void Interpreter::DoBinaryOp(Runtime::FunctionId function_id, | 829 void Interpreter::DoBinaryOp(Runtime::FunctionId function_id, |
| 858 compiler::InterpreterAssembler* assembler) { | 830 InterpreterAssembler* assembler) { |
| 859 // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized | 831 // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized |
| 860 // operations, instead of calling builtins directly. | 832 // operations, instead of calling builtins directly. |
| 861 Node* reg_index = __ BytecodeOperandReg(0); | 833 Node* reg_index = __ BytecodeOperandReg(0); |
| 862 Node* lhs = __ LoadRegister(reg_index); | 834 Node* lhs = __ LoadRegister(reg_index); |
| 863 Node* rhs = __ GetAccumulator(); | 835 Node* rhs = __ GetAccumulator(); |
| 864 Node* result = __ CallRuntime(function_id, lhs, rhs); | 836 Node* context = __ GetContext(); |
| 837 Node* result = __ CallRuntime(function_id, context, lhs, rhs); |
| 865 __ SetAccumulator(result); | 838 __ SetAccumulator(result); |
| 866 __ Dispatch(); | 839 __ Dispatch(); |
| 867 } | 840 } |
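The binary-operation handlers below all funnel through the helper above and differ only in the runtime function they name; with this change the current context is also threaded through the call explicitly. A hedged sketch of that shape in standalone C++ (stand-in types, not V8 code):

```cpp
#include <functional>
#include <iostream>

struct Context { /* stand-in for the current JS context */ };

// Stand-in for a runtime call that now receives the context explicitly,
// followed by the lhs (register operand) and rhs (accumulator) values.
using RuntimeFn = std::function<double(const Context&, double, double)>;

// Shared helper: individual handlers (Add, Sub, ...) differ only in the
// runtime function they forward here.
double DoBinaryOp(const RuntimeFn& fn, const Context& ctx, double lhs, double rhs) {
  return fn(ctx, lhs, rhs);  // result goes back into the "accumulator"
}

int main() {
  Context ctx;
  auto add = [](const Context&, double a, double b) { return a + b; };
  auto sub = [](const Context&, double a, double b) { return a - b; };
  std::cout << DoBinaryOp(add, ctx, 2, 3) << "\n";  // 5
  std::cout << DoBinaryOp(sub, ctx, 2, 3) << "\n";  // -1
}
```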
| 868 | 841 |
| 869 | 842 |
| 870 // Add <src> | 843 // Add <src> |
| 871 // | 844 // |
| 872 // Add register <src> to accumulator. | 845 // Add register <src> to accumulator. |
| 873 void Interpreter::DoAdd(compiler::InterpreterAssembler* assembler) { | 846 void Interpreter::DoAdd(InterpreterAssembler* assembler) { |
| 874 DoBinaryOp(Runtime::kAdd, assembler); | 847 DoBinaryOp(Runtime::kAdd, assembler); |
| 875 } | 848 } |
| 876 | 849 |
| 877 | 850 |
| 878 // Sub <src> | 851 // Sub <src> |
| 879 // | 852 // |
| 880 // Subtract register <src> from accumulator. | 853 // Subtract register <src> from accumulator. |
| 881 void Interpreter::DoSub(compiler::InterpreterAssembler* assembler) { | 854 void Interpreter::DoSub(InterpreterAssembler* assembler) { |
| 882 DoBinaryOp(Runtime::kSubtract, assembler); | 855 DoBinaryOp(Runtime::kSubtract, assembler); |
| 883 } | 856 } |
| 884 | 857 |
| 885 | 858 |
| 886 // Mul <src> | 859 // Mul <src> |
| 887 // | 860 // |
| 888 // Multiply accumulator by register <src>. | 861 // Multiply accumulator by register <src>. |
| 889 void Interpreter::DoMul(compiler::InterpreterAssembler* assembler) { | 862 void Interpreter::DoMul(InterpreterAssembler* assembler) { |
| 890 DoBinaryOp(Runtime::kMultiply, assembler); | 863 DoBinaryOp(Runtime::kMultiply, assembler); |
| 891 } | 864 } |
| 892 | 865 |
| 893 | 866 |
| 894 // Div <src> | 867 // Div <src> |
| 895 // | 868 // |
| 896 // Divide register <src> by accumulator. | 869 // Divide register <src> by accumulator. |
| 897 void Interpreter::DoDiv(compiler::InterpreterAssembler* assembler) { | 870 void Interpreter::DoDiv(InterpreterAssembler* assembler) { |
| 898 DoBinaryOp(Runtime::kDivide, assembler); | 871 DoBinaryOp(Runtime::kDivide, assembler); |
| 899 } | 872 } |
| 900 | 873 |
| 901 | 874 |
| 902 // Mod <src> | 875 // Mod <src> |
| 903 // | 876 // |
| 904 // Modulo register <src> by accumulator. | 877 // Modulo register <src> by accumulator. |
| 905 void Interpreter::DoMod(compiler::InterpreterAssembler* assembler) { | 878 void Interpreter::DoMod(InterpreterAssembler* assembler) { |
| 906 DoBinaryOp(Runtime::kModulus, assembler); | 879 DoBinaryOp(Runtime::kModulus, assembler); |
| 907 } | 880 } |
| 908 | 881 |
| 909 | 882 |
| 910 // BitwiseOr <src> | 883 // BitwiseOr <src> |
| 911 // | 884 // |
| 912 // BitwiseOr register <src> to accumulator. | 885 // BitwiseOr register <src> to accumulator. |
| 913 void Interpreter::DoBitwiseOr(compiler::InterpreterAssembler* assembler) { | 886 void Interpreter::DoBitwiseOr(InterpreterAssembler* assembler) { |
| 914 DoBinaryOp(Runtime::kBitwiseOr, assembler); | 887 DoBinaryOp(Runtime::kBitwiseOr, assembler); |
| 915 } | 888 } |
| 916 | 889 |
| 917 | 890 |
| 918 // BitwiseXor <src> | 891 // BitwiseXor <src> |
| 919 // | 892 // |
| 920 // BitwiseXor register <src> to accumulator. | 893 // BitwiseXor register <src> to accumulator. |
| 921 void Interpreter::DoBitwiseXor(compiler::InterpreterAssembler* assembler) { | 894 void Interpreter::DoBitwiseXor(InterpreterAssembler* assembler) { |
| 922 DoBinaryOp(Runtime::kBitwiseXor, assembler); | 895 DoBinaryOp(Runtime::kBitwiseXor, assembler); |
| 923 } | 896 } |
| 924 | 897 |
| 925 | 898 |
| 926 // BitwiseAnd <src> | 899 // BitwiseAnd <src> |
| 927 // | 900 // |
| 928 // BitwiseAnd register <src> to accumulator. | 901 // BitwiseAnd register <src> to accumulator. |
| 929 void Interpreter::DoBitwiseAnd(compiler::InterpreterAssembler* assembler) { | 902 void Interpreter::DoBitwiseAnd(InterpreterAssembler* assembler) { |
| 930 DoBinaryOp(Runtime::kBitwiseAnd, assembler); | 903 DoBinaryOp(Runtime::kBitwiseAnd, assembler); |
| 931 } | 904 } |
| 932 | 905 |
| 933 | 906 |
| 934 // ShiftLeft <src> | 907 // ShiftLeft <src> |
| 935 // | 908 // |
| 936 // Left shifts register <src> by the count specified in the accumulator. | 909 // Left shifts register <src> by the count specified in the accumulator. |
| 937 // Register <src> is converted to an int32 and the accumulator to uint32 | 910 // Register <src> is converted to an int32 and the accumulator to uint32 |
| 938 // before the operation. 5 lsb bits from the accumulator are used as count | 911 // before the operation. 5 lsb bits from the accumulator are used as count |
| 939 // i.e. <src> << (accumulator & 0x1F). | 912 // i.e. <src> << (accumulator & 0x1F). |
| 940 void Interpreter::DoShiftLeft(compiler::InterpreterAssembler* assembler) { | 913 void Interpreter::DoShiftLeft(InterpreterAssembler* assembler) { |
| 941 DoBinaryOp(Runtime::kShiftLeft, assembler); | 914 DoBinaryOp(Runtime::kShiftLeft, assembler); |
| 942 } | 915 } |
| 943 | 916 |
| 944 | 917 |
| 945 // ShiftRight <src> | 918 // ShiftRight <src> |
| 946 // | 919 // |
| 947 // Right shifts register <src> by the count specified in the accumulator. | 920 // Right shifts register <src> by the count specified in the accumulator. |
| 948 // Result is sign extended. Register <src> is converted to an int32 and the | 921 // Result is sign extended. Register <src> is converted to an int32 and the |
| 949 // accumulator to uint32 before the operation. 5 lsb bits from the accumulator | 922 // accumulator to uint32 before the operation. 5 lsb bits from the accumulator |
| 950 // are used as count i.e. <src> >> (accumulator & 0x1F). | 923 // are used as count i.e. <src> >> (accumulator & 0x1F). |
| 951 void Interpreter::DoShiftRight(compiler::InterpreterAssembler* assembler) { | 924 void Interpreter::DoShiftRight(InterpreterAssembler* assembler) { |
| 952 DoBinaryOp(Runtime::kShiftRight, assembler); | 925 DoBinaryOp(Runtime::kShiftRight, assembler); |
| 953 } | 926 } |
| 954 | 927 |
| 955 | 928 |
| 956 // ShiftRightLogical <src> | 929 // ShiftRightLogical <src> |
| 957 // | 930 // |
| 958 // Right shifts register <src> by the count specified in the accumulator. | 931 // Right shifts register <src> by the count specified in the accumulator. |
| 959 // Result is zero-filled. The accumulator and register <src> are converted to | 932 // Result is zero-filled. The accumulator and register <src> are converted to |
| 960 // uint32 before the operation. 5 lsb bits from the accumulator are used as | 933 // uint32 before the operation. 5 lsb bits from the accumulator are used as |
| 961 // count i.e. <src> >>> (accumulator & 0x1F). | 934 // count i.e. <src> >>> (accumulator & 0x1F). |
| 962 void Interpreter::DoShiftRightLogical( | 935 void Interpreter::DoShiftRightLogical(InterpreterAssembler* assembler) { |
| 963 compiler::InterpreterAssembler* assembler) { | |
| 964 DoBinaryOp(Runtime::kShiftRightLogical, assembler); | 936 DoBinaryOp(Runtime::kShiftRightLogical, assembler); |
| 965 } | 937 } |
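As a concrete illustration of the "only the 5 least-significant bits of the count are used" rule in the shift comments above, a standalone C++ sketch mirroring the ECMAScript masking (not V8 code): shifting by 33 behaves like shifting by 1.

```cpp
#include <cassert>
#include <cstdint>

// Mirrors <src> << (count & 0x1F): only the low 5 bits of the count matter.
int32_t ShiftLeft(int32_t value, uint32_t count) {
  return value << (count & 0x1F);
}

// Mirrors the logical (zero-filling) right shift: operate on uint32.
uint32_t ShiftRightLogical(uint32_t value, uint32_t count) {
  return value >> (count & 0x1F);
}

int main() {
  assert(ShiftLeft(1, 33) == 2);                             // 33 & 0x1F == 1
  assert(ShiftRightLogical(0x80000000u, 1) == 0x40000000u);  // zero-filled
}
```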
| 966 | 938 |
| 967 | |
| 968 void Interpreter::DoCountOp(Runtime::FunctionId function_id, | 939 void Interpreter::DoCountOp(Runtime::FunctionId function_id, |
| 969 compiler::InterpreterAssembler* assembler) { | 940 InterpreterAssembler* assembler) { |
| 970 Node* value = __ GetAccumulator(); | 941 Node* value = __ GetAccumulator(); |
| 971 Node* one = __ NumberConstant(1); | 942 Node* one = __ NumberConstant(1); |
| 972 Node* result = __ CallRuntime(function_id, value, one); | 943 Node* context = __ GetContext(); |
| 944 Node* result = __ CallRuntime(function_id, context, value, one); |
| 973 __ SetAccumulator(result); | 945 __ SetAccumulator(result); |
| 974 __ Dispatch(); | 946 __ Dispatch(); |
| 975 } | 947 } |
| 976 | 948 |
| 977 | 949 |
| 978 // Inc | 950 // Inc |
| 979 // | 951 // |
| 980 // Increments value in the accumulator by one. | 952 // Increments value in the accumulator by one. |
| 981 void Interpreter::DoInc(compiler::InterpreterAssembler* assembler) { | 953 void Interpreter::DoInc(InterpreterAssembler* assembler) { |
| 982 DoCountOp(Runtime::kAdd, assembler); | 954 DoCountOp(Runtime::kAdd, assembler); |
| 983 } | 955 } |
| 984 | 956 |
| 985 | 957 |
| 986 // Dec | 958 // Dec |
| 987 // | 959 // |
| 988 // Decrements value in the accumulator by one. | 960 // Decrements value in the accumulator by one. |
| 989 void Interpreter::DoDec(compiler::InterpreterAssembler* assembler) { | 961 void Interpreter::DoDec(InterpreterAssembler* assembler) { |
| 990 DoCountOp(Runtime::kSubtract, assembler); | 962 DoCountOp(Runtime::kSubtract, assembler); |
| 991 } | 963 } |
| 992 | 964 |
| 993 | 965 |
| 994 // LogicalNot | 966 // LogicalNot |
| 995 // | 967 // |
| 996 // Perform logical-not on the accumulator, first casting the | 968 // Perform logical-not on the accumulator, first casting the |
| 997 // accumulator to a boolean value if required. | 969 // accumulator to a boolean value if required. |
| 998 void Interpreter::DoLogicalNot(compiler::InterpreterAssembler* assembler) { | 970 void Interpreter::DoLogicalNot(InterpreterAssembler* assembler) { |
| 999 Node* accumulator = __ GetAccumulator(); | 971 Node* accumulator = __ GetAccumulator(); |
| 1000 Node* result = __ CallRuntime(Runtime::kInterpreterLogicalNot, accumulator); | 972 Node* context = __ GetContext(); |
| 973 Node* result = |
| 974 __ CallRuntime(Runtime::kInterpreterLogicalNot, context, accumulator); |
| 1001 __ SetAccumulator(result); | 975 __ SetAccumulator(result); |
| 1002 __ Dispatch(); | 976 __ Dispatch(); |
| 1003 } | 977 } |
| 1004 | 978 |
| 1005 | 979 |
| 1006 // TypeOf | 980 // TypeOf |
| 1007 // | 981 // |
| 1008 // Load the accumulator with the string representing the type of the | 982 // Load the accumulator with the string representing the type of the |
| 1009 // object in the accumulator. | 983 // object in the accumulator. |
| 1010 void Interpreter::DoTypeOf(compiler::InterpreterAssembler* assembler) { | 984 void Interpreter::DoTypeOf(InterpreterAssembler* assembler) { |
| 1011 Node* accumulator = __ GetAccumulator(); | 985 Node* accumulator = __ GetAccumulator(); |
| 1012 Node* result = __ CallRuntime(Runtime::kInterpreterTypeOf, accumulator); | 986 Node* context = __ GetContext(); |
| 987 Node* result = |
| 988 __ CallRuntime(Runtime::kInterpreterTypeOf, context, accumulator); |
| 1013 __ SetAccumulator(result); | 989 __ SetAccumulator(result); |
| 1014 __ Dispatch(); | 990 __ Dispatch(); |
| 1015 } | 991 } |
| 1016 | 992 |
| 1017 | |
| 1018 void Interpreter::DoDelete(Runtime::FunctionId function_id, | 993 void Interpreter::DoDelete(Runtime::FunctionId function_id, |
| 1019 compiler::InterpreterAssembler* assembler) { | 994 InterpreterAssembler* assembler) { |
| 1020 Node* reg_index = __ BytecodeOperandReg(0); | 995 Node* reg_index = __ BytecodeOperandReg(0); |
| 1021 Node* object = __ LoadRegister(reg_index); | 996 Node* object = __ LoadRegister(reg_index); |
| 1022 Node* key = __ GetAccumulator(); | 997 Node* key = __ GetAccumulator(); |
| 1023 Node* result = __ CallRuntime(function_id, object, key); | 998 Node* context = __ GetContext(); |
| 999 Node* result = __ CallRuntime(function_id, context, object, key); |
| 1024 __ SetAccumulator(result); | 1000 __ SetAccumulator(result); |
| 1025 __ Dispatch(); | 1001 __ Dispatch(); |
| 1026 } | 1002 } |
| 1027 | 1003 |
| 1028 | 1004 |
| 1029 // DeletePropertyStrict | 1005 // DeletePropertyStrict |
| 1030 // | 1006 // |
| 1031 // Delete the property specified in the accumulator from the object | 1007 // Delete the property specified in the accumulator from the object |
| 1032 // referenced by the register operand following strict mode semantics. | 1008 // referenced by the register operand following strict mode semantics. |
| 1033 void Interpreter::DoDeletePropertyStrict( | 1009 void Interpreter::DoDeletePropertyStrict(InterpreterAssembler* assembler) { |
| 1034 compiler::InterpreterAssembler* assembler) { | |
| 1035 DoDelete(Runtime::kDeleteProperty_Strict, assembler); | 1010 DoDelete(Runtime::kDeleteProperty_Strict, assembler); |
| 1036 } | 1011 } |
| 1037 | 1012 |
| 1038 | 1013 |
| 1039 // DeletePropertySloppy | 1014 // DeletePropertySloppy |
| 1040 // | 1015 // |
| 1041 // Delete the property specified in the accumulator from the object | 1016 // Delete the property specified in the accumulator from the object |
| 1042 // referenced by the register operand following sloppy mode semantics. | 1017 // referenced by the register operand following sloppy mode semantics. |
| 1043 void Interpreter::DoDeletePropertySloppy( | 1018 void Interpreter::DoDeletePropertySloppy(InterpreterAssembler* assembler) { |
| 1044 compiler::InterpreterAssembler* assembler) { | |
| 1045 DoDelete(Runtime::kDeleteProperty_Sloppy, assembler); | 1019 DoDelete(Runtime::kDeleteProperty_Sloppy, assembler); |
| 1046 } | 1020 } |
| 1047 | 1021 |
| 1048 | 1022 |
| 1049 // DeleteLookupSlot | 1023 // DeleteLookupSlot |
| 1050 // | 1024 // |
| 1051 // Delete the variable with the name specified in the accumulator by dynamically | 1025 // Delete the variable with the name specified in the accumulator by dynamically |
| 1052 // looking it up. | 1026 // looking it up. |
| 1053 void Interpreter::DoDeleteLookupSlot( | 1027 void Interpreter::DoDeleteLookupSlot(InterpreterAssembler* assembler) { |
| 1054 compiler::InterpreterAssembler* assembler) { | |
| 1055 Node* name = __ GetAccumulator(); | 1028 Node* name = __ GetAccumulator(); |
| 1056 Node* context = __ GetContext(); | 1029 Node* context = __ GetContext(); |
| 1057 Node* result = __ CallRuntime(Runtime::kDeleteLookupSlot, context, name); | 1030 Node* result = |
| 1031 __ CallRuntime(Runtime::kDeleteLookupSlot, context, context, name); |
| 1058 __ SetAccumulator(result); | 1032 __ SetAccumulator(result); |
| 1059 __ Dispatch(); | 1033 __ Dispatch(); |
| 1060 } | 1034 } |
| 1061 | 1035 |
| 1062 | 1036 void Interpreter::DoJSCall(InterpreterAssembler* assembler) { |
| 1063 void Interpreter::DoJSCall(compiler::InterpreterAssembler* assembler) { | |
| 1064 Node* function_reg = __ BytecodeOperandReg(0); | 1037 Node* function_reg = __ BytecodeOperandReg(0); |
| 1065 Node* function = __ LoadRegister(function_reg); | 1038 Node* function = __ LoadRegister(function_reg); |
| 1066 Node* receiver_reg = __ BytecodeOperandReg(1); | 1039 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 1067 Node* receiver_arg = __ RegisterLocation(receiver_reg); | 1040 Node* receiver_arg = __ RegisterLocation(receiver_reg); |
| 1068 Node* receiver_args_count = __ BytecodeOperandCount(2); | 1041 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 1069 Node* receiver_count = __ Int32Constant(1); | 1042 Node* receiver_count = __ Int32Constant(1); |
| 1070 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 1043 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 1071 // TODO(rmcilroy): Use the call type feedback slot to call via CallIC. | 1044 Node* context = __ GetContext(); |
| 1072 Node* result = __ CallJS(function, receiver_arg, args_count); | 1045 // TODO(rmcilroy): Use the call type feedback slot to call via CallStub. |
| 1046 Node* result = __ CallJS(function, context, receiver_arg, args_count); |
| 1073 __ SetAccumulator(result); | 1047 __ SetAccumulator(result); |
| 1074 __ Dispatch(); | 1048 __ Dispatch(); |
| 1075 } | 1049 } |
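The <arg_count> operand of the call bytecodes counts the receiver as well, which is why the handler above subtracts a constant one. A tiny sketch of that arithmetic (illustrative only):

```cpp
#include <cassert>

// The count operand covers the receiver plus the arguments, so the number of
// real arguments is one less.
int ArgsCount(int receiver_args_count) {
  const int kReceiverCount = 1;
  return receiver_args_count - kReceiverCount;
}

int main() {
  // e.g. f.call(receiver, a, b): receiver and two arguments occupy
  // consecutive registers, giving a count operand of 3.
  assert(ArgsCount(3) == 2);
}
```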
| 1076 | 1050 |
| 1077 | 1051 |
| 1078 // Call <callable> <receiver> <arg_count> | 1052 // Call <callable> <receiver> <arg_count> |
| 1079 // | 1053 // |
| 1080 // Call a JSFunction or Callable in |callable| with the |receiver| and | 1054 // Call a JSFunction or Callable in |callable| with the |receiver| and |
| 1081 // |arg_count| arguments in subsequent registers. | 1055 // |arg_count| arguments in subsequent registers. |
| 1082 void Interpreter::DoCall(compiler::InterpreterAssembler* assembler) { | 1056 void Interpreter::DoCall(InterpreterAssembler* assembler) { |
| 1083 DoJSCall(assembler); | 1057 DoJSCall(assembler); |
| 1084 } | 1058 } |
| 1085 | 1059 |
| 1086 | 1060 |
| 1087 // CallWide <callable> <receiver> <arg_count> | 1061 // CallWide <callable> <receiver> <arg_count> |
| 1088 // | 1062 // |
| 1089 // Call a JSFunction or Callable in |callable| with the |receiver| and | 1063 // Call a JSFunction or Callable in |callable| with the |receiver| and |
| 1090 // |arg_count| arguments in subsequent registers. | 1064 // |arg_count| arguments in subsequent registers. |
| 1091 void Interpreter::DoCallWide(compiler::InterpreterAssembler* assembler) { | 1065 void Interpreter::DoCallWide(InterpreterAssembler* assembler) { |
| 1092 DoJSCall(assembler); | 1066 DoJSCall(assembler); |
| 1093 } | 1067 } |
| 1094 | 1068 |
| 1095 | 1069 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) { |
| 1096 void Interpreter::DoCallRuntimeCommon( | |
| 1097 compiler::InterpreterAssembler* assembler) { | |
| 1098 Node* function_id = __ BytecodeOperandIdx(0); | 1070 Node* function_id = __ BytecodeOperandIdx(0); |
| 1099 Node* first_arg_reg = __ BytecodeOperandReg(1); | 1071 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1100 Node* first_arg = __ RegisterLocation(first_arg_reg); | 1072 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1101 Node* args_count = __ BytecodeOperandCount(2); | 1073 Node* args_count = __ BytecodeOperandCount(2); |
| 1102 Node* result = __ CallRuntime(function_id, first_arg, args_count); | 1074 Node* context = __ GetContext(); |
| 1075 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); |
| 1103 __ SetAccumulator(result); | 1076 __ SetAccumulator(result); |
| 1104 __ Dispatch(); | 1077 __ Dispatch(); |
| 1105 } | 1078 } |
| 1106 | 1079 |
| 1107 | 1080 |
| 1108 // CallRuntime <function_id> <first_arg> <arg_count> | 1081 // CallRuntime <function_id> <first_arg> <arg_count> |
| 1109 // | 1082 // |
| 1110 // Call the runtime function |function_id| with the first argument in | 1083 // Call the runtime function |function_id| with the first argument in |
| 1111 // register |first_arg| and |arg_count| arguments in subsequent | 1084 // register |first_arg| and |arg_count| arguments in subsequent |
| 1112 // registers. | 1085 // registers. |
| 1113 void Interpreter::DoCallRuntime(compiler::InterpreterAssembler* assembler) { | 1086 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { |
| 1114 DoCallRuntimeCommon(assembler); | 1087 DoCallRuntimeCommon(assembler); |
| 1115 } | 1088 } |
| 1116 | 1089 |
| 1117 | 1090 |
| 1118 // CallRuntimeWide <function_id> <first_arg> <arg_count> | 1091 // CallRuntimeWide <function_id> <first_arg> <arg_count> |
| 1119 // | 1092 // |
| 1120 // Call the runtime function |function_id| with the first argument in | 1093 // Call the runtime function |function_id| with the first argument in |
| 1121 // register |first_arg| and |arg_count| arguments in subsequent | 1094 // register |first_arg| and |arg_count| arguments in subsequent |
| 1122 // registers. | 1095 // registers. |
| 1123 void Interpreter::DoCallRuntimeWide(compiler::InterpreterAssembler* assembler) { | 1096 void Interpreter::DoCallRuntimeWide(InterpreterAssembler* assembler) { |
| 1124 DoCallRuntimeCommon(assembler); | 1097 DoCallRuntimeCommon(assembler); |
| 1125 } | 1098 } |
| 1126 | 1099 |
| 1127 | 1100 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) { |
| 1128 void Interpreter::DoCallRuntimeForPairCommon( | |
| 1129 compiler::InterpreterAssembler* assembler) { | |
| 1130 // Call the runtime function. | 1101 // Call the runtime function. |
| 1131 Node* function_id = __ BytecodeOperandIdx(0); | 1102 Node* function_id = __ BytecodeOperandIdx(0); |
| 1132 Node* first_arg_reg = __ BytecodeOperandReg(1); | 1103 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1133 Node* first_arg = __ RegisterLocation(first_arg_reg); | 1104 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1134 Node* args_count = __ BytecodeOperandCount(2); | 1105 Node* args_count = __ BytecodeOperandCount(2); |
| 1135 Node* result_pair = __ CallRuntime(function_id, first_arg, args_count, 2); | 1106 Node* context = __ GetContext(); |
| 1107 Node* result_pair = |
| 1108 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); |
| 1136 | 1109 |
| 1137 // Store the results in <first_return> and <first_return + 1> | 1110 // Store the results in <first_return> and <first_return + 1> |
| 1138 Node* first_return_reg = __ BytecodeOperandReg(3); | 1111 Node* first_return_reg = __ BytecodeOperandReg(3); |
| 1139 Node* second_return_reg = __ NextRegister(first_return_reg); | 1112 Node* second_return_reg = __ NextRegister(first_return_reg); |
| 1140 Node* result0 = __ Projection(0, result_pair); | 1113 Node* result0 = __ Projection(0, result_pair); |
| 1141 Node* result1 = __ Projection(1, result_pair); | 1114 Node* result1 = __ Projection(1, result_pair); |
| 1142 __ StoreRegister(result0, first_return_reg); | 1115 __ StoreRegister(result0, first_return_reg); |
| 1143 __ StoreRegister(result1, second_return_reg); | 1116 __ StoreRegister(result1, second_return_reg); |
| 1144 __ Dispatch(); | 1117 __ Dispatch(); |
| 1145 } | 1118 } |
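A minimal sketch of the pattern above, in standard C++ with an invented pair-returning function: the two projections of the result are stored into the consecutive registers <first_return> and <first_return + 1>.

```cpp
#include <cassert>
#include <utility>
#include <vector>

// Hypothetical runtime function returning two values.
std::pair<int, int> DivMod(int a, int b) { return {a / b, a % b}; }

int main() {
  std::vector<int> registers(8, 0);
  int first_return = 4;                              // <first_return> operand
  std::pair<int, int> result_pair = DivMod(7, 3);
  registers[first_return] = result_pair.first;       // Projection(0)
  registers[first_return + 1] = result_pair.second;  // Projection(1)
  assert(registers[4] == 2 && registers[5] == 1);
}
```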
| 1146 | 1119 |
| 1147 | 1120 |
| 1148 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> | 1121 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> |
| 1149 // | 1122 // |
| 1150 // Call the runtime function |function_id| which returns a pair, with the | 1123 // Call the runtime function |function_id| which returns a pair, with the |
| 1151 // first argument in register |first_arg| and |arg_count| arguments in | 1124 // first argument in register |first_arg| and |arg_count| arguments in |
| 1152 // subsequent registers. Returns the result in <first_return> and | 1125 // subsequent registers. Returns the result in <first_return> and |
| 1153 // <first_return + 1> | 1126 // <first_return + 1> |
| 1154 void Interpreter::DoCallRuntimeForPair( | 1127 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { |
| 1155 compiler::InterpreterAssembler* assembler) { | |
| 1156 DoCallRuntimeForPairCommon(assembler); | 1128 DoCallRuntimeForPairCommon(assembler); |
| 1157 } | 1129 } |
| 1158 | 1130 |
| 1159 | 1131 |
| 1160 // CallRuntimeForPairWide <function_id> <first_arg> <arg_count> <first_return> | 1132 // CallRuntimeForPairWide <function_id> <first_arg> <arg_count> <first_return> |
| 1161 // | 1133 // |
| 1162 // Call the runtime function |function_id| which returns a pair, with the | 1134 // Call the runtime function |function_id| which returns a pair, with the |
| 1163 // first argument in register |first_arg| and |arg_count| arguments in | 1135 // first argument in register |first_arg| and |arg_count| arguments in |
| 1164 // subsequent registers. Returns the result in <first_return> and | 1136 // subsequent registers. Returns the result in <first_return> and |
| 1165 // <first_return + 1> | 1137 // <first_return + 1> |
| 1166 void Interpreter::DoCallRuntimeForPairWide( | 1138 void Interpreter::DoCallRuntimeForPairWide(InterpreterAssembler* assembler) { |
| 1167 compiler::InterpreterAssembler* assembler) { | |
| 1168 DoCallRuntimeForPairCommon(assembler); | 1139 DoCallRuntimeForPairCommon(assembler); |
| 1169 } | 1140 } |
| 1170 | 1141 |
| 1171 | 1142 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { |
| 1172 void Interpreter::DoCallJSRuntimeCommon( | |
| 1173 compiler::InterpreterAssembler* assembler) { | |
| 1174 Node* context_index = __ BytecodeOperandIdx(0); | 1143 Node* context_index = __ BytecodeOperandIdx(0); |
| 1175 Node* receiver_reg = __ BytecodeOperandReg(1); | 1144 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 1176 Node* first_arg = __ RegisterLocation(receiver_reg); | 1145 Node* first_arg = __ RegisterLocation(receiver_reg); |
| 1177 Node* receiver_args_count = __ BytecodeOperandCount(2); | 1146 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 1178 Node* receiver_count = __ Int32Constant(1); | 1147 Node* receiver_count = __ Int32Constant(1); |
| 1179 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 1148 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 1180 | 1149 |
| 1181 // Get the function to call from the native context. | 1150 // Get the function to call from the native context. |
| 1182 Node* context = __ GetContext(); | 1151 Node* context = __ GetContext(); |
| 1183 Node* native_context = | 1152 Node* native_context = |
| 1184 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 1153 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 1185 Node* function = __ LoadContextSlot(native_context, context_index); | 1154 Node* function = __ LoadContextSlot(native_context, context_index); |
| 1186 | 1155 |
| 1187 // Call the function. | 1156 // Call the function. |
| 1188 Node* result = __ CallJS(function, first_arg, args_count); | 1157 Node* result = __ CallJS(function, context, first_arg, args_count); |
| 1189 __ SetAccumulator(result); | 1158 __ SetAccumulator(result); |
| 1190 __ Dispatch(); | 1159 __ Dispatch(); |
| 1191 } | 1160 } |
| 1192 | 1161 |
| 1193 | 1162 |
| 1194 // CallJSRuntime <context_index> <receiver> <arg_count> | 1163 // CallJSRuntime <context_index> <receiver> <arg_count> |
| 1195 // | 1164 // |
| 1196 // Call the JS runtime function at native context index |context_index| with the receiver | 1165 // Call the JS runtime function at native context index |context_index| with the receiver |
| 1197 // in register |receiver| and |arg_count| arguments in subsequent registers. | 1166 // in register |receiver| and |arg_count| arguments in subsequent registers. |
| 1198 void Interpreter::DoCallJSRuntime(compiler::InterpreterAssembler* assembler) { | 1167 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { |
| 1199 DoCallJSRuntimeCommon(assembler); | 1168 DoCallJSRuntimeCommon(assembler); |
| 1200 } | 1169 } |
| 1201 | 1170 |
| 1202 | 1171 |
| 1203 // CallJSRuntimeWide <context_index> <receiver> <arg_count> | 1172 // CallJSRuntimeWide <context_index> <receiver> <arg_count> |
| 1204 // | 1173 // |
| 1205 // Call the JS runtime function at native context index |context_index| with the receiver | 1174 // Call the JS runtime function at native context index |context_index| with the receiver |
| 1206 // in register |receiver| and |arg_count| arguments in subsequent registers. | 1175 // in register |receiver| and |arg_count| arguments in subsequent registers. |
| 1207 void Interpreter::DoCallJSRuntimeWide( | 1176 void Interpreter::DoCallJSRuntimeWide(InterpreterAssembler* assembler) { |
| 1208 compiler::InterpreterAssembler* assembler) { | |
| 1209 DoCallJSRuntimeCommon(assembler); | 1177 DoCallJSRuntimeCommon(assembler); |
| 1210 } | 1178 } |
| 1211 | 1179 |
| 1212 | 1180 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { |
| 1213 void Interpreter::DoCallConstruct(compiler::InterpreterAssembler* assembler) { | |
| 1214 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); | 1181 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); |
| 1215 Node* constructor_reg = __ BytecodeOperandReg(0); | 1182 Node* constructor_reg = __ BytecodeOperandReg(0); |
| 1216 Node* constructor = __ LoadRegister(constructor_reg); | 1183 Node* constructor = __ LoadRegister(constructor_reg); |
| 1217 Node* first_arg_reg = __ BytecodeOperandReg(1); | 1184 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1218 Node* first_arg = __ RegisterLocation(first_arg_reg); | 1185 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1219 Node* args_count = __ BytecodeOperandCount(2); | 1186 Node* args_count = __ BytecodeOperandCount(2); |
| 1220 Node* result = | 1187 Node* context = __ GetContext(); |
| 1221 __ CallConstruct(constructor, constructor, first_arg, args_count); | 1188 Node* result = __ CallConstruct(constructor, context, constructor, first_arg, |
| 1189 args_count); |
| 1222 __ SetAccumulator(result); | 1190 __ SetAccumulator(result); |
| 1223 __ Dispatch(); | 1191 __ Dispatch(); |
| 1224 } | 1192 } |
| 1225 | 1193 |
| 1226 | 1194 |
| 1227 // New <constructor> <first_arg> <arg_count> | 1195 // New <constructor> <first_arg> <arg_count> |
| 1228 // | 1196 // |
| 1229 // Call operator new with |constructor| and the first argument in | 1197 // Call operator new with |constructor| and the first argument in |
| 1230 // register |first_arg| and |arg_count| arguments in subsequent | 1198 // register |first_arg| and |arg_count| arguments in subsequent |
| 1231 // registers. | 1199 // registers. |
| 1232 void Interpreter::DoNew(compiler::InterpreterAssembler* assembler) { | 1200 void Interpreter::DoNew(InterpreterAssembler* assembler) { |
| 1233 DoCallConstruct(assembler); | 1201 DoCallConstruct(assembler); |
| 1234 } | 1202 } |
| 1235 | 1203 |
| 1236 | 1204 |
| 1237 // NewWide <constructor> <first_arg> <arg_count> | 1205 // NewWide <constructor> <first_arg> <arg_count> |
| 1238 // | 1206 // |
| 1239 // Call operator new with |constructor| and the first argument in | 1207 // Call operator new with |constructor| and the first argument in |
| 1240 // register |first_arg| and |arg_count| arguments in subsequent | 1208 // register |first_arg| and |arg_count| arguments in subsequent |
| 1241 // registers. | 1209 // registers. |
| 1242 void Interpreter::DoNewWide(compiler::InterpreterAssembler* assembler) { | 1210 void Interpreter::DoNewWide(InterpreterAssembler* assembler) { |
| 1243 DoCallConstruct(assembler); | 1211 DoCallConstruct(assembler); |
| 1244 } | 1212 } |
| 1245 | 1213 |
| 1246 | 1214 |
| 1247 // TestEqual <src> | 1215 // TestEqual <src> |
| 1248 // | 1216 // |
| 1249 // Test if the value in the <src> register equals the accumulator. | 1217 // Test if the value in the <src> register equals the accumulator. |
| 1250 void Interpreter::DoTestEqual(compiler::InterpreterAssembler* assembler) { | 1218 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { |
| 1251 DoBinaryOp(Runtime::kInterpreterEquals, assembler); | 1219 DoBinaryOp(Runtime::kInterpreterEquals, assembler); |
| 1252 } | 1220 } |
| 1253 | 1221 |
| 1254 | 1222 |
| 1255 // TestNotEqual <src> | 1223 // TestNotEqual <src> |
| 1256 // | 1224 // |
| 1257 // Test if the value in the <src> register is not equal to the accumulator. | 1225 // Test if the value in the <src> register is not equal to the accumulator. |
| 1258 void Interpreter::DoTestNotEqual(compiler::InterpreterAssembler* assembler) { | 1226 void Interpreter::DoTestNotEqual(InterpreterAssembler* assembler) { |
| 1259 DoBinaryOp(Runtime::kInterpreterNotEquals, assembler); | 1227 DoBinaryOp(Runtime::kInterpreterNotEquals, assembler); |
| 1260 } | 1228 } |
| 1261 | 1229 |
| 1262 | 1230 |
| 1263 // TestEqualStrict <src> | 1231 // TestEqualStrict <src> |
| 1264 // | 1232 // |
| 1265 // Test if the value in the <src> register is strictly equal to the accumulator. | 1233 // Test if the value in the <src> register is strictly equal to the accumulator. |
| 1266 void Interpreter::DoTestEqualStrict(compiler::InterpreterAssembler* assembler) { | 1234 void Interpreter::DoTestEqualStrict(InterpreterAssembler* assembler) { |
| 1267 DoBinaryOp(Runtime::kInterpreterStrictEquals, assembler); | 1235 DoBinaryOp(Runtime::kInterpreterStrictEquals, assembler); |
| 1268 } | 1236 } |
| 1269 | 1237 |
| 1270 | 1238 |
| 1271 // TestNotEqualStrict <src> | 1239 // TestNotEqualStrict <src> |
| 1272 // | 1240 // |
| 1273 // Test if the value in the <src> register is not strictly equal to the | 1241 // Test if the value in the <src> register is not strictly equal to the |
| 1274 // accumulator. | 1242 // accumulator. |
| 1275 void Interpreter::DoTestNotEqualStrict( | 1243 void Interpreter::DoTestNotEqualStrict(InterpreterAssembler* assembler) { |
| 1276 compiler::InterpreterAssembler* assembler) { | |
| 1277 DoBinaryOp(Runtime::kInterpreterStrictNotEquals, assembler); | 1244 DoBinaryOp(Runtime::kInterpreterStrictNotEquals, assembler); |
| 1278 } | 1245 } |
| 1279 | 1246 |
| 1280 | 1247 |
| 1281 // TestLessThan <src> | 1248 // TestLessThan <src> |
| 1282 // | 1249 // |
| 1283 // Test if the value in the <src> register is less than the accumulator. | 1250 // Test if the value in the <src> register is less than the accumulator. |
| 1284 void Interpreter::DoTestLessThan(compiler::InterpreterAssembler* assembler) { | 1251 void Interpreter::DoTestLessThan(InterpreterAssembler* assembler) { |
| 1285 DoBinaryOp(Runtime::kInterpreterLessThan, assembler); | 1252 DoBinaryOp(Runtime::kInterpreterLessThan, assembler); |
| 1286 } | 1253 } |
| 1287 | 1254 |
| 1288 | 1255 |
| 1289 // TestGreaterThan <src> | 1256 // TestGreaterThan <src> |
| 1290 // | 1257 // |
| 1291 // Test if the value in the <src> register is greater than the accumulator. | 1258 // Test if the value in the <src> register is greater than the accumulator. |
| 1292 void Interpreter::DoTestGreaterThan(compiler::InterpreterAssembler* assembler) { | 1259 void Interpreter::DoTestGreaterThan(InterpreterAssembler* assembler) { |
| 1293 DoBinaryOp(Runtime::kInterpreterGreaterThan, assembler); | 1260 DoBinaryOp(Runtime::kInterpreterGreaterThan, assembler); |
| 1294 } | 1261 } |
| 1295 | 1262 |
| 1296 | 1263 |
| 1297 // TestLessThanOrEqual <src> | 1264 // TestLessThanOrEqual <src> |
| 1298 // | 1265 // |
| 1299 // Test if the value in the <src> register is less than or equal to the | 1266 // Test if the value in the <src> register is less than or equal to the |
| 1300 // accumulator. | 1267 // accumulator. |
| 1301 void Interpreter::DoTestLessThanOrEqual( | 1268 void Interpreter::DoTestLessThanOrEqual(InterpreterAssembler* assembler) { |
| 1302 compiler::InterpreterAssembler* assembler) { | |
| 1303 DoBinaryOp(Runtime::kInterpreterLessThanOrEqual, assembler); | 1269 DoBinaryOp(Runtime::kInterpreterLessThanOrEqual, assembler); |
| 1304 } | 1270 } |
| 1305 | 1271 |
| 1306 | 1272 |
| 1307 // TestGreaterThanOrEqual <src> | 1273 // TestGreaterThanOrEqual <src> |
| 1308 // | 1274 // |
| 1309 // Test if the value in the <src> register is greater than or equal to the | 1275 // Test if the value in the <src> register is greater than or equal to the |
| 1310 // accumulator. | 1276 // accumulator. |
| 1311 void Interpreter::DoTestGreaterThanOrEqual( | 1277 void Interpreter::DoTestGreaterThanOrEqual(InterpreterAssembler* assembler) { |
| 1312 compiler::InterpreterAssembler* assembler) { | |
| 1313 DoBinaryOp(Runtime::kInterpreterGreaterThanOrEqual, assembler); | 1278 DoBinaryOp(Runtime::kInterpreterGreaterThanOrEqual, assembler); |
| 1314 } | 1279 } |
| 1315 | 1280 |
| 1316 | 1281 |
| 1317 // TestIn <src> | 1282 // TestIn <src> |
| 1318 // | 1283 // |
| 1319 // Test if the object referenced by the register operand is a property of the | 1284 // Test if the object referenced by the register operand is a property of the |
| 1320 // object referenced by the accumulator. | 1285 // object referenced by the accumulator. |
| 1321 void Interpreter::DoTestIn(compiler::InterpreterAssembler* assembler) { | 1286 void Interpreter::DoTestIn(InterpreterAssembler* assembler) { |
| 1322 DoBinaryOp(Runtime::kHasProperty, assembler); | 1287 DoBinaryOp(Runtime::kHasProperty, assembler); |
| 1323 } | 1288 } |
| 1324 | 1289 |
| 1325 | 1290 |
| 1326 // TestInstanceOf <src> | 1291 // TestInstanceOf <src> |
| 1327 // | 1292 // |
| 1328 // Test if the object referenced by the <src> register is an instance of the type | 1293 // Test if the object referenced by the <src> register is an instance of the type |
| 1329 // referenced by the accumulator. | 1294 // referenced by the accumulator. |
| 1330 void Interpreter::DoTestInstanceOf(compiler::InterpreterAssembler* assembler) { | 1295 void Interpreter::DoTestInstanceOf(InterpreterAssembler* assembler) { |
| 1331 DoBinaryOp(Runtime::kInstanceOf, assembler); | 1296 DoBinaryOp(Runtime::kInstanceOf, assembler); |
| 1332 } | 1297 } |
| 1333 | 1298 |
| 1334 | 1299 |
| 1335 // ToName | 1300 // ToName |
| 1336 // | 1301 // |
| 1337 // Cast the object referenced by the accumulator to a name. | 1302 // Cast the object referenced by the accumulator to a name. |
| 1338 void Interpreter::DoToName(compiler::InterpreterAssembler* assembler) { | 1303 void Interpreter::DoToName(InterpreterAssembler* assembler) { |
| 1339 Node* accumulator = __ GetAccumulator(); | 1304 Node* accumulator = __ GetAccumulator(); |
| 1340 Node* result = __ CallRuntime(Runtime::kToName, accumulator); | 1305 Node* context = __ GetContext(); |
| 1306 Node* result = __ CallRuntime(Runtime::kToName, context, accumulator); |
| 1341 __ SetAccumulator(result); | 1307 __ SetAccumulator(result); |
| 1342 __ Dispatch(); | 1308 __ Dispatch(); |
| 1343 } | 1309 } |
| 1344 | 1310 |
| 1345 | 1311 |
| 1346 // ToNumber | 1312 // ToNumber |
| 1347 // | 1313 // |
| 1348 // Cast the object referenced by the accumulator to a number. | 1314 // Cast the object referenced by the accumulator to a number. |
| 1349 void Interpreter::DoToNumber(compiler::InterpreterAssembler* assembler) { | 1315 void Interpreter::DoToNumber(InterpreterAssembler* assembler) { |
| 1350 Node* accumulator = __ GetAccumulator(); | 1316 Node* accumulator = __ GetAccumulator(); |
| 1351 Node* result = __ CallRuntime(Runtime::kToNumber, accumulator); | 1317 Node* context = __ GetContext(); |
| 1318 Node* result = __ CallRuntime(Runtime::kToNumber, context, accumulator); |
| 1352 __ SetAccumulator(result); | 1319 __ SetAccumulator(result); |
| 1353 __ Dispatch(); | 1320 __ Dispatch(); |
| 1354 } | 1321 } |
| 1355 | 1322 |
| 1356 | 1323 |
| 1357 // ToObject | 1324 // ToObject |
| 1358 // | 1325 // |
| 1359 // Cast the object referenced by the accumulator to a JSObject. | 1326 // Cast the object referenced by the accumulator to a JSObject. |
| 1360 void Interpreter::DoToObject(compiler::InterpreterAssembler* assembler) { | 1327 void Interpreter::DoToObject(InterpreterAssembler* assembler) { |
| 1361 Node* accumulator = __ GetAccumulator(); | 1328 Node* accumulator = __ GetAccumulator(); |
| 1362 Node* result = __ CallRuntime(Runtime::kToObject, accumulator); | 1329 Node* context = __ GetContext(); |
| 1330 Node* result = __ CallRuntime(Runtime::kToObject, context, accumulator); |
| 1363 __ SetAccumulator(result); | 1331 __ SetAccumulator(result); |
| 1364 __ Dispatch(); | 1332 __ Dispatch(); |
| 1365 } | 1333 } |
| 1366 | 1334 |
| 1367 | 1335 |
| 1368 // Jump <imm8> | 1336 // Jump <imm8> |
| 1369 // | 1337 // |
| 1370 // Jump by number of bytes represented by the immediate operand |imm8|. | 1338 // Jump by number of bytes represented by the immediate operand |imm8|. |
| 1371 void Interpreter::DoJump(compiler::InterpreterAssembler* assembler) { | 1339 void Interpreter::DoJump(InterpreterAssembler* assembler) { |
| 1372 Node* relative_jump = __ BytecodeOperandImm(0); | 1340 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1373 __ Jump(relative_jump); | 1341 __ Jump(relative_jump); |
| 1374 } | 1342 } |
| 1375 | 1343 |
| 1376 | 1344 |
| 1377 // JumpConstant <idx8> | 1345 // JumpConstant <idx8> |
| 1378 // | 1346 // |
| 1379 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool. | 1347 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool. |
| 1380 void Interpreter::DoJumpConstant(compiler::InterpreterAssembler* assembler) { | 1348 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { |
| 1381 Node* index = __ BytecodeOperandIdx(0); | 1349 Node* index = __ BytecodeOperandIdx(0); |
| 1382 Node* constant = __ LoadConstantPoolEntry(index); | 1350 Node* constant = __ LoadConstantPoolEntry(index); |
| 1383 Node* relative_jump = __ SmiUntag(constant); | 1351 Node* relative_jump = __ SmiUntag(constant); |
| 1384 __ Jump(relative_jump); | 1352 __ Jump(relative_jump); |
| 1385 } | 1353 } |
| 1386 | 1354 |
| 1387 | 1355 |
| 1388 // JumpConstantWide <idx16> | 1356 // JumpConstantWide <idx16> |
| 1389 // | 1357 // |
| 1390 // Jump by number of bytes in the Smi in the |idx16| entry in the | 1358 // Jump by number of bytes in the Smi in the |idx16| entry in the |
| 1391 // constant pool. | 1359 // constant pool. |
| 1392 void Interpreter::DoJumpConstantWide( | 1360 void Interpreter::DoJumpConstantWide(InterpreterAssembler* assembler) { |
| 1393 compiler::InterpreterAssembler* assembler) { | |
| 1394 DoJumpConstant(assembler); | 1361 DoJumpConstant(assembler); |
| 1395 } | 1362 } |
| 1396 | 1363 |
| 1397 | 1364 |
| 1398 // JumpIfTrue <imm8> | 1365 // JumpIfTrue <imm8> |
| 1399 // | 1366 // |
| 1400 // Jump by number of bytes represented by an immediate operand if the | 1367 // Jump by number of bytes represented by an immediate operand if the |
| 1401 // accumulator contains true. | 1368 // accumulator contains true. |
| 1402 void Interpreter::DoJumpIfTrue(compiler::InterpreterAssembler* assembler) { | 1369 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { |
| 1403 Node* accumulator = __ GetAccumulator(); | 1370 Node* accumulator = __ GetAccumulator(); |
| 1404 Node* relative_jump = __ BytecodeOperandImm(0); | 1371 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1405 Node* true_value = __ BooleanConstant(true); | 1372 Node* true_value = __ BooleanConstant(true); |
| 1406 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 1373 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 1407 } | 1374 } |
| 1408 | 1375 |
| 1409 | 1376 |
| 1410 // JumpIfTrueConstant <idx8> | 1377 // JumpIfTrueConstant <idx8> |
| 1411 // | 1378 // |
| 1412 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1379 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool |
| 1413 // if the accumulator contains true. | 1380 // if the accumulator contains true. |
| 1414 void Interpreter::DoJumpIfTrueConstant( | 1381 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { |
| 1415 compiler::InterpreterAssembler* assembler) { | |
| 1416 Node* accumulator = __ GetAccumulator(); | 1382 Node* accumulator = __ GetAccumulator(); |
| 1417 Node* index = __ BytecodeOperandIdx(0); | 1383 Node* index = __ BytecodeOperandIdx(0); |
| 1418 Node* constant = __ LoadConstantPoolEntry(index); | 1384 Node* constant = __ LoadConstantPoolEntry(index); |
| 1419 Node* relative_jump = __ SmiUntag(constant); | 1385 Node* relative_jump = __ SmiUntag(constant); |
| 1420 Node* true_value = __ BooleanConstant(true); | 1386 Node* true_value = __ BooleanConstant(true); |
| 1421 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 1387 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 1422 } | 1388 } |
| 1423 | 1389 |
| 1424 | 1390 |
| 1425 // JumpIfTrueConstantWide <idx16> | 1391 // JumpIfTrueConstantWide <idx16> |
| 1426 // | 1392 // |
| 1427 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | 1393 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool |
| 1428 // if the accumulator contains true. | 1394 // if the accumulator contains true. |
| 1429 void Interpreter::DoJumpIfTrueConstantWide( | 1395 void Interpreter::DoJumpIfTrueConstantWide(InterpreterAssembler* assembler) { |
| 1430 compiler::InterpreterAssembler* assembler) { | |
| 1431 DoJumpIfTrueConstant(assembler); | 1396 DoJumpIfTrueConstant(assembler); |
| 1432 } | 1397 } |
| 1433 | 1398 |
| 1434 | 1399 |
| 1435 // JumpIfFalse <imm8> | 1400 // JumpIfFalse <imm8> |
| 1436 // | 1401 // |
| 1437 // Jump by number of bytes represented by an immediate operand if the | 1402 // Jump by number of bytes represented by an immediate operand if the |
| 1438 // accumulator contains false. | 1403 // accumulator contains false. |
| 1439 void Interpreter::DoJumpIfFalse(compiler::InterpreterAssembler* assembler) { | 1404 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { |
| 1440 Node* accumulator = __ GetAccumulator(); | 1405 Node* accumulator = __ GetAccumulator(); |
| 1441 Node* relative_jump = __ BytecodeOperandImm(0); | 1406 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1442 Node* false_value = __ BooleanConstant(false); | 1407 Node* false_value = __ BooleanConstant(false); |
| 1443 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 1408 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 1444 } | 1409 } |
| 1445 | 1410 |
| 1446 | 1411 |
| 1447 // JumpIfFalseConstant <idx8> | 1412 // JumpIfFalseConstant <idx8> |
| 1448 // | 1413 // |
| 1449 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1414 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool |
| 1450 // if the accumulator contains false. | 1415 // if the accumulator contains false. |
| 1451 void Interpreter::DoJumpIfFalseConstant( | 1416 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { |
| 1452 compiler::InterpreterAssembler* assembler) { | |
| 1453 Node* accumulator = __ GetAccumulator(); | 1417 Node* accumulator = __ GetAccumulator(); |
| 1454 Node* index = __ BytecodeOperandIdx(0); | 1418 Node* index = __ BytecodeOperandIdx(0); |
| 1455 Node* constant = __ LoadConstantPoolEntry(index); | 1419 Node* constant = __ LoadConstantPoolEntry(index); |
| 1456 Node* relative_jump = __ SmiUntag(constant); | 1420 Node* relative_jump = __ SmiUntag(constant); |
| 1457 Node* false_value = __ BooleanConstant(false); | 1421 Node* false_value = __ BooleanConstant(false); |
| 1458 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 1422 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 1459 } | 1423 } |
| 1460 | 1424 |
| 1461 | 1425 |
| 1462 // JumpIfFalseConstantWide <idx16> | 1426 // JumpIfFalseConstantWide <idx16> |
| 1463 // | 1427 // |
| 1464 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | 1428 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool |
| 1465 // if the accumulator contains false. | 1429 // if the accumulator contains false. |
| 1466 void Interpreter::DoJumpIfFalseConstantWide( | 1430 void Interpreter::DoJumpIfFalseConstantWide(InterpreterAssembler* assembler) { |
| 1467 compiler::InterpreterAssembler* assembler) { | |
| 1468 DoJumpIfFalseConstant(assembler); | 1431 DoJumpIfFalseConstant(assembler); |
| 1469 } | 1432 } |
| 1470 | 1433 |
| 1471 | 1434 |
| 1472 // JumpIfToBooleanTrue <imm8> | 1435 // JumpIfToBooleanTrue <imm8> |
| 1473 // | 1436 // |
| 1474 // Jump by number of bytes represented by an immediate operand if the object | 1437 // Jump by number of bytes represented by an immediate operand if the object |
| 1475 // referenced by the accumulator is true when the object is cast to boolean. | 1438 // referenced by the accumulator is true when the object is cast to boolean. |
| 1476 void Interpreter::DoJumpIfToBooleanTrue( | 1439 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { |
| 1477 compiler::InterpreterAssembler* assembler) { | |
| 1478 Node* accumulator = __ GetAccumulator(); | 1440 Node* accumulator = __ GetAccumulator(); |
| 1441 Node* context = __ GetContext(); |
| 1479 Node* to_boolean_value = | 1442 Node* to_boolean_value = |
| 1480 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); | 1443 __ CallRuntime(Runtime::kInterpreterToBoolean, context, accumulator); |
| 1481 Node* relative_jump = __ BytecodeOperandImm(0); | 1444 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1482 Node* true_value = __ BooleanConstant(true); | 1445 Node* true_value = __ BooleanConstant(true); |
| 1483 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); | 1446 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); |
| 1484 } | 1447 } |
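The JumpIfTrue/JumpIfFalse family compares the accumulator against a boolean constant directly, while the JumpIfToBoolean variants coerce the accumulator first, as the handler above does via Runtime::kInterpreterToBoolean. A simplified sketch of the behavioural difference (plain C++ with a toy value model, not V8's):

```cpp
#include <cassert>
#include <variant>

using Value = std::variant<bool, double>;  // toy stand-in for a JS value

// JumpIfTrue-style test: branch only when the value is the exact boolean true.
bool IsTrueBoolean(const Value& v) {
  return std::holds_alternative<bool>(v) && std::get<bool>(v);
}

// JumpIfToBooleanTrue-style test: apply a (simplified) ToBoolean first.
bool ToBoolean(const Value& v) {
  if (std::holds_alternative<bool>(v)) return std::get<bool>(v);
  double d = std::get<double>(v);
  return d != 0.0;  // NaN handling omitted in this sketch
}

int main() {
  Value truthy_number = 42.0;
  assert(!IsTrueBoolean(truthy_number));  // plain variant would not branch
  assert(ToBoolean(truthy_number));       // ToBoolean variant would branch
}
```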
| 1485 | 1448 |
| 1486 | 1449 |
| 1487 // JumpIfToBooleanTrueConstant <idx8> | 1450 // JumpIfToBooleanTrueConstant <idx8> |
| 1488 // | 1451 // |
| 1489 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1452 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool |
| 1490 // if the object referenced by the accumulator is true when the object is cast | 1453 // if the object referenced by the accumulator is true when the object is cast |
| 1491 // to boolean. | 1454 // to boolean. |
| 1492 void Interpreter::DoJumpIfToBooleanTrueConstant( | 1455 void Interpreter::DoJumpIfToBooleanTrueConstant( |
| 1493 compiler::InterpreterAssembler* assembler) { | 1456 InterpreterAssembler* assembler) { |
| 1494 Node* accumulator = __ GetAccumulator(); | 1457 Node* accumulator = __ GetAccumulator(); |
| 1458 Node* context = __ GetContext(); |
| 1495 Node* to_boolean_value = | 1459 Node* to_boolean_value = |
| 1496 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); | 1460 __ CallRuntime(Runtime::kInterpreterToBoolean, context, accumulator); |
| 1497 Node* index = __ BytecodeOperandIdx(0); | 1461 Node* index = __ BytecodeOperandIdx(0); |
| 1498 Node* constant = __ LoadConstantPoolEntry(index); | 1462 Node* constant = __ LoadConstantPoolEntry(index); |
| 1499 Node* relative_jump = __ SmiUntag(constant); | 1463 Node* relative_jump = __ SmiUntag(constant); |
| 1500 Node* true_value = __ BooleanConstant(true); | 1464 Node* true_value = __ BooleanConstant(true); |
| 1501 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); | 1465 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); |
| 1502 } | 1466 } |
| 1503 | 1467 |
| 1504 | 1468 |
| 1505 // JumpIfToBooleanTrueConstantWide <idx16> | 1469 // JumpIfToBooleanTrueConstantWide <idx16> |
| 1506 // | 1470 // |
| 1507 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | 1471 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool |
| 1508 // if the object referenced by the accumulator is true when the object is cast | 1472 // if the object referenced by the accumulator is true when the object is cast |
| 1509 // to boolean. | 1473 // to boolean. |
| 1510 void Interpreter::DoJumpIfToBooleanTrueConstantWide( | 1474 void Interpreter::DoJumpIfToBooleanTrueConstantWide( |
| 1511 compiler::InterpreterAssembler* assembler) { | 1475 InterpreterAssembler* assembler) { |
| 1512 DoJumpIfToBooleanTrueConstant(assembler); | 1476 DoJumpIfToBooleanTrueConstant(assembler); |
| 1513 } | 1477 } |
| 1514 | 1478 |
| 1515 | 1479 |
| 1516 // JumpIfToBooleanFalse <imm8> | 1480 // JumpIfToBooleanFalse <imm8> |
| 1517 // | 1481 // |
| 1518 // Jump by number of bytes represented by an immediate operand if the object | 1482 // Jump by number of bytes represented by an immediate operand if the object |
| 1519 // referenced by the accumulator is false when the object is cast to boolean. | 1483 // referenced by the accumulator is false when the object is cast to boolean. |
| 1520 void Interpreter::DoJumpIfToBooleanFalse( | 1484 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { |
| 1521 compiler::InterpreterAssembler* assembler) { | |
| 1522 Node* accumulator = __ GetAccumulator(); | 1485 Node* accumulator = __ GetAccumulator(); |
| 1486 Node* context = __ GetContext(); |
| 1523 Node* to_boolean_value = | 1487 Node* to_boolean_value = |
| 1524 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); | 1488 __ CallRuntime(Runtime::kInterpreterToBoolean, context, accumulator); |
| 1525 Node* relative_jump = __ BytecodeOperandImm(0); | 1489 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1526 Node* false_value = __ BooleanConstant(false); | 1490 Node* false_value = __ BooleanConstant(false); |
| 1527 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); | 1491 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); |
| 1528 } | 1492 } |
| 1529 | 1493 |
| 1530 | 1494 |
| 1531 // JumpIfToBooleanFalseConstant <idx8> | 1495 // JumpIfToBooleanFalseConstant <idx8> |
| 1532 // | 1496 // |
| 1533 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1497 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool |
| 1534 // if the object referenced by the accumulator is false when the object is cast | 1498 // if the object referenced by the accumulator is false when the object is cast |
| 1535 // to boolean. | 1499 // to boolean. |
| 1536 void Interpreter::DoJumpIfToBooleanFalseConstant( | 1500 void Interpreter::DoJumpIfToBooleanFalseConstant( |
| 1537 compiler::InterpreterAssembler* assembler) { | 1501 InterpreterAssembler* assembler) { |
| 1538 Node* accumulator = __ GetAccumulator(); | 1502 Node* accumulator = __ GetAccumulator(); |
| 1503 Node* context = __ GetContext(); |
| 1539 Node* to_boolean_value = | 1504 Node* to_boolean_value = |
| 1540 __ CallRuntime(Runtime::kInterpreterToBoolean, accumulator); | 1505 __ CallRuntime(Runtime::kInterpreterToBoolean, context, accumulator); |
| 1541 Node* index = __ BytecodeOperandIdx(0); | 1506 Node* index = __ BytecodeOperandIdx(0); |
| 1542 Node* constant = __ LoadConstantPoolEntry(index); | 1507 Node* constant = __ LoadConstantPoolEntry(index); |
| 1543 Node* relative_jump = __ SmiUntag(constant); | 1508 Node* relative_jump = __ SmiUntag(constant); |
| 1544 Node* false_value = __ BooleanConstant(false); | 1509 Node* false_value = __ BooleanConstant(false); |
| 1545 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); | 1510 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); |
| 1546 } | 1511 } |
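
The *Constant jump variants read the relative jump offset as a Smi out of the constant pool, so the value has to be untagged before it can be used as a byte offset. A self-contained sketch of the underlying arithmetic, assuming the usual V8 Smi encoding (value shifted left by one with a zero tag bit on 32-bit targets, value held in the upper 32 bits of the word on 64-bit targets):

#include <cstdint>

// Illustrative stand-ins for the assembler's SmiTag/SmiUntag helpers.
int64_t SmiTag64(int32_t value) { return static_cast<int64_t>(value) << 32; }
int32_t SmiUntag64(int64_t smi) { return static_cast<int32_t>(smi >> 32); }

int32_t SmiTag32(int32_t value) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);  // tag bit 0
}
int32_t SmiUntag32(int32_t smi) { return smi >> 1; }  // arithmetic shift
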
| 1547 | 1512 |
| 1548 | 1513 |
| 1549 // JumpIfToBooleanFalseConstantWide <idx16> | 1514 // JumpIfToBooleanFalseConstantWide <idx16> |
| 1550 // | 1515 // |
| 1551 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | 1516 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool |
| 1552 // if the object referenced by the accumulator is false when the object is cast | 1517 // if the object referenced by the accumulator is false when the object is cast |
| 1553 // to boolean. | 1518 // to boolean. |
| 1554 void Interpreter::DoJumpIfToBooleanFalseConstantWide( | 1519 void Interpreter::DoJumpIfToBooleanFalseConstantWide( |
| 1555 compiler::InterpreterAssembler* assembler) { | 1520 InterpreterAssembler* assembler) { |
| 1556 DoJumpIfToBooleanFalseConstant(assembler); | 1521 DoJumpIfToBooleanFalseConstant(assembler); |
| 1557 } | 1522 } |
| 1558 | 1523 |
| 1559 | 1524 |
| 1560 // JumpIfNull <imm8> | 1525 // JumpIfNull <imm8> |
| 1561 // | 1526 // |
| 1562 // Jump by number of bytes represented by an immediate operand if the object | 1527 // Jump by number of bytes represented by an immediate operand if the object |
| 1563 // referenced by the accumulator is the null constant. | 1528 // referenced by the accumulator is the null constant. |
| 1564 void Interpreter::DoJumpIfNull(compiler::InterpreterAssembler* assembler) { | 1529 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { |
| 1565 Node* accumulator = __ GetAccumulator(); | 1530 Node* accumulator = __ GetAccumulator(); |
| 1566 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 1531 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 1567 Node* relative_jump = __ BytecodeOperandImm(0); | 1532 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1568 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 1533 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 1569 } | 1534 } |
| 1570 | 1535 |
| 1571 | 1536 |
| 1572 // JumpIfNullConstant <idx8> | 1537 // JumpIfNullConstant <idx8> |
| 1573 // | 1538 // |
| 1574 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1539 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool |
| 1575 // if the object referenced by the accumulator is the null constant. | 1540 // if the object referenced by the accumulator is the null constant. |
| 1576 void Interpreter::DoJumpIfNullConstant( | 1541 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { |
| 1577 compiler::InterpreterAssembler* assembler) { | |
| 1578 Node* accumulator = __ GetAccumulator(); | 1542 Node* accumulator = __ GetAccumulator(); |
| 1579 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 1543 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 1580 Node* index = __ BytecodeOperandIdx(0); | 1544 Node* index = __ BytecodeOperandIdx(0); |
| 1581 Node* constant = __ LoadConstantPoolEntry(index); | 1545 Node* constant = __ LoadConstantPoolEntry(index); |
| 1582 Node* relative_jump = __ SmiUntag(constant); | 1546 Node* relative_jump = __ SmiUntag(constant); |
| 1583 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 1547 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 1584 } | 1548 } |
| 1585 | 1549 |
| 1586 | 1550 |
| 1587 // JumpIfNullConstantWide <idx16> | 1551 // JumpIfNullConstantWide <idx16> |
| 1588 // | 1552 // |
| 1589 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | 1553 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool |
| 1590 // if the object referenced by the accumulator is the null constant. | 1554 // if the object referenced by the accumulator is the null constant. |
| 1591 void Interpreter::DoJumpIfNullConstantWide( | 1555 void Interpreter::DoJumpIfNullConstantWide(InterpreterAssembler* assembler) { |
| 1592 compiler::InterpreterAssembler* assembler) { | |
| 1593 DoJumpIfNullConstant(assembler); | 1556 DoJumpIfNullConstant(assembler); |
| 1594 } | 1557 } |
| 1595 | 1558 |
| 1596 // JumpIfUndefined <imm8> | 1559 // JumpIfUndefined <imm8> |
| 1597 // | 1560 // |
| 1598 // Jump by number of bytes represented by an immediate operand if the object | 1561 // Jump by number of bytes represented by an immediate operand if the object |
| 1599 // referenced by the accumulator is the undefined constant. | 1562 // referenced by the accumulator is the undefined constant. |
| 1600 void Interpreter::DoJumpIfUndefined(compiler::InterpreterAssembler* assembler) { | 1563 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { |
| 1601 Node* accumulator = __ GetAccumulator(); | 1564 Node* accumulator = __ GetAccumulator(); |
| 1602 Node* undefined_value = | 1565 Node* undefined_value = |
| 1603 __ HeapConstant(isolate_->factory()->undefined_value()); | 1566 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 1604 Node* relative_jump = __ BytecodeOperandImm(0); | 1567 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1605 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 1568 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 1606 } | 1569 } |
| 1607 | 1570 |
| 1608 | 1571 |
| 1609 // JumpIfUndefinedConstant <idx8> | 1572 // JumpIfUndefinedConstant <idx8> |
| 1610 // | 1573 // |
| 1611 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1574 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool |
| 1612 // if the object referenced by the accumulator is the undefined constant. | 1575 // if the object referenced by the accumulator is the undefined constant. |
| 1613 void Interpreter::DoJumpIfUndefinedConstant( | 1576 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { |
| 1614 compiler::InterpreterAssembler* assembler) { | |
| 1615 Node* accumulator = __ GetAccumulator(); | 1577 Node* accumulator = __ GetAccumulator(); |
| 1616 Node* undefined_value = | 1578 Node* undefined_value = |
| 1617 __ HeapConstant(isolate_->factory()->undefined_value()); | 1579 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 1618 Node* index = __ BytecodeOperandIdx(0); | 1580 Node* index = __ BytecodeOperandIdx(0); |
| 1619 Node* constant = __ LoadConstantPoolEntry(index); | 1581 Node* constant = __ LoadConstantPoolEntry(index); |
| 1620 Node* relative_jump = __ SmiUntag(constant); | 1582 Node* relative_jump = __ SmiUntag(constant); |
| 1621 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 1583 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 1622 } | 1584 } |
| 1623 | 1585 |
| 1624 | 1586 |
| 1625 // JumpIfUndefinedConstantWide <idx16> | 1587 // JumpIfUndefinedConstantWide <idx16> |
| 1626 // | 1588 // |
| 1627 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | 1589 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool |
| 1628 // if the object referenced by the accumulator is the undefined constant. | 1590 // if the object referenced by the accumulator is the undefined constant. |
| 1629 void Interpreter::DoJumpIfUndefinedConstantWide( | 1591 void Interpreter::DoJumpIfUndefinedConstantWide( |
| 1630 compiler::InterpreterAssembler* assembler) { | 1592 InterpreterAssembler* assembler) { |
| 1631 DoJumpIfUndefinedConstant(assembler); | 1593 DoJumpIfUndefinedConstant(assembler); |
| 1632 } | 1594 } |
| 1633 | 1595 |
| 1634 // JumpIfHole <imm8> | 1596 // JumpIfHole <imm8> |
| 1635 // | 1597 // |
| 1636 // Jump by number of bytes represented by an immediate operand if the object | 1598 // Jump by number of bytes represented by an immediate operand if the object |
| 1637 // referenced by the accumulator is the hole. | 1599 // referenced by the accumulator is the hole. |
| 1638 void Interpreter::DoJumpIfHole(compiler::InterpreterAssembler* assembler) { | 1600 void Interpreter::DoJumpIfHole(InterpreterAssembler* assembler) { |
| 1639 Node* accumulator = __ GetAccumulator(); | 1601 Node* accumulator = __ GetAccumulator(); |
| 1640 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 1602 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 1641 Node* relative_jump = __ BytecodeOperandImm(0); | 1603 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1642 __ JumpIfWordEqual(accumulator, the_hole_value, relative_jump); | 1604 __ JumpIfWordEqual(accumulator, the_hole_value, relative_jump); |
| 1643 } | 1605 } |
| 1644 | 1606 |
| 1645 // JumpIfNotHole <imm8> | 1607 // JumpIfNotHole <imm8> |
| 1646 // | 1608 // |
| 1647 // Jump by number of bytes represented by an immediate operand if the object | 1609 // Jump by number of bytes represented by an immediate operand if the object |
| 1648 // referenced by the accumulator is not the hole. | 1610 // referenced by the accumulator is not the hole. |
| 1649 void Interpreter::DoJumpIfNotHole(compiler::InterpreterAssembler* assembler) { | 1611 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { |
| 1650 Node* accumulator = __ GetAccumulator(); | 1612 Node* accumulator = __ GetAccumulator(); |
| 1651 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 1613 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 1652 Node* relative_jump = __ BytecodeOperandImm(0); | 1614 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1653 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 1615 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 1654 } | 1616 } |
| 1655 | 1617 |
| 1656 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, | 1618 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, |
| 1657 compiler::InterpreterAssembler* assembler) { | 1619 InterpreterAssembler* assembler) { |
| 1658 Node* index = __ BytecodeOperandIdx(0); | 1620 Node* index = __ BytecodeOperandIdx(0); |
| 1659 Node* constant_elements = __ LoadConstantPoolEntry(index); | 1621 Node* constant_elements = __ LoadConstantPoolEntry(index); |
| 1660 Node* literal_index_raw = __ BytecodeOperandIdx(1); | 1622 Node* literal_index_raw = __ BytecodeOperandIdx(1); |
| 1661 Node* literal_index = __ SmiTag(literal_index_raw); | 1623 Node* literal_index = __ SmiTag(literal_index_raw); |
| 1662 Node* flags_raw = __ BytecodeOperandImm(2); | 1624 Node* flags_raw = __ BytecodeOperandImm(2); |
| 1663 Node* flags = __ SmiTag(flags_raw); | 1625 Node* flags = __ SmiTag(flags_raw); |
| 1664 Node* closure = __ LoadRegister(Register::function_closure()); | 1626 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1665 Node* result = __ CallRuntime(function_id, closure, literal_index, | 1627 Node* context = __ GetContext(); |
| 1628 Node* result = __ CallRuntime(function_id, context, closure, literal_index, |
| 1666 constant_elements, flags); | 1629 constant_elements, flags); |
| 1667 __ SetAccumulator(result); | 1630 __ SetAccumulator(result); |
| 1668 __ Dispatch(); | 1631 __ Dispatch(); |
| 1669 } | 1632 } |
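
DoCreateLiteral is the shared helper behind the literal bytecodes that follow: each handler simply forwards the matching runtime function id. A handler for some further literal kind would take the same one-line shape (the bytecode and runtime names below are hypothetical, for illustration only):

void Interpreter::DoCreateFooLiteral(InterpreterAssembler* assembler) {
  DoCreateLiteral(Runtime::kCreateFooLiteral, assembler);
}
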
| 1670 | 1633 |
| 1671 | 1634 |
| 1672 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> | 1635 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> |
| 1673 // | 1636 // |
| 1674 // Creates a regular expression literal for literal index <literal_idx> with | 1637 // Creates a regular expression literal for literal index <literal_idx> with |
| 1675 // <flags> and the pattern in <pattern_idx>. | 1638 // <flags> and the pattern in <pattern_idx>. |
| 1676 void Interpreter::DoCreateRegExpLiteral( | 1639 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { |
| 1677 compiler::InterpreterAssembler* assembler) { | |
| 1678 DoCreateLiteral(Runtime::kCreateRegExpLiteral, assembler); | 1640 DoCreateLiteral(Runtime::kCreateRegExpLiteral, assembler); |
| 1679 } | 1641 } |
| 1680 | 1642 |
| 1681 | 1643 |
| 1682 // CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags> | 1644 // CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags> |
| 1683 // | 1645 // |
| 1684 // Creates a regular expression literal for literal index <literal_idx> with | 1646 // Creates a regular expression literal for literal index <literal_idx> with |
| 1685 // <flags> and the pattern in <pattern_idx>. | 1647 // <flags> and the pattern in <pattern_idx>. |
| 1686 void Interpreter::DoCreateRegExpLiteralWide( | 1648 void Interpreter::DoCreateRegExpLiteralWide(InterpreterAssembler* assembler) { |
| 1687 compiler::InterpreterAssembler* assembler) { | |
| 1688 DoCreateLiteral(Runtime::kCreateRegExpLiteral, assembler); | 1649 DoCreateLiteral(Runtime::kCreateRegExpLiteral, assembler); |
| 1689 } | 1650 } |
| 1690 | 1651 |
| 1691 | 1652 |
| 1692 // CreateArrayLiteral <element_idx> <literal_idx> <flags> | 1653 // CreateArrayLiteral <element_idx> <literal_idx> <flags> |
| 1693 // | 1654 // |
| 1694 // Creates an array literal for literal index <literal_idx> with flags <flags> | 1655 // Creates an array literal for literal index <literal_idx> with flags <flags> |
| 1695 // and constant elements in <element_idx>. | 1656 // and constant elements in <element_idx>. |
| 1696 void Interpreter::DoCreateArrayLiteral( | 1657 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { |
| 1697 compiler::InterpreterAssembler* assembler) { | |
| 1698 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); | 1658 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); |
| 1699 } | 1659 } |
| 1700 | 1660 |
| 1701 | 1661 |
| 1702 // CreateArrayLiteralWide <element_idx> <literal_idx> <flags> | 1662 // CreateArrayLiteralWide <element_idx> <literal_idx> <flags> |
| 1703 // | 1663 // |
| 1704 // Creates an array literal for literal index <literal_idx> with flags <flags> | 1664 // Creates an array literal for literal index <literal_idx> with flags <flags> |
| 1705 // and constant elements in <element_idx>. | 1665 // and constant elements in <element_idx>. |
| 1706 void Interpreter::DoCreateArrayLiteralWide( | 1666 void Interpreter::DoCreateArrayLiteralWide(InterpreterAssembler* assembler) { |
| 1707 compiler::InterpreterAssembler* assembler) { | |
| 1708 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); | 1667 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); |
| 1709 } | 1668 } |
| 1710 | 1669 |
| 1711 | 1670 |
| 1712 // CreateObjectLiteral <element_idx> <literal_idx> <flags> | 1671 // CreateObjectLiteral <element_idx> <literal_idx> <flags> |
| 1713 // | 1672 // |
| 1714 // Creates an object literal for literal index <literal_idx> with flags <flags> | 1673 // Creates an object literal for literal index <literal_idx> with flags <flags> |
| 1715 // and constant elements in <element_idx>. | 1674 // and constant elements in <element_idx>. |
| 1716 void Interpreter::DoCreateObjectLiteral( | 1675 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { |
| 1717 compiler::InterpreterAssembler* assembler) { | |
| 1718 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); | 1676 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); |
| 1719 } | 1677 } |
| 1720 | 1678 |
| 1721 | 1679 |
| 1722 // CreateObjectLiteralWide <element_idx> <literal_idx> <flags> | 1680 // CreateObjectLiteralWide <element_idx> <literal_idx> <flags> |
| 1723 // | 1681 // |
| 1724 // Creates an object literal for literal index <literal_idx> with flags <flags> | 1682 // Creates an object literal for literal index <literal_idx> with flags <flags> |
| 1725 // and constant elements in <element_idx>. | 1683 // and constant elements in <element_idx>. |
| 1726 void Interpreter::DoCreateObjectLiteralWide( | 1684 void Interpreter::DoCreateObjectLiteralWide(InterpreterAssembler* assembler) { |
| 1727 compiler::InterpreterAssembler* assembler) { | |
| 1728 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); | 1685 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); |
| 1729 } | 1686 } |
| 1730 | 1687 |
| 1731 | 1688 |
| 1732 // CreateClosure <index> <tenured> | 1689 // CreateClosure <index> <tenured> |
| 1733 // | 1690 // |
| 1734 // Creates a new closure for SharedFunctionInfo at position |index| in the | 1691 // Creates a new closure for SharedFunctionInfo at position |index| in the |
| 1735 // constant pool and with the PretenureFlag <tenured>. | 1692 // constant pool and with the PretenureFlag <tenured>. |
| 1736 void Interpreter::DoCreateClosure(compiler::InterpreterAssembler* assembler) { | 1693 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { |
| 1737 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of | 1694 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of |
| 1738 // calling into the runtime. | 1695 // calling into the runtime. |
| 1739 Node* index = __ BytecodeOperandIdx(0); | 1696 Node* index = __ BytecodeOperandIdx(0); |
| 1740 Node* shared = __ LoadConstantPoolEntry(index); | 1697 Node* shared = __ LoadConstantPoolEntry(index); |
| 1741 Node* tenured_raw = __ BytecodeOperandImm(1); | 1698 Node* tenured_raw = __ BytecodeOperandImm(1); |
| 1742 Node* tenured = __ SmiTag(tenured_raw); | 1699 Node* tenured = __ SmiTag(tenured_raw); |
| 1700 Node* context = __ GetContext(); |
| 1743 Node* result = | 1701 Node* result = |
| 1744 __ CallRuntime(Runtime::kInterpreterNewClosure, shared, tenured); | 1702 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); |
| 1745 __ SetAccumulator(result); | 1703 __ SetAccumulator(result); |
| 1746 __ Dispatch(); | 1704 __ Dispatch(); |
| 1747 } | 1705 } |
| 1748 | 1706 |
| 1749 | 1707 |
| 1750 // CreateClosureWide <index> <tenured> | 1708 // CreateClosureWide <index> <tenured> |
| 1751 // | 1709 // |
| 1752 // Creates a new closure for SharedFunctionInfo at position |index| in the | 1710 // Creates a new closure for SharedFunctionInfo at position |index| in the |
| 1753 // constant pool and with the PretenureFlag <tenured>. | 1711 // constant pool and with the PretenureFlag <tenured>. |
| 1754 void Interpreter::DoCreateClosureWide( | 1712 void Interpreter::DoCreateClosureWide(InterpreterAssembler* assembler) { |
| 1755 compiler::InterpreterAssembler* assembler) { | |
| 1756 return DoCreateClosure(assembler); | 1713 return DoCreateClosure(assembler); |
| 1757 } | 1714 } |
| 1758 | 1715 |
| 1759 | 1716 |
| 1760 // CreateMappedArguments | 1717 // CreateMappedArguments |
| 1761 // | 1718 // |
| 1762 // Creates a new mapped arguments object. | 1719 // Creates a new mapped arguments object. |
| 1763 void Interpreter::DoCreateMappedArguments( | 1720 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { |
| 1764 compiler::InterpreterAssembler* assembler) { | |
| 1765 Node* closure = __ LoadRegister(Register::function_closure()); | 1721 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1766 Node* result = __ CallRuntime(Runtime::kNewSloppyArguments_Generic, closure); | 1722 Node* context = __ GetContext(); |
| 1723 Node* result = |
| 1724 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); |
| 1767 __ SetAccumulator(result); | 1725 __ SetAccumulator(result); |
| 1768 __ Dispatch(); | 1726 __ Dispatch(); |
| 1769 } | 1727 } |
| 1770 | 1728 |
| 1771 | 1729 |
| 1772 // CreateUnmappedArguments | 1730 // CreateUnmappedArguments |
| 1773 // | 1731 // |
| 1774 // Creates a new unmapped arguments object. | 1732 // Creates a new unmapped arguments object. |
| 1775 void Interpreter::DoCreateUnmappedArguments( | 1733 void Interpreter::DoCreateUnmappedArguments(InterpreterAssembler* assembler) { |
| 1776 compiler::InterpreterAssembler* assembler) { | |
| 1777 Node* closure = __ LoadRegister(Register::function_closure()); | 1734 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1778 Node* result = __ CallRuntime(Runtime::kNewStrictArguments_Generic, closure); | 1735 Node* context = __ GetContext(); |
| 1736 Node* result = |
| 1737 __ CallRuntime(Runtime::kNewStrictArguments_Generic, context, closure); |
| 1779 __ SetAccumulator(result); | 1738 __ SetAccumulator(result); |
| 1780 __ Dispatch(); | 1739 __ Dispatch(); |
| 1781 } | 1740 } |
| 1782 | 1741 |
| 1783 // CreateRestParameter | 1742 // CreateRestParameter |
| 1784 // | 1743 // |
| 1785 // Creates a new rest parameter array. | 1744 // Creates a new rest parameter array. |
| 1786 void Interpreter::DoCreateRestParameter( | 1745 void Interpreter::DoCreateRestParameter(InterpreterAssembler* assembler) { |
| 1787 compiler::InterpreterAssembler* assembler) { | |
| 1788 // TODO(ignition): Use FastNewRestParameterStub here. | 1746 // TODO(ignition): Use FastNewRestParameterStub here. |
| 1789 Node* closure = __ LoadRegister(Register::function_closure()); | 1747 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1790 Node* result = __ CallRuntime(Runtime::kNewRestParameter, closure); | 1748 Node* context = __ GetContext(); |
| 1749 Node* result = __ CallRuntime(Runtime::kNewRestParameter, context, closure); |
| 1791 __ SetAccumulator(result); | 1750 __ SetAccumulator(result); |
| 1792 __ Dispatch(); | 1751 __ Dispatch(); |
| 1793 } | 1752 } |
| 1794 | 1753 |
| 1795 // StackCheck | 1754 // StackCheck |
| 1796 // | 1755 // |
| 1797 // Performs a stack guard check. | 1756 // Performs a stack guard check. |
| 1798 void Interpreter::DoStackCheck(compiler::InterpreterAssembler* assembler) { | 1757 void Interpreter::DoStackCheck(InterpreterAssembler* assembler) { |
| 1799 __ StackCheck(); | 1758 __ StackCheck(); |
| 1800 __ Dispatch(); | 1759 __ Dispatch(); |
| 1801 } | 1760 } |
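
Conceptually, the stack guard check compares the current stack position against a limit and only calls out to the runtime once that limit has been crossed; because stacks grow downwards, crossing the limit means the stack pointer is below it. A minimal self-contained sketch of just that comparison (the helper name is hypothetical; the real limit lives in the isolate's stack guard):

#include <cstdint>

bool StackLimitCrossed(uintptr_t stack_pointer, uintptr_t stack_limit) {
  // Stacks grow towards lower addresses, so being below the limit means the
  // guard has been hit and the runtime should be asked to handle interrupts.
  return stack_pointer < stack_limit;
}
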
| 1802 | 1761 |
| 1803 // Throw | 1762 // Throw |
| 1804 // | 1763 // |
| 1805 // Throws the exception in the accumulator. | 1764 // Throws the exception in the accumulator. |
| 1806 void Interpreter::DoThrow(compiler::InterpreterAssembler* assembler) { | 1765 void Interpreter::DoThrow(InterpreterAssembler* assembler) { |
| 1807 Node* exception = __ GetAccumulator(); | 1766 Node* exception = __ GetAccumulator(); |
| 1808 __ CallRuntime(Runtime::kThrow, exception); | 1767 Node* context = __ GetContext(); |
| 1768 __ CallRuntime(Runtime::kThrow, context, exception); |
| 1809 // We shouldn't ever return from a throw. | 1769 // We shouldn't ever return from a throw. |
| 1810 __ Abort(kUnexpectedReturnFromThrow); | 1770 __ Abort(kUnexpectedReturnFromThrow); |
| 1811 } | 1771 } |
| 1812 | 1772 |
| 1813 | 1773 |
| 1814 // ReThrow | 1774 // ReThrow |
| 1815 // | 1775 // |
| 1816 // Re-throws the exception in the accumulator. | 1776 // Re-throws the exception in the accumulator. |
| 1817 void Interpreter::DoReThrow(compiler::InterpreterAssembler* assembler) { | 1777 void Interpreter::DoReThrow(InterpreterAssembler* assembler) { |
| 1818 Node* exception = __ GetAccumulator(); | 1778 Node* exception = __ GetAccumulator(); |
| 1819 __ CallRuntime(Runtime::kReThrow, exception); | 1779 Node* context = __ GetContext(); |
| 1780 __ CallRuntime(Runtime::kReThrow, context, exception); |
| 1820 // We shouldn't ever return from a throw. | 1781 // We shouldn't ever return from a throw. |
| 1821 __ Abort(kUnexpectedReturnFromThrow); | 1782 __ Abort(kUnexpectedReturnFromThrow); |
| 1822 } | 1783 } |
| 1823 | 1784 |
| 1824 | 1785 |
| 1825 // Return | 1786 // Return |
| 1826 // | 1787 // |
| 1827 // Returns the value in the accumulator. | 1788 // Returns the value in the accumulator. |
| 1828 void Interpreter::DoReturn(compiler::InterpreterAssembler* assembler) { | 1789 void Interpreter::DoReturn(InterpreterAssembler* assembler) { |
| 1829 __ Return(); | 1790 __ InterpreterReturn(); |
| 1830 } | 1791 } |
| 1831 | 1792 |
| 1832 // Debugger | 1793 // Debugger |
| 1833 // | 1794 // |
| 1834 // Calls the runtime to handle a debugger statement. | 1795 // Calls the runtime to handle a debugger statement. |
| 1835 void Interpreter::DoDebugger(compiler::InterpreterAssembler* assembler) { | 1796 void Interpreter::DoDebugger(InterpreterAssembler* assembler) { |
| 1836 __ CallRuntime(Runtime::kHandleDebuggerStatement); | 1797 Node* context = __ GetContext(); |
| 1798 __ CallRuntime(Runtime::kHandleDebuggerStatement, context); |
| 1837 __ Dispatch(); | 1799 __ Dispatch(); |
| 1838 } | 1800 } |
| 1839 | 1801 |
| 1840 // ForInPrepare <cache_info_triple> | 1802 // ForInPrepare <cache_info_triple> |
| 1841 // | 1803 // |
| 1842 // Returns state for for..in loop execution based on the object in the | 1804 // Returns state for for..in loop execution based on the object in the |
| 1843 // accumulator. The result is output in registers |cache_info_triple| to | 1805 // accumulator. The result is output in registers |cache_info_triple| to |
| 1844 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, | 1806 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, |
| 1845 // and cache_length respectively. | 1807 // and cache_length respectively. |
| 1846 void Interpreter::DoForInPrepare(compiler::InterpreterAssembler* assembler) { | 1808 void Interpreter::DoForInPrepare(InterpreterAssembler* assembler) { |
| 1847 Node* object = __ GetAccumulator(); | 1809 Node* object = __ GetAccumulator(); |
| 1848 Node* result_triple = __ CallRuntime(Runtime::kForInPrepare, object); | 1810 Node* context = __ GetContext(); |
| 1811 Node* result_triple = __ CallRuntime(Runtime::kForInPrepare, context, object); |
| 1849 | 1812 |
| 1850 // Set output registers: | 1813 // Set output registers: |
| 1851 // 0 == cache_type, 1 == cache_array, 2 == cache_length | 1814 // 0 == cache_type, 1 == cache_array, 2 == cache_length |
| 1852 Node* output_register = __ BytecodeOperandReg(0); | 1815 Node* output_register = __ BytecodeOperandReg(0); |
| 1853 for (int i = 0; i < 3; i++) { | 1816 for (int i = 0; i < 3; i++) { |
| 1854 Node* cache_info = __ Projection(i, result_triple); | 1817 Node* cache_info = __ Projection(i, result_triple); |
| 1855 __ StoreRegister(cache_info, output_register); | 1818 __ StoreRegister(cache_info, output_register); |
| 1856 output_register = __ NextRegister(output_register); | 1819 output_register = __ NextRegister(output_register); |
| 1857 } | 1820 } |
| 1858 __ Dispatch(); | 1821 __ Dispatch(); |
| 1859 } | 1822 } |
| 1860 | 1823 |
| 1861 | 1824 |
| 1862 // ForInPrepareWide <cache_info_triple> | 1825 // ForInPrepareWide <cache_info_triple> |
| 1863 // | 1826 // |
| 1864 // Returns state for for..in loop execution based on the object in the | 1827 // Returns state for for..in loop execution based on the object in the |
| 1865 // accumulator. The result is output in registers |cache_info_triple| to | 1828 // accumulator. The result is output in registers |cache_info_triple| to |
| 1866 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, | 1829 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, |
| 1867 // and cache_length respectively. | 1830 // and cache_length respectively. |
| 1868 void Interpreter::DoForInPrepareWide( | 1831 void Interpreter::DoForInPrepareWide(InterpreterAssembler* assembler) { |
| 1869 compiler::InterpreterAssembler* assembler) { | |
| 1870 DoForInPrepare(assembler); | 1832 DoForInPrepare(assembler); |
| 1871 } | 1833 } |
| 1872 | 1834 |
| 1873 | 1835 |
| 1874 // ForInNext <receiver> <index> <cache_info_pair> | 1836 // ForInNext <receiver> <index> <cache_info_pair> |
| 1875 // | 1837 // |
| 1876 // Returns the next enumerable property in the accumulator. | 1838 // Returns the next enumerable property in the accumulator. |
| 1877 void Interpreter::DoForInNext(compiler::InterpreterAssembler* assembler) { | 1839 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { |
| 1878 Node* receiver_reg = __ BytecodeOperandReg(0); | 1840 Node* receiver_reg = __ BytecodeOperandReg(0); |
| 1879 Node* receiver = __ LoadRegister(receiver_reg); | 1841 Node* receiver = __ LoadRegister(receiver_reg); |
| 1880 Node* index_reg = __ BytecodeOperandReg(1); | 1842 Node* index_reg = __ BytecodeOperandReg(1); |
| 1881 Node* index = __ LoadRegister(index_reg); | 1843 Node* index = __ LoadRegister(index_reg); |
| 1882 Node* cache_type_reg = __ BytecodeOperandReg(2); | 1844 Node* cache_type_reg = __ BytecodeOperandReg(2); |
| 1883 Node* cache_type = __ LoadRegister(cache_type_reg); | 1845 Node* cache_type = __ LoadRegister(cache_type_reg); |
| 1884 Node* cache_array_reg = __ NextRegister(cache_type_reg); | 1846 Node* cache_array_reg = __ NextRegister(cache_type_reg); |
| 1885 Node* cache_array = __ LoadRegister(cache_array_reg); | 1847 Node* cache_array = __ LoadRegister(cache_array_reg); |
| 1886 Node* result = __ CallRuntime(Runtime::kForInNext, receiver, cache_array, | 1848 Node* context = __ GetContext(); |
| 1887 cache_type, index); | 1849 Node* result = __ CallRuntime(Runtime::kForInNext, context, receiver, |
| 1850 cache_array, cache_type, index); |
| 1888 __ SetAccumulator(result); | 1851 __ SetAccumulator(result); |
| 1889 __ Dispatch(); | 1852 __ Dispatch(); |
| 1890 } | 1853 } |
| 1891 | 1854 |
| 1892 | 1855 |
| 1893 // ForInNextWide <receiver> <index> <cache_info_pair> | 1856 // ForInNextWide <receiver> <index> <cache_info_pair> |
| 1894 // | 1857 // |
| 1895 // Returns the next enumerable property in the accumulator. | 1858 // Returns the next enumerable property in the accumulator. |
| 1896 void Interpreter::DoForInNextWide(compiler::InterpreterAssembler* assembler) { | 1859 void Interpreter::DoForInNextWide(InterpreterAssembler* assembler) { |
| 1897 return DoForInNext(assembler); | 1860 return DoForInNext(assembler); |
| 1898 } | 1861 } |
| 1899 | 1862 |
| 1900 | 1863 |
| 1901 // ForInDone <index> <cache_length> | 1864 // ForInDone <index> <cache_length> |
| 1902 // | 1865 // |
| 1903 // Returns true if the end of the enumerable properties has been reached. | 1866 // Returns true if the end of the enumerable properties has been reached. |
| 1904 void Interpreter::DoForInDone(compiler::InterpreterAssembler* assembler) { | 1867 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { |
| 1905 // TODO(oth): Implement directly rather than making a runtime call. | 1868 // TODO(oth): Implement directly rather than making a runtime call. |
| 1906 Node* index_reg = __ BytecodeOperandReg(0); | 1869 Node* index_reg = __ BytecodeOperandReg(0); |
| 1907 Node* index = __ LoadRegister(index_reg); | 1870 Node* index = __ LoadRegister(index_reg); |
| 1908 Node* cache_length_reg = __ BytecodeOperandReg(1); | 1871 Node* cache_length_reg = __ BytecodeOperandReg(1); |
| 1909 Node* cache_length = __ LoadRegister(cache_length_reg); | 1872 Node* cache_length = __ LoadRegister(cache_length_reg); |
| 1910 Node* result = __ CallRuntime(Runtime::kForInDone, index, cache_length); | 1873 Node* context = __ GetContext(); |
| 1874 Node* result = |
| 1875 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); |
| 1911 __ SetAccumulator(result); | 1876 __ SetAccumulator(result); |
| 1912 __ Dispatch(); | 1877 __ Dispatch(); |
| 1913 } | 1878 } |
| 1914 | 1879 |
| 1915 | 1880 |
| 1916 // ForInStep <index> | 1881 // ForInStep <index> |
| 1917 // | 1882 // |
| 1918 // Increments the loop counter in register |index| and stores the result | 1883 // Increments the loop counter in register |index| and stores the result |
| 1919 // in the accumulator. | 1884 // in the accumulator. |
| 1920 void Interpreter::DoForInStep(compiler::InterpreterAssembler* assembler) { | 1885 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { |
| 1921 // TODO(oth): Implement directly rather than making a runtime call. | 1886 // TODO(oth): Implement directly rather than making a runtime call. |
| 1922 Node* index_reg = __ BytecodeOperandReg(0); | 1887 Node* index_reg = __ BytecodeOperandReg(0); |
| 1923 Node* index = __ LoadRegister(index_reg); | 1888 Node* index = __ LoadRegister(index_reg); |
| 1924 Node* result = __ CallRuntime(Runtime::kForInStep, index); | 1889 Node* context = __ GetContext(); |
| 1890 Node* result = __ CallRuntime(Runtime::kForInStep, context, index); |
| 1925 __ SetAccumulator(result); | 1891 __ SetAccumulator(result); |
| 1926 __ Dispatch(); | 1892 __ Dispatch(); |
| 1927 } | 1893 } |
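
Taken together, ForInPrepare, ForInNext, ForInDone and ForInStep drive a for..in loop, with the enumeration cache held in consecutive registers. A self-contained sketch of the same protocol over a plain vector of keys, with names chosen to mirror the runtime calls above (the mapping is illustrative, not the generated code):

#include <iostream>
#include <string>
#include <vector>

int main() {
  // "ForInPrepare": build the enumeration cache and remember its length.
  std::vector<std::string> cache_array = {"a", "b", "c"};
  size_t cache_length = cache_array.size();

  size_t index = 0;
  while (index < cache_length) {                   // "ForInDone" (negated)
    const std::string& key = cache_array[index];   // "ForInNext"
    std::cout << key << "\n";                      // loop body sees |key|
    ++index;                                       // "ForInStep"
  }
  return 0;
}
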
| 1928 | 1894 |
| 1929 } // namespace interpreter | 1895 } // namespace interpreter |
| 1930 } // namespace internal | 1896 } // namespace internal |
| 1931 } // namespace v8 | 1897 } // namespace v8 |
| OLD | NEW |