| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter.h" | 5 #include "src/interpreter/interpreter.h" |
| 6 | 6 |
| 7 #include "src/ast/prettyprinter.h" | 7 #include "src/ast/prettyprinter.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/compiler.h" | 9 #include "src/compiler.h" |
| 10 #include "src/factory.h" | 10 #include "src/factory.h" |
| 11 #include "src/interpreter/bytecode-generator.h" | 11 #include "src/interpreter/bytecode-generator.h" |
| 12 #include "src/interpreter/bytecodes.h" | 12 #include "src/interpreter/bytecodes.h" |
| 13 #include "src/interpreter/interpreter-assembler.h" | 13 #include "src/interpreter/interpreter-assembler.h" |
| 14 #include "src/log.h" | 14 #include "src/log.h" |
| 15 #include "src/zone.h" | 15 #include "src/zone.h" |
| 16 | 16 |
| 17 namespace v8 { | 17 namespace v8 { |
| 18 namespace internal { | 18 namespace internal { |
| 19 namespace interpreter { | 19 namespace interpreter { |
| 20 | 20 |
| 21 using compiler::Node; | 21 using compiler::Node; |
| 22 | 22 |
| 23 #define __ assembler-> | 23 #define __ assembler-> |
| 24 | 24 |
| 25 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { | 25 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { |
| 26 memset(&dispatch_table_, 0, sizeof(dispatch_table_)); | 26 memset(dispatch_table_, 0, sizeof(dispatch_table_)); |
| 27 } | 27 } |
| 28 | 28 |
| 29 void Interpreter::Initialize() { | 29 void Interpreter::Initialize() { |
| 30 DCHECK(FLAG_ignition); | 30 DCHECK(FLAG_ignition); |
| 31 if (IsDispatchTableInitialized()) return; | 31 if (IsDispatchTableInitialized()) return; |
| 32 Zone zone; | 32 Zone zone; |
| 33 HandleScope scope(isolate_); | 33 HandleScope scope(isolate_); |
| 34 | 34 |
| 35 #define GENERATE_CODE(Name, ...) \ | 35 // Generate bytecode handlers for all bytecodes and scales. |
| 36 { \ | 36 for (OperandScale operand_scale = OperandScale::kSingle; |
| 37 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name); \ | 37 operand_scale <= OperandScale::kMaxValid; |
| 38 Do##Name(&assembler); \ | 38 operand_scale = Bytecodes::NextOperandScale(operand_scale)) { |
| 39 Handle<Code> code = assembler.GenerateCode(); \ | 39 #define GENERATE_CODE(Name, ...) \ |
| 40 dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] = *code; \ | 40 { \ |
| 41 TraceCodegen(code); \ | 41 if (BytecodeHasHandler(Bytecode::k##Name, operand_scale)) { \ |
| 42 LOG_CODE_EVENT(isolate_, \ | 42 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name, \ |
| 43 CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG, \ | 43 operand_scale); \ |
| 44 AbstractCode::cast(*code), #Name)); \ | 44 Do##Name(&assembler); \ |
| 45 Handle<Code> code = assembler.GenerateCode(); \ |
| 46 size_t index = GetDispatchTableIndex(Bytecode::k##Name, operand_scale); \ |
| 47 dispatch_table_[index] = *code; \ |
| 48 TraceCodegen(code); \ |
| 49 LOG_CODE_EVENT(isolate_, \ |
| 50 CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG, \ |
| 51 AbstractCode::cast(*code), #Name)); \ |
| 52 } \ |
| 45 } | 53 } |
| 46 BYTECODE_LIST(GENERATE_CODE) | 54 BYTECODE_LIST(GENERATE_CODE) |
| 47 #undef GENERATE_CODE | 55 #undef GENERATE_CODE |
| 56 } |
| 57 |
| 58 // Fill unused entries with the illegal bytecode handler. |
| 59 size_t illegal_index = |
| 60 GetDispatchTableIndex(Bytecode::kIllegal, OperandScale::kSingle); |
| 61 for (size_t index = 0; index < arraysize(dispatch_table_); ++index) { |
| 62 if (dispatch_table_[index] == nullptr) { |
| 63 dispatch_table_[index] = dispatch_table_[illegal_index]; |
| 64 } |
| 65 } |
| 48 } | 66 } |
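As a reading aid for the handler-generation loop above: expanded for a single (bytecode, operand scale) pair, the GENERATE_CODE body amounts to roughly the following. This is only an illustrative sketch of the macro shown in the patch (using kLdaSmi as an arbitrary example bytecode), not additional code in the change.

  if (BytecodeHasHandler(Bytecode::kLdaSmi, operand_scale)) {
    // Build the handler with an assembler bound to this bytecode and scale.
    InterpreterAssembler assembler(isolate_, &zone, Bytecode::kLdaSmi,
                                   operand_scale);
    DoLdaSmi(&assembler);
    Handle<Code> code = assembler.GenerateCode();
    // Install it in the per-scale slot of the dispatch table.
    size_t index = GetDispatchTableIndex(Bytecode::kLdaSmi, operand_scale);
    dispatch_table_[index] = *code;
    TraceCodegen(code);
    LOG_CODE_EVENT(isolate_,
                   CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG,
                                   AbstractCode::cast(*code), "LdaSmi"));
  }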
| 49 | 67 |
| 50 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode) { | 68 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode, |
| 69 OperandScale operand_scale) { |
| 51 DCHECK(IsDispatchTableInitialized()); | 70 DCHECK(IsDispatchTableInitialized()); |
| 52 return dispatch_table_[Bytecodes::ToByte(bytecode)]; | 71 DCHECK(BytecodeHasHandler(bytecode, operand_scale)); |
| 72 size_t index = GetDispatchTableIndex(bytecode, operand_scale); |
| 73 return dispatch_table_[index]; |
| 74 } |
| 75 |
| 76 // static |
| 77 size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode, |
| 78 OperandScale operand_scale) { |
| 79 static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte; |
| 80 size_t index = static_cast<size_t>(bytecode); |
| 81 OperandScale current_scale = OperandScale::kSingle; |
| 82 while (current_scale != operand_scale) { |
| 83 index += kEntriesPerOperandScale; |
| 84 current_scale = Bytecodes::NextOperandScale(current_scale); |
| 85 } |
| 86 return index; |
| 87 } |
| 88 |
| 89 // static |
| 90 bool Interpreter::BytecodeHasHandler(Bytecode bytecode, |
| 91 OperandScale operand_scale) { |
| 92 return operand_scale == OperandScale::kSingle || |
| 93 Bytecodes::IsBytecodeWithScalableOperands(bytecode); |
| 53 } | 94 } |
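For intuition about the table layout these helpers imply, here is a minimal standalone sketch (not code from the patch): each operand scale owns its own 256-entry block (kEntriesPerOperandScale == 1u << kBitsPerByte), and a handler is looked up by offsetting the bytecode value into the block for its scale. The enum values and the scale-ordinal parameter below are simplified stand-ins for the real types in src/interpreter/bytecodes.h.

  #include <cstddef>
  #include <cstdint>

  // Simplified stand-ins for illustration only; the real enums live in
  // src/interpreter/bytecodes.h and the value of kLdaSmi is hypothetical.
  enum class Bytecode : uint8_t { kIllegal = 0, kLdaSmi = 5 /* ... */ };

  // The real GetDispatchTableIndex walks Bytecodes::NextOperandScale(); here
  // an explicit scale ordinal (0 for kSingle, 1 for the next scale, ...) is
  // used instead.
  size_t DispatchTableIndex(Bytecode bytecode, size_t scale_ordinal) {
    constexpr size_t kEntriesPerOperandScale = 256;  // 1u << kBitsPerByte
    return static_cast<size_t>(bytecode) +
           scale_ordinal * kEntriesPerOperandScale;
  }

  // DispatchTableIndex(Bytecode::kLdaSmi, 1) == 261: the kLdaSmi entry in the
  // second 256-entry block. Bytecodes without scalable operands only get a
  // handler in block 0; Initialize() points their wider-scale entries at the
  // kIllegal handler.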
| 54 | 95 |
| 55 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { | 96 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { |
| 56 v->VisitPointers( | 97 v->VisitPointers( |
| 57 reinterpret_cast<Object**>(&dispatch_table_[0]), | 98 reinterpret_cast<Object**>(&dispatch_table_[0]), |
| 58 reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize)); | 99 reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize)); |
| 59 } | 100 } |
| 60 | 101 |
| 61 // static | 102 // static |
| 62 int Interpreter::InterruptBudget() { | 103 int Interpreter::InterruptBudget() { |
| (...skipping 77 matching lines...) |
| 140 | 181 |
| 141 // LdaZero | 182 // LdaZero |
| 142 // | 183 // |
| 143 // Load literal '0' into the accumulator. | 184 // Load literal '0' into the accumulator. |
| 144 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { | 185 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { |
| 145 Node* zero_value = __ NumberConstant(0.0); | 186 Node* zero_value = __ NumberConstant(0.0); |
| 146 __ SetAccumulator(zero_value); | 187 __ SetAccumulator(zero_value); |
| 147 __ Dispatch(); | 188 __ Dispatch(); |
| 148 } | 189 } |
| 149 | 190 |
| 150 | 191 // LdaSmi <imm> |
| 151 // LdaSmi8 <imm8> | |
| 152 // | 192 // |
| 153 // Load an 8-bit integer literal into the accumulator as a Smi. | 193 // Load an integer literal into the accumulator as a Smi. |
| 154 void Interpreter::DoLdaSmi8(InterpreterAssembler* assembler) { | 194 void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) { |
| 155 Node* raw_int = __ BytecodeOperandImm(0); | 195 Node* raw_int = __ BytecodeOperandImm(0); |
| 156 Node* smi_int = __ SmiTag(raw_int); | 196 Node* smi_int = __ SmiTag(raw_int); |
| 157 __ SetAccumulator(smi_int); | 197 __ SetAccumulator(smi_int); |
| 158 __ Dispatch(); | 198 __ Dispatch(); |
| 159 } | 199 } |
| 160 | 200 |
| 161 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) { | 201 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) { |
| 162 Node* index = __ BytecodeOperandIdx(0); | 202 Node* index = __ BytecodeOperandIdx(0); |
| 163 Node* constant = __ LoadConstantPoolEntry(index); | 203 Node* constant = __ LoadConstantPoolEntry(index); |
| 164 __ SetAccumulator(constant); | 204 __ SetAccumulator(constant); |
| 165 __ Dispatch(); | 205 __ Dispatch(); |
| 166 } | 206 } |
| 167 | 207 |
| 168 | 208 |
| 169 // LdaConstant <idx> | 209 // LdaConstant <idx> |
| 170 // | 210 // |
| 171 // Load constant literal at |idx| in the constant pool into the accumulator. | 211 // Load constant literal at |idx| in the constant pool into the accumulator. |
| 172 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { | 212 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { |
| 173 DoLoadConstant(assembler); | 213 DoLoadConstant(assembler); |
| 174 } | 214 } |
| 175 | 215 |
| 176 | |
| 177 // LdaConstantWide <idx> | |
| 178 // | |
| 179 // Load constant literal at |idx| in the constant pool into the accumulator. | |
| 180 void Interpreter::DoLdaConstantWide(InterpreterAssembler* assembler) { | |
| 181 DoLoadConstant(assembler); | |
| 182 } | |
| 183 | |
| 184 | |
| 185 // LdaUndefined | 216 // LdaUndefined |
| 186 // | 217 // |
| 187 // Load Undefined into the accumulator. | 218 // Load Undefined into the accumulator. |
| 188 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { | 219 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { |
| 189 Node* undefined_value = | 220 Node* undefined_value = |
| 190 __ HeapConstant(isolate_->factory()->undefined_value()); | 221 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 191 __ SetAccumulator(undefined_value); | 222 __ SetAccumulator(undefined_value); |
| 192 __ Dispatch(); | 223 __ Dispatch(); |
| 193 } | 224 } |
| 194 | 225 |
| (...skipping 65 matching lines...) |
| 260 // Stores the value of register <src> to register <dst>. | 291 // Stores the value of register <src> to register <dst>. |
| 261 void Interpreter::DoMov(InterpreterAssembler* assembler) { | 292 void Interpreter::DoMov(InterpreterAssembler* assembler) { |
| 262 Node* src_index = __ BytecodeOperandReg(0); | 293 Node* src_index = __ BytecodeOperandReg(0); |
| 263 Node* src_value = __ LoadRegister(src_index); | 294 Node* src_value = __ LoadRegister(src_index); |
| 264 Node* dst_index = __ BytecodeOperandReg(1); | 295 Node* dst_index = __ BytecodeOperandReg(1); |
| 265 __ StoreRegister(src_value, dst_index); | 296 __ StoreRegister(src_value, dst_index); |
| 266 __ Dispatch(); | 297 __ Dispatch(); |
| 267 } | 298 } |
| 268 | 299 |
| 269 | 300 |
| 270 // MovWide <src> <dst> | |
| 271 // | |
| 272 // Stores the value of register <src> to register <dst>. | |
| 273 void Interpreter::DoMovWide(InterpreterAssembler* assembler) { | |
| 274 DoMov(assembler); | |
| 275 } | |
| 276 | |
| 277 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { | 301 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { |
| 278 // Get the global object. | 302 // Get the global object. |
| 279 Node* context = __ GetContext(); | 303 Node* context = __ GetContext(); |
| 280 Node* native_context = | 304 Node* native_context = |
| 281 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 305 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 282 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); | 306 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); |
| 283 | 307 |
| 284 // Load the global via the LoadIC. | 308 // Load the global via the LoadIC. |
| 285 Node* code_target = __ HeapConstant(ic.code()); | 309 Node* code_target = __ HeapConstant(ic.code()); |
| 286 Node* constant_index = __ BytecodeOperandIdx(0); | 310 Node* constant_index = __ BytecodeOperandIdx(0); |
| (...skipping 20 matching lines...) |
| 307 // LdaGlobalInsideTypeof <name_index> <slot> | 331 // LdaGlobalInsideTypeof <name_index> <slot> |
| 308 // | 332 // |
| 309 // Load the global with name in constant pool entry <name_index> into the | 333 // Load the global with name in constant pool entry <name_index> into the |
| 310 // accumulator using FeedBackVector slot <slot> inside of a typeof. | 334 // accumulator using FeedBackVector slot <slot> inside of a typeof. |
| 311 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) { | 335 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) { |
| 312 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | 336 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, |
| 313 UNINITIALIZED); | 337 UNINITIALIZED); |
| 314 DoLoadGlobal(ic, assembler); | 338 DoLoadGlobal(ic, assembler); |
| 315 } | 339 } |
| 316 | 340 |
| 317 // LdaGlobalWide <name_index> <slot> | |
| 318 // | |
| 319 // Load the global with name in constant pool entry <name_index> into the | |
| 320 // accumulator using FeedBackVector slot <slot> outside of a typeof. | |
| 321 void Interpreter::DoLdaGlobalWide(InterpreterAssembler* assembler) { | |
| 322 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | |
| 323 UNINITIALIZED); | |
| 324 DoLoadGlobal(ic, assembler); | |
| 325 } | |
| 326 | |
| 327 // LdaGlobalInsideTypeofWide <name_index> <slot> | |
| 328 // | |
| 329 // Load the global with name in constant pool entry <name_index> into the | |
| 330 // accumulator using FeedBackVector slot <slot> inside of a typeof. | |
| 331 void Interpreter::DoLdaGlobalInsideTypeofWide(InterpreterAssembler* assembler) { | |
| 332 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | |
| 333 UNINITIALIZED); | |
| 334 DoLoadGlobal(ic, assembler); | |
| 335 } | |
| 336 | |
| 337 | |
| 338 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) { | 341 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) { |
| 339 // Get the global object. | 342 // Get the global object. |
| 340 Node* context = __ GetContext(); | 343 Node* context = __ GetContext(); |
| 341 Node* native_context = | 344 Node* native_context = |
| 342 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 345 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 343 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); | 346 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); |
| 344 | 347 |
| 345 // Store the global via the StoreIC. | 348 // Store the global via the StoreIC. |
| 346 Node* code_target = __ HeapConstant(ic.code()); | 349 Node* code_target = __ HeapConstant(ic.code()); |
| 347 Node* constant_index = __ BytecodeOperandIdx(0); | 350 Node* constant_index = __ BytecodeOperandIdx(0); |
| 348 Node* name = __ LoadConstantPoolEntry(constant_index); | 351 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 349 Node* value = __ GetAccumulator(); | 352 Node* value = __ GetAccumulator(); |
| 350 Node* raw_slot = __ BytecodeOperandIdx(1); | 353 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 351 Node* smi_slot = __ SmiTag(raw_slot); | 354 Node* smi_slot = __ SmiTag(raw_slot); |
| 352 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 355 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 353 __ CallStub(ic.descriptor(), code_target, context, global, name, value, | 356 __ CallStub(ic.descriptor(), code_target, context, global, name, value, |
| 354 smi_slot, type_feedback_vector); | 357 smi_slot, type_feedback_vector); |
| 355 | |
| 356 __ Dispatch(); | 358 __ Dispatch(); |
| 357 } | 359 } |
| 358 | 360 |
| 359 | 361 |
| 360 // StaGlobalSloppy <name_index> <slot> | 362 // StaGlobalSloppy <name_index> <slot> |
| 361 // | 363 // |
| 362 // Store the value in the accumulator into the global with name in constant pool | 364 // Store the value in the accumulator into the global with name in constant pool |
| 363 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | 365 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. |
| 364 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { | 366 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { |
| 365 Callable ic = | 367 Callable ic = |
| 366 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 368 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 367 DoStoreGlobal(ic, assembler); | 369 DoStoreGlobal(ic, assembler); |
| 368 } | 370 } |
| 369 | 371 |
| 370 | 372 |
| 371 // StaGlobalStrict <name_index> <slot> | 373 // StaGlobalStrict <name_index> <slot> |
| 372 // | 374 // |
| 373 // Store the value in the accumulator into the global with name in constant pool | 375 // Store the value in the accumulator into the global with name in constant pool |
| 374 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | 376 // entry <name_index> using FeedBackVector slot <slot> in strict mode. |
| 375 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { | 377 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { |
| 376 Callable ic = | 378 Callable ic = |
| 377 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 379 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 378 DoStoreGlobal(ic, assembler); | 380 DoStoreGlobal(ic, assembler); |
| 379 } | 381 } |
| 380 | 382 |
| 381 | |
| 382 // StaGlobalSloppyWide <name_index> <slot> | |
| 383 // | |
| 384 // Store the value in the accumulator into the global with name in constant pool | |
| 385 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | |
| 386 void Interpreter::DoStaGlobalSloppyWide(InterpreterAssembler* assembler) { | |
| 387 Callable ic = | |
| 388 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | |
| 389 DoStoreGlobal(ic, assembler); | |
| 390 } | |
| 391 | |
| 392 | |
| 393 // StaGlobalStrictWide <name_index> <slot> | |
| 394 // | |
| 395 // Store the value in the accumulator into the global with name in constant pool | |
| 396 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | |
| 397 void Interpreter::DoStaGlobalStrictWide(InterpreterAssembler* assembler) { | |
| 398 Callable ic = | |
| 399 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | |
| 400 DoStoreGlobal(ic, assembler); | |
| 401 } | |
| 402 | |
| 403 | |
| 404 // LdaContextSlot <context> <slot_index> | 383 // LdaContextSlot <context> <slot_index> |
| 405 // | 384 // |
| 406 // Load the object in |slot_index| of |context| into the accumulator. | 385 // Load the object in |slot_index| of |context| into the accumulator. |
| 407 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { | 386 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { |
| 408 Node* reg_index = __ BytecodeOperandReg(0); | 387 Node* reg_index = __ BytecodeOperandReg(0); |
| 409 Node* context = __ LoadRegister(reg_index); | 388 Node* context = __ LoadRegister(reg_index); |
| 410 Node* slot_index = __ BytecodeOperandIdx(1); | 389 Node* slot_index = __ BytecodeOperandIdx(1); |
| 411 Node* result = __ LoadContextSlot(context, slot_index); | 390 Node* result = __ LoadContextSlot(context, slot_index); |
| 412 __ SetAccumulator(result); | 391 __ SetAccumulator(result); |
| 413 __ Dispatch(); | 392 __ Dispatch(); |
| 414 } | 393 } |
| 415 | 394 |
| 416 | |
| 417 // LdaContextSlotWide <context> <slot_index> | |
| 418 // | |
| 419 // Load the object in |slot_index| of |context| into the accumulator. | |
| 420 void Interpreter::DoLdaContextSlotWide(InterpreterAssembler* assembler) { | |
| 421 DoLdaContextSlot(assembler); | |
| 422 } | |
| 423 | |
| 424 | |
| 425 // StaContextSlot <context> <slot_index> | 395 // StaContextSlot <context> <slot_index> |
| 426 // | 396 // |
| 427 // Stores the object in the accumulator into |slot_index| of |context|. | 397 // Stores the object in the accumulator into |slot_index| of |context|. |
| 428 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { | 398 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { |
| 429 Node* value = __ GetAccumulator(); | 399 Node* value = __ GetAccumulator(); |
| 430 Node* reg_index = __ BytecodeOperandReg(0); | 400 Node* reg_index = __ BytecodeOperandReg(0); |
| 431 Node* context = __ LoadRegister(reg_index); | 401 Node* context = __ LoadRegister(reg_index); |
| 432 Node* slot_index = __ BytecodeOperandIdx(1); | 402 Node* slot_index = __ BytecodeOperandIdx(1); |
| 433 __ StoreContextSlot(context, slot_index, value); | 403 __ StoreContextSlot(context, slot_index, value); |
| 434 __ Dispatch(); | 404 __ Dispatch(); |
| 435 } | 405 } |
| 436 | 406 |
| 437 | |
| 438 // StaContextSlot <context> <slot_index> | |
| 439 // | |
| 440 // Stores the object in the accumulator into |slot_index| of |context|. | |
| 441 void Interpreter::DoStaContextSlotWide(InterpreterAssembler* assembler) { | |
| 442 DoStaContextSlot(assembler); | |
| 443 } | |
| 444 | |
| 445 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, | 407 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, |
| 446 InterpreterAssembler* assembler) { | 408 InterpreterAssembler* assembler) { |
| 447 Node* index = __ BytecodeOperandIdx(0); | 409 Node* index = __ BytecodeOperandIdx(0); |
| 448 Node* name = __ LoadConstantPoolEntry(index); | 410 Node* name = __ LoadConstantPoolEntry(index); |
| 449 Node* context = __ GetContext(); | 411 Node* context = __ GetContext(); |
| 450 Node* result = __ CallRuntime(function_id, context, name); | 412 Node* result = __ CallRuntime(function_id, context, name); |
| 451 __ SetAccumulator(result); | 413 __ SetAccumulator(result); |
| 452 __ Dispatch(); | 414 __ Dispatch(); |
| 453 } | 415 } |
| 454 | 416 |
| 455 | |
| 456 // LdaLookupSlot <name_index> | 417 // LdaLookupSlot <name_index> |
| 457 // | 418 // |
| 458 // Lookup the object with the name in constant pool entry |name_index| | 419 // Lookup the object with the name in constant pool entry |name_index| |
| 459 // dynamically. | 420 // dynamically. |
| 460 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { | 421 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { |
| 461 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); | 422 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); |
| 462 } | 423 } |
| 463 | 424 |
| 464 | |
| 465 // LdaLookupSlotInsideTypeof <name_index> | 425 // LdaLookupSlotInsideTypeof <name_index> |
| 466 // | 426 // |
| 467 // Lookup the object with the name in constant pool entry |name_index| | 427 // Lookup the object with the name in constant pool entry |name_index| |
| 468 // dynamically without causing a ReferenceError. | 428 // dynamically without causing a ReferenceError. |
| 469 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { | 429 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { |
| 470 DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | 430 DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); |
| 471 } | 431 } |
| 472 | 432 |
| 473 | |
| 474 // LdaLookupSlotWide <name_index> | |
| 475 // | |
| 476 // Lookup the object with the name in constant pool entry |name_index| | |
| 477 // dynamically. | |
| 478 void Interpreter::DoLdaLookupSlotWide(InterpreterAssembler* assembler) { | |
| 479 DoLdaLookupSlot(assembler); | |
| 480 } | |
| 481 | |
| 482 | |
| 483 // LdaLookupSlotInsideTypeofWide <name_index> | |
| 484 // | |
| 485 // Lookup the object with the name in constant pool entry |name_index| | |
| 486 // dynamically without causing a NoReferenceError. | |
| 487 void Interpreter::DoLdaLookupSlotInsideTypeofWide( | |
| 488 InterpreterAssembler* assembler) { | |
| 489 DoLdaLookupSlotInsideTypeof(assembler); | |
| 490 } | |
| 491 | |
| 492 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, | 433 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, |
| 493 InterpreterAssembler* assembler) { | 434 InterpreterAssembler* assembler) { |
| 494 Node* value = __ GetAccumulator(); | 435 Node* value = __ GetAccumulator(); |
| 495 Node* index = __ BytecodeOperandIdx(0); | 436 Node* index = __ BytecodeOperandIdx(0); |
| 496 Node* name = __ LoadConstantPoolEntry(index); | 437 Node* name = __ LoadConstantPoolEntry(index); |
| 497 Node* context = __ GetContext(); | 438 Node* context = __ GetContext(); |
| 498 Node* result = __ CallRuntime(is_strict(language_mode) | 439 Node* result = __ CallRuntime(is_strict(language_mode) |
| 499 ? Runtime::kStoreLookupSlot_Strict | 440 ? Runtime::kStoreLookupSlot_Strict |
| 500 : Runtime::kStoreLookupSlot_Sloppy, | 441 : Runtime::kStoreLookupSlot_Sloppy, |
| 501 context, name, value); | 442 context, name, value); |
| 502 __ SetAccumulator(result); | 443 __ SetAccumulator(result); |
| 503 __ Dispatch(); | 444 __ Dispatch(); |
| 504 } | 445 } |
| 505 | 446 |
| 506 | |
| 507 // StaLookupSlotSloppy <name_index> | 447 // StaLookupSlotSloppy <name_index> |
| 508 // | 448 // |
| 509 // Store the object in accumulator to the object with the name in constant | 449 // Store the object in accumulator to the object with the name in constant |
| 510 // pool entry |name_index| in sloppy mode. | 450 // pool entry |name_index| in sloppy mode. |
| 511 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { | 451 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { |
| 512 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); | 452 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); |
| 513 } | 453 } |
| 514 | 454 |
| 515 | 455 |
| 516 // StaLookupSlotStrict <name_index> | 456 // StaLookupSlotStrict <name_index> |
| 517 // | 457 // |
| 518 // Store the object in accumulator to the object with the name in constant | 458 // Store the object in accumulator to the object with the name in constant |
| 519 // pool entry |name_index| in strict mode. | 459 // pool entry |name_index| in strict mode. |
| 520 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { | 460 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { |
| 521 DoStoreLookupSlot(LanguageMode::STRICT, assembler); | 461 DoStoreLookupSlot(LanguageMode::STRICT, assembler); |
| 522 } | 462 } |
| 523 | 463 |
| 524 | |
| 525 // StaLookupSlotSloppyWide <name_index> | |
| 526 // | |
| 527 // Store the object in accumulator to the object with the name in constant | |
| 528 // pool entry |name_index| in sloppy mode. | |
| 529 void Interpreter::DoStaLookupSlotSloppyWide(InterpreterAssembler* assembler) { | |
| 530 DoStaLookupSlotSloppy(assembler); | |
| 531 } | |
| 532 | |
| 533 | |
| 534 // StaLookupSlotStrictWide <name_index> | |
| 535 // | |
| 536 // Store the object in accumulator to the object with the name in constant | |
| 537 // pool entry |name_index| in strict mode. | |
| 538 void Interpreter::DoStaLookupSlotStrictWide(InterpreterAssembler* assembler) { | |
| 539 DoStaLookupSlotStrict(assembler); | |
| 540 } | |
| 541 | |
| 542 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) { | 464 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) { |
| 543 Node* code_target = __ HeapConstant(ic.code()); | 465 Node* code_target = __ HeapConstant(ic.code()); |
| 544 Node* register_index = __ BytecodeOperandReg(0); | 466 Node* register_index = __ BytecodeOperandReg(0); |
| 545 Node* object = __ LoadRegister(register_index); | 467 Node* object = __ LoadRegister(register_index); |
| 546 Node* constant_index = __ BytecodeOperandIdx(1); | 468 Node* constant_index = __ BytecodeOperandIdx(1); |
| 547 Node* name = __ LoadConstantPoolEntry(constant_index); | 469 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 548 Node* raw_slot = __ BytecodeOperandIdx(2); | 470 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 549 Node* smi_slot = __ SmiTag(raw_slot); | 471 Node* smi_slot = __ SmiTag(raw_slot); |
| 550 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 472 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 551 Node* context = __ GetContext(); | 473 Node* context = __ GetContext(); |
| 552 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, | 474 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| 553 name, smi_slot, type_feedback_vector); | 475 name, smi_slot, type_feedback_vector); |
| 554 __ SetAccumulator(result); | 476 __ SetAccumulator(result); |
| 555 __ Dispatch(); | 477 __ Dispatch(); |
| 556 } | 478 } |
| 557 | 479 |
| 558 // LoadIC <object> <name_index> <slot> | 480 // LoadIC <object> <name_index> <slot> |
| 559 // | 481 // |
| 560 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at | 482 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at |
| 561 // constant pool entry <name_index>. | 483 // constant pool entry <name_index>. |
| 562 void Interpreter::DoLoadIC(InterpreterAssembler* assembler) { | 484 void Interpreter::DoLoadIC(InterpreterAssembler* assembler) { |
| 563 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 485 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 564 UNINITIALIZED); | 486 UNINITIALIZED); |
| 565 DoLoadIC(ic, assembler); | 487 DoLoadIC(ic, assembler); |
| 566 } | 488 } |
| 567 | 489 |
| 568 // LoadICWide <object> <name_index> <slot> | |
| 569 // | |
| 570 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at | |
| 571 // constant pool entry <name_index>. | |
| 572 void Interpreter::DoLoadICWide(InterpreterAssembler* assembler) { | |
| 573 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | |
| 574 UNINITIALIZED); | |
| 575 DoLoadIC(ic, assembler); | |
| 576 } | |
| 577 | |
| 578 | |
| 579 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) { | 490 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) { |
| 580 Node* code_target = __ HeapConstant(ic.code()); | 491 Node* code_target = __ HeapConstant(ic.code()); |
| 581 Node* reg_index = __ BytecodeOperandReg(0); | 492 Node* reg_index = __ BytecodeOperandReg(0); |
| 582 Node* object = __ LoadRegister(reg_index); | 493 Node* object = __ LoadRegister(reg_index); |
| 583 Node* name = __ GetAccumulator(); | 494 Node* name = __ GetAccumulator(); |
| 584 Node* raw_slot = __ BytecodeOperandIdx(1); | 495 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 585 Node* smi_slot = __ SmiTag(raw_slot); | 496 Node* smi_slot = __ SmiTag(raw_slot); |
| 586 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 497 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 587 Node* context = __ GetContext(); | 498 Node* context = __ GetContext(); |
| 588 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, | 499 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| 589 name, smi_slot, type_feedback_vector); | 500 name, smi_slot, type_feedback_vector); |
| 590 __ SetAccumulator(result); | 501 __ SetAccumulator(result); |
| 591 __ Dispatch(); | 502 __ Dispatch(); |
| 592 } | 503 } |
| 593 | 504 |
| 594 // KeyedLoadIC <object> <slot> | 505 // KeyedLoadIC <object> <slot> |
| 595 // | 506 // |
| 596 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key | 507 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key |
| 597 // in the accumulator. | 508 // in the accumulator. |
| 598 void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) { | 509 void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) { |
| 599 Callable ic = | 510 Callable ic = |
| 600 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); | 511 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); |
| 601 DoKeyedLoadIC(ic, assembler); | 512 DoKeyedLoadIC(ic, assembler); |
| 602 } | 513 } |
| 603 | 514 |
| 604 // KeyedLoadICWide <object> <slot> | |
| 605 // | |
| 606 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key | |
| 607 // in the accumulator. | |
| 608 void Interpreter::DoKeyedLoadICWide(InterpreterAssembler* assembler) { | |
| 609 Callable ic = | |
| 610 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); | |
| 611 DoKeyedLoadIC(ic, assembler); | |
| 612 } | |
| 613 | |
| 614 | |
| 615 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { | 515 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { |
| 616 Node* code_target = __ HeapConstant(ic.code()); | 516 Node* code_target = __ HeapConstant(ic.code()); |
| 617 Node* object_reg_index = __ BytecodeOperandReg(0); | 517 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 618 Node* object = __ LoadRegister(object_reg_index); | 518 Node* object = __ LoadRegister(object_reg_index); |
| 619 Node* constant_index = __ BytecodeOperandIdx(1); | 519 Node* constant_index = __ BytecodeOperandIdx(1); |
| 620 Node* name = __ LoadConstantPoolEntry(constant_index); | 520 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 621 Node* value = __ GetAccumulator(); | 521 Node* value = __ GetAccumulator(); |
| 622 Node* raw_slot = __ BytecodeOperandIdx(2); | 522 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 623 Node* smi_slot = __ SmiTag(raw_slot); | 523 Node* smi_slot = __ SmiTag(raw_slot); |
| 624 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 524 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| (...skipping 20 matching lines...) |
| 645 // | 545 // |
| 646 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and | 546 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and |
| 647 // the name in constant pool entry <name_index> with the value in the | 547 // the name in constant pool entry <name_index> with the value in the |
| 648 // accumulator. | 548 // accumulator. |
| 649 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) { | 549 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) { |
| 650 Callable ic = | 550 Callable ic = |
| 651 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 551 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 652 DoStoreIC(ic, assembler); | 552 DoStoreIC(ic, assembler); |
| 653 } | 553 } |
| 654 | 554 |
| 655 | |
| 656 // StoreICSloppyWide <object> <name_index> <slot> | |
| 657 // | |
| 658 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and | |
| 659 // the name in constant pool entry <name_index> with the value in the | |
| 660 // accumulator. | |
| 661 void Interpreter::DoStoreICSloppyWide(InterpreterAssembler* assembler) { | |
| 662 Callable ic = | |
| 663 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | |
| 664 DoStoreIC(ic, assembler); | |
| 665 } | |
| 666 | |
| 667 | |
| 668 // StoreICStrictWide <object> <name_index> <slot> | |
| 669 // | |
| 670 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and | |
| 671 // the name in constant pool entry <name_index> with the value in the | |
| 672 // accumulator. | |
| 673 void Interpreter::DoStoreICStrictWide(InterpreterAssembler* assembler) { | |
| 674 Callable ic = | |
| 675 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | |
| 676 DoStoreIC(ic, assembler); | |
| 677 } | |
| 678 | |
| 679 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { | 555 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { |
| 680 Node* code_target = __ HeapConstant(ic.code()); | 556 Node* code_target = __ HeapConstant(ic.code()); |
| 681 Node* object_reg_index = __ BytecodeOperandReg(0); | 557 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 682 Node* object = __ LoadRegister(object_reg_index); | 558 Node* object = __ LoadRegister(object_reg_index); |
| 683 Node* name_reg_index = __ BytecodeOperandReg(1); | 559 Node* name_reg_index = __ BytecodeOperandReg(1); |
| 684 Node* name = __ LoadRegister(name_reg_index); | 560 Node* name = __ LoadRegister(name_reg_index); |
| 685 Node* value = __ GetAccumulator(); | 561 Node* value = __ GetAccumulator(); |
| 686 Node* raw_slot = __ BytecodeOperandIdx(2); | 562 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 687 Node* smi_slot = __ SmiTag(raw_slot); | 563 Node* smi_slot = __ SmiTag(raw_slot); |
| 688 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 564 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| (...skipping 18 matching lines...) |
| 707 // KeyedStoreICStrict <object> <key> <slot> | 583 // KeyedStoreICStrict <object> <key> <slot> |
| 708 // | 584 // |
| 709 // Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for <object> | 585 // Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for <object> |
| 710 // and the key <key> with the value in the accumulator. | 586 // and the key <key> with the value in the accumulator. |
| 711 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) { | 587 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) { |
| 712 Callable ic = | 588 Callable ic = |
| 713 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 589 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 714 DoKeyedStoreIC(ic, assembler); | 590 DoKeyedStoreIC(ic, assembler); |
| 715 } | 591 } |
| 716 | 592 |
| 717 | |
| 718 // KeyedStoreICSloppyWide <object> <key> <slot> | |
| 719 // | |
| 720 // Calls the sloppy mode KeyStoreIC at FeedBackVector slot <slot> for <object> | |
| 721 // and the key <key> with the value in the accumulator. | |
| 722 void Interpreter::DoKeyedStoreICSloppyWide(InterpreterAssembler* assembler) { | |
| 723 Callable ic = | |
| 724 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | |
| 725 DoKeyedStoreIC(ic, assembler); | |
| 726 } | |
| 727 | |
| 728 | |
| 729 // KeyedStoreICStoreWide <object> <key> <slot> | |
| 730 // | |
| 731 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object> | |
| 732 // and the key <key> with the value in the accumulator. | |
| 733 void Interpreter::DoKeyedStoreICStrictWide(InterpreterAssembler* assembler) { | |
| 734 Callable ic = | |
| 735 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | |
| 736 DoKeyedStoreIC(ic, assembler); | |
| 737 } | |
| 738 | |
| 739 // PushContext <context> | 593 // PushContext <context> |
| 740 // | 594 // |
| 741 // Saves the current context in <context>, and pushes the accumulator as the | 595 // Saves the current context in <context>, and pushes the accumulator as the |
| 742 // new current context. | 596 // new current context. |
| 743 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { | 597 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { |
| 744 Node* reg_index = __ BytecodeOperandReg(0); | 598 Node* reg_index = __ BytecodeOperandReg(0); |
| 745 Node* new_context = __ GetAccumulator(); | 599 Node* new_context = __ GetAccumulator(); |
| 746 Node* old_context = __ GetContext(); | 600 Node* old_context = __ GetContext(); |
| 747 __ StoreRegister(old_context, reg_index); | 601 __ StoreRegister(old_context, reg_index); |
| 748 __ SetContext(new_context); | 602 __ SetContext(new_context); |
| (...skipping 253 matching lines...) |
| 1002 | 856 |
| 1003 | 857 |
| 1004 // Call <callable> <receiver> <arg_count> | 858 // Call <callable> <receiver> <arg_count> |
| 1005 // | 859 // |
| 1006 // Call a JSFunction or Callable in |callable| with the |receiver| and | 860 // Call a JSFunction or Callable in |callable| with the |receiver| and |
| 1007 // |arg_count| arguments in subsequent registers. | 861 // |arg_count| arguments in subsequent registers. |
| 1008 void Interpreter::DoCall(InterpreterAssembler* assembler) { | 862 void Interpreter::DoCall(InterpreterAssembler* assembler) { |
| 1009 DoJSCall(assembler, TailCallMode::kDisallow); | 863 DoJSCall(assembler, TailCallMode::kDisallow); |
| 1010 } | 864 } |
| 1011 | 865 |
| 1012 | |
| 1013 // CallWide <callable> <receiver> <arg_count> | |
| 1014 // | |
| 1015 // Call a JSfunction or Callable in |callable| with the |receiver| and | |
| 1016 // |arg_count| arguments in subsequent registers. | |
| 1017 void Interpreter::DoCallWide(InterpreterAssembler* assembler) { | |
| 1018 DoJSCall(assembler, TailCallMode::kDisallow); | |
| 1019 } | |
| 1020 | |
| 1021 // TailCall <callable> <receiver> <arg_count> | 866 // TailCall <callable> <receiver> <arg_count> |
| 1022 // | 867 // |
| 1023 // Tail call a JSFunction or Callable in |callable| with the |receiver| and | 868 // Tail call a JSFunction or Callable in |callable| with the |receiver| and |
| 1024 // |arg_count| arguments in subsequent registers. | 869 // |arg_count| arguments in subsequent registers. |
| 1025 void Interpreter::DoTailCall(InterpreterAssembler* assembler) { | 870 void Interpreter::DoTailCall(InterpreterAssembler* assembler) { |
| 1026 DoJSCall(assembler, TailCallMode::kAllow); | 871 DoJSCall(assembler, TailCallMode::kAllow); |
| 1027 } | 872 } |
| 1028 | 873 |
| 1029 // TailCallWide <callable> <receiver> <arg_count> | |
| 1030 // | |
| 1031 // Tail call a JSfunction or Callable in |callable| with the |receiver| and | |
| 1032 // |arg_count| arguments in subsequent registers. | |
| 1033 void Interpreter::DoTailCallWide(InterpreterAssembler* assembler) { | |
| 1034 DoJSCall(assembler, TailCallMode::kAllow); | |
| 1035 } | |
| 1036 | |
| 1037 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) { | 874 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) { |
| 1038 Node* function_id = __ BytecodeOperandIdx(0); | 875 Node* function_id = __ BytecodeOperandRuntimeId(0); |
| 1039 Node* first_arg_reg = __ BytecodeOperandReg(1); | 876 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1040 Node* first_arg = __ RegisterLocation(first_arg_reg); | 877 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1041 Node* args_count = __ BytecodeOperandCount(2); | 878 Node* args_count = __ BytecodeOperandCount(2); |
| 1042 Node* context = __ GetContext(); | 879 Node* context = __ GetContext(); |
| 1043 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); | 880 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); |
| 1044 __ SetAccumulator(result); | 881 __ SetAccumulator(result); |
| 1045 __ Dispatch(); | 882 __ Dispatch(); |
| 1046 } | 883 } |
| 1047 | 884 |
| 1048 | 885 |
| 1049 // CallRuntime <function_id> <first_arg> <arg_count> | 886 // CallRuntime <function_id> <first_arg> <arg_count> |
| 1050 // | 887 // |
| 1051 // Call the runtime function |function_id| with the first argument in | 888 // Call the runtime function |function_id| with the first argument in |
| 1052 // register |first_arg| and |arg_count| arguments in subsequent | 889 // register |first_arg| and |arg_count| arguments in subsequent |
| 1053 // registers. | 890 // registers. |
| 1054 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { | 891 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { |
| 1055 DoCallRuntimeCommon(assembler); | 892 DoCallRuntimeCommon(assembler); |
| 1056 } | 893 } |
| 1057 | 894 |
| 1058 | |
| 1059 // CallRuntime <function_id> <first_arg> <arg_count> | |
| 1060 // | |
| 1061 // Call the runtime function |function_id| with the first argument in | |
| 1062 // register |first_arg| and |arg_count| arguments in subsequent | |
| 1063 // registers. | |
| 1064 void Interpreter::DoCallRuntimeWide(InterpreterAssembler* assembler) { | |
| 1065 DoCallRuntimeCommon(assembler); | |
| 1066 } | |
| 1067 | |
| 1068 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) { | 895 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) { |
| 1069 // Call the runtime function. | 896 // Call the runtime function. |
| 1070 Node* function_id = __ BytecodeOperandIdx(0); | 897 Node* function_id = __ BytecodeOperandRuntimeId(0); |
| 1071 Node* first_arg_reg = __ BytecodeOperandReg(1); | 898 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1072 Node* first_arg = __ RegisterLocation(first_arg_reg); | 899 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1073 Node* args_count = __ BytecodeOperandCount(2); | 900 Node* args_count = __ BytecodeOperandCount(2); |
| 1074 Node* context = __ GetContext(); | 901 Node* context = __ GetContext(); |
| 1075 Node* result_pair = | 902 Node* result_pair = |
| 1076 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); | 903 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); |
| 1077 | 904 |
| 1078 // Store the results in <first_return> and <first_return + 1> | 905 // Store the results in <first_return> and <first_return + 1> |
| 1079 Node* first_return_reg = __ BytecodeOperandReg(3); | 906 Node* first_return_reg = __ BytecodeOperandReg(3); |
| 1080 Node* second_return_reg = __ NextRegister(first_return_reg); | 907 Node* second_return_reg = __ NextRegister(first_return_reg); |
| 1081 Node* result0 = __ Projection(0, result_pair); | 908 Node* result0 = __ Projection(0, result_pair); |
| 1082 Node* result1 = __ Projection(1, result_pair); | 909 Node* result1 = __ Projection(1, result_pair); |
| 1083 __ StoreRegister(result0, first_return_reg); | 910 __ StoreRegister(result0, first_return_reg); |
| 1084 __ StoreRegister(result1, second_return_reg); | 911 __ StoreRegister(result1, second_return_reg); |
| 1085 __ Dispatch(); | 912 __ Dispatch(); |
| 1086 } | 913 } |
| 1087 | 914 |
| 1088 | 915 |
| 1089 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> | 916 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> |
| 1090 // | 917 // |
| 1091 // Call the runtime function |function_id| which returns a pair, with the | 918 // Call the runtime function |function_id| which returns a pair, with the |
| 1092 // first argument in register |first_arg| and |arg_count| arguments in | 919 // first argument in register |first_arg| and |arg_count| arguments in |
| 1093 // subsequent registers. Returns the result in <first_return> and | 920 // subsequent registers. Returns the result in <first_return> and |
| 1094 // <first_return + 1> | 921 // <first_return + 1> |
| 1095 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { | 922 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { |
| 1096 DoCallRuntimeForPairCommon(assembler); | 923 DoCallRuntimeForPairCommon(assembler); |
| 1097 } | 924 } |
| 1098 | 925 |
| 1099 | |
| 1100 // CallRuntimeForPairWide <function_id> <first_arg> <arg_count> <first_return> | |
| 1101 // | |
| 1102 // Call the runtime function |function_id| which returns a pair, with the | |
| 1103 // first argument in register |first_arg| and |arg_count| arguments in | |
| 1104 // subsequent registers. Returns the result in <first_return> and | |
| 1105 // <first_return + 1> | |
| 1106 void Interpreter::DoCallRuntimeForPairWide(InterpreterAssembler* assembler) { | |
| 1107 DoCallRuntimeForPairCommon(assembler); | |
| 1108 } | |
| 1109 | |
| 1110 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { | 926 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { |
| 1111 Node* context_index = __ BytecodeOperandIdx(0); | 927 Node* context_index = __ BytecodeOperandIdx(0); |
| 1112 Node* receiver_reg = __ BytecodeOperandReg(1); | 928 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 1113 Node* first_arg = __ RegisterLocation(receiver_reg); | 929 Node* first_arg = __ RegisterLocation(receiver_reg); |
| 1114 Node* receiver_args_count = __ BytecodeOperandCount(2); | 930 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 1115 Node* receiver_count = __ Int32Constant(1); | 931 Node* receiver_count = __ Int32Constant(1); |
| 1116 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 932 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 1117 | 933 |
| 1118 // Get the function to call from the native context. | 934 // Get the function to call from the native context. |
| 1119 Node* context = __ GetContext(); | 935 Node* context = __ GetContext(); |
| (...skipping 10 matching lines...) Expand all Loading... |
| 1130 | 946 |
| 1131 | 947 |
| 1132 // CallJSRuntime <context_index> <receiver> <arg_count> | 948 // CallJSRuntime <context_index> <receiver> <arg_count> |
| 1133 // | 949 // |
| 1134 // Call the JS runtime function that has the |context_index| with the receiver | 950 // Call the JS runtime function that has the |context_index| with the receiver |
| 1135 // in register |receiver| and |arg_count| arguments in subsequent registers. | 951 // in register |receiver| and |arg_count| arguments in subsequent registers. |
| 1136 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { | 952 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { |
| 1137 DoCallJSRuntimeCommon(assembler); | 953 DoCallJSRuntimeCommon(assembler); |
| 1138 } | 954 } |
| 1139 | 955 |
| 1140 | |
| 1141 // CallJSRuntimeWide <context_index> <receiver> <arg_count> | |
| 1142 // | |
| 1143 // Call the JS runtime function that has the |context_index| with the receiver | |
| 1144 // in register |receiver| and |arg_count| arguments in subsequent registers. | |
| 1145 void Interpreter::DoCallJSRuntimeWide(InterpreterAssembler* assembler) { | |
| 1146 DoCallJSRuntimeCommon(assembler); | |
| 1147 } | |
| 1148 | |
| 1149 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { | 956 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { |
| 1150 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); | 957 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); |
| 1151 Node* new_target = __ GetAccumulator(); | 958 Node* new_target = __ GetAccumulator(); |
| 1152 Node* constructor_reg = __ BytecodeOperandReg(0); | 959 Node* constructor_reg = __ BytecodeOperandReg(0); |
| 1153 Node* constructor = __ LoadRegister(constructor_reg); | 960 Node* constructor = __ LoadRegister(constructor_reg); |
| 1154 Node* first_arg_reg = __ BytecodeOperandReg(1); | 961 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1155 Node* first_arg = __ RegisterLocation(first_arg_reg); | 962 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1156 Node* args_count = __ BytecodeOperandCount(2); | 963 Node* args_count = __ BytecodeOperandCount(2); |
| 1157 Node* context = __ GetContext(); | 964 Node* context = __ GetContext(); |
| 1158 Node* result = | 965 Node* result = |
| 1159 __ CallConstruct(constructor, context, new_target, first_arg, args_count); | 966 __ CallConstruct(constructor, context, new_target, first_arg, args_count); |
| 1160 __ SetAccumulator(result); | 967 __ SetAccumulator(result); |
| 1161 __ Dispatch(); | 968 __ Dispatch(); |
| 1162 } | 969 } |
| 1163 | 970 |
| 1164 | 971 |
| 1165 // New <constructor> <first_arg> <arg_count> | 972 // New <constructor> <first_arg> <arg_count> |
| 1166 // | 973 // |
| 1167 // Call operator new with |constructor| and the first argument in | 974 // Call operator new with |constructor| and the first argument in |
| 1168 // register |first_arg| and |arg_count| arguments in subsequent | 975 // register |first_arg| and |arg_count| arguments in subsequent |
| 1169 // registers. The new.target is in the accumulator. | 976 // registers. The new.target is in the accumulator. |
| 1170 // | 977 // |
| 1171 void Interpreter::DoNew(InterpreterAssembler* assembler) { | 978 void Interpreter::DoNew(InterpreterAssembler* assembler) { |
| 1172 DoCallConstruct(assembler); | 979 DoCallConstruct(assembler); |
| 1173 } | 980 } |
| 1174 | 981 |
| 1175 | |
| 1176 // NewWide <constructor> <first_arg> <arg_count> | |
| 1177 // | |
| 1178 // Call operator new with |constructor| and the first argument in | |
| 1179 // register |first_arg| and |arg_count| arguments in subsequent | |
| 1180 // registers. The new.target is in the accumulator. | |
| 1181 // | |
| 1182 void Interpreter::DoNewWide(InterpreterAssembler* assembler) { | |
| 1183 DoCallConstruct(assembler); | |
| 1184 } | |
| 1185 | |
| 1186 | |
| 1187 // TestEqual <src> | 982 // TestEqual <src> |
| 1188 // | 983 // |
| 1189 // Test if the value in the <src> register equals the accumulator. | 984 // Test if the value in the <src> register equals the accumulator. |
| 1190 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { | 985 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { |
| 1191 DoBinaryOp(CodeFactory::Equal(isolate_), assembler); | 986 DoBinaryOp(CodeFactory::Equal(isolate_), assembler); |
| 1192 } | 987 } |
| 1193 | 988 |
| 1194 | 989 |
| 1195 // TestNotEqual <src> | 990 // TestNotEqual <src> |
| 1196 // | 991 // |
| (...skipping 89 matching lines...) |
| 1286 } | 1081 } |
| 1287 | 1082 |
| 1288 | 1083 |
| 1289 // ToObject | 1084 // ToObject |
| 1290 // | 1085 // |
| 1291 // Cast the object referenced by the accumulator to a JSObject. | 1086 // Cast the object referenced by the accumulator to a JSObject. |
| 1292 void Interpreter::DoToObject(InterpreterAssembler* assembler) { | 1087 void Interpreter::DoToObject(InterpreterAssembler* assembler) { |
| 1293 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler); | 1088 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler); |
| 1294 } | 1089 } |
| 1295 | 1090 |
| 1296 | 1091 // Jump <imm> |
| 1297 // Jump <imm8> | |
| 1298 // | 1092 // |
| 1299 // Jump by number of bytes represented by the immediate operand |imm8|. | 1093 // Jump by number of bytes represented by the immediate operand |imm|. |
| 1300 void Interpreter::DoJump(InterpreterAssembler* assembler) { | 1094 void Interpreter::DoJump(InterpreterAssembler* assembler) { |
| 1301 Node* relative_jump = __ BytecodeOperandImm(0); | 1095 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1302 __ Jump(relative_jump); | 1096 __ Jump(relative_jump); |
| 1303 } | 1097 } |
| 1304 | 1098 |
| 1305 | 1099 // JumpConstant <idx> |
| 1306 // JumpConstant <idx8> | |
| 1307 // | 1100 // |
| 1308 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool. | 1101 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool. |
| 1309 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { | 1102 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { |
| 1310 Node* index = __ BytecodeOperandIdx(0); | 1103 Node* index = __ BytecodeOperandIdx(0); |
| 1311 Node* constant = __ LoadConstantPoolEntry(index); | 1104 Node* constant = __ LoadConstantPoolEntry(index); |
| 1312 Node* relative_jump = __ SmiUntag(constant); | 1105 Node* relative_jump = __ SmiUntag(constant); |
| 1313 __ Jump(relative_jump); | 1106 __ Jump(relative_jump); |
| 1314 } | 1107 } |
| 1315 | 1108 |
| 1316 | 1109 // JumpIfTrue <imm> |
| 1317 // JumpConstantWide <idx16> | |
| 1318 // | |
| 1319 // Jump by number of bytes in the Smi in the |idx16| entry in the | |
| 1320 // constant pool. | |
| 1321 void Interpreter::DoJumpConstantWide(InterpreterAssembler* assembler) { | |
| 1322 DoJumpConstant(assembler); | |
| 1323 } | |
| 1324 | |
| 1325 | |
| 1326 // JumpIfTrue <imm8> | |
| 1327 // | 1110 // |
| 1328 // Jump by number of bytes represented by an immediate operand if the | 1111 // Jump by number of bytes represented by an immediate operand if the |
| 1329 // accumulator contains true. | 1112 // accumulator contains true. |
| 1330 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { | 1113 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { |
| 1331 Node* accumulator = __ GetAccumulator(); | 1114 Node* accumulator = __ GetAccumulator(); |
| 1332 Node* relative_jump = __ BytecodeOperandImm(0); | 1115 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1333 Node* true_value = __ BooleanConstant(true); | 1116 Node* true_value = __ BooleanConstant(true); |
| 1334 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 1117 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 1335 } | 1118 } |
| 1336 | 1119 |
| 1337 | 1120 // JumpIfTrueConstant <idx> |
| 1338 // JumpIfTrueConstant <idx8> | |
| 1339 // | 1121 // |
| 1340 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1122 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1341 // if the accumulator contains true. | 1123 // if the accumulator contains true. |
| 1342 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { | 1124 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { |
| 1343 Node* accumulator = __ GetAccumulator(); | 1125 Node* accumulator = __ GetAccumulator(); |
| 1344 Node* index = __ BytecodeOperandIdx(0); | 1126 Node* index = __ BytecodeOperandIdx(0); |
| 1345 Node* constant = __ LoadConstantPoolEntry(index); | 1127 Node* constant = __ LoadConstantPoolEntry(index); |
| 1346 Node* relative_jump = __ SmiUntag(constant); | 1128 Node* relative_jump = __ SmiUntag(constant); |
| 1347 Node* true_value = __ BooleanConstant(true); | 1129 Node* true_value = __ BooleanConstant(true); |
| 1348 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 1130 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 1349 } | 1131 } |
| 1350 | 1132 |
| 1351 | 1133 // JumpIfFalse <imm> |
| 1352 // JumpIfTrueConstantWide <idx16> | |
| 1353 // | |
| 1354 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1355 // if the accumulator contains true. | |
| 1356 void Interpreter::DoJumpIfTrueConstantWide(InterpreterAssembler* assembler) { | |
| 1357 DoJumpIfTrueConstant(assembler); | |
| 1358 } | |
| 1359 | |
| 1360 | |
| 1361 // JumpIfFalse <imm8> | |
| 1362 // | 1134 // |
| 1363 // Jump by number of bytes represented by an immediate operand if the | 1135 // Jump by number of bytes represented by an immediate operand if the |
| 1364 // accumulator contains false. | 1136 // accumulator contains false. |
| 1365 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { | 1137 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { |
| 1366 Node* accumulator = __ GetAccumulator(); | 1138 Node* accumulator = __ GetAccumulator(); |
| 1367 Node* relative_jump = __ BytecodeOperandImm(0); | 1139 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1368 Node* false_value = __ BooleanConstant(false); | 1140 Node* false_value = __ BooleanConstant(false); |
| 1369 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 1141 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 1370 } | 1142 } |
| 1371 | 1143 |
| 1372 | 1144 // JumpIfFalseConstant <idx> |
| 1373 // JumpIfFalseConstant <idx8> | |
| 1374 // | 1145 // |
| 1375 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1146 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1376 // if the accumulator contains false. | 1147 // if the accumulator contains false. |
| 1377 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { | 1148 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { |
| 1378 Node* accumulator = __ GetAccumulator(); | 1149 Node* accumulator = __ GetAccumulator(); |
| 1379 Node* index = __ BytecodeOperandIdx(0); | 1150 Node* index = __ BytecodeOperandIdx(0); |
| 1380 Node* constant = __ LoadConstantPoolEntry(index); | 1151 Node* constant = __ LoadConstantPoolEntry(index); |
| 1381 Node* relative_jump = __ SmiUntag(constant); | 1152 Node* relative_jump = __ SmiUntag(constant); |
| 1382 Node* false_value = __ BooleanConstant(false); | 1153 Node* false_value = __ BooleanConstant(false); |
| 1383 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 1154 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 1384 } | 1155 } |
| 1385 | 1156 |
| 1386 | 1157 // JumpIfToBooleanTrue <imm> |
| 1387 // JumpIfFalseConstantWide <idx16> | |
| 1388 // | |
| 1389 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1390 // if the accumulator contains false. | |
| 1391 void Interpreter::DoJumpIfFalseConstantWide(InterpreterAssembler* assembler) { | |
| 1392 DoJumpIfFalseConstant(assembler); | |
| 1393 } | |
| 1394 | |
| 1395 | |
| 1396 // JumpIfToBooleanTrue <imm8> | |
| 1397 // | 1158 // |
| 1398 // Jump by number of bytes represented by an immediate operand if the object | 1159 // Jump by number of bytes represented by an immediate operand if the object |
| 1399 // referenced by the accumulator is true when the object is cast to boolean. | 1160 // referenced by the accumulator is true when the object is cast to boolean. |
| 1400 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { | 1161 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { |
| 1401 Callable callable = CodeFactory::ToBoolean(isolate_); | 1162 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1402 Node* target = __ HeapConstant(callable.code()); | 1163 Node* target = __ HeapConstant(callable.code()); |
| 1403 Node* accumulator = __ GetAccumulator(); | 1164 Node* accumulator = __ GetAccumulator(); |
| 1404 Node* context = __ GetContext(); | 1165 Node* context = __ GetContext(); |
| 1405 Node* to_boolean_value = | 1166 Node* to_boolean_value = |
| 1406 __ CallStub(callable.descriptor(), target, context, accumulator); | 1167 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1407 Node* relative_jump = __ BytecodeOperandImm(0); | 1168 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1408 Node* true_value = __ BooleanConstant(true); | 1169 Node* true_value = __ BooleanConstant(true); |
| 1409 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); | 1170 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); |
| 1410 } | 1171 } |
| 1411 | 1172 |
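Aside (not part of this CL): JumpIfTrue/JumpIfFalse compare the accumulator word directly against the boolean constants, so they only fire when the accumulator already holds a genuine boolean; the JumpIfToBoolean* handlers instead run the accumulator through the ToBoolean stub first. The snippet below is a standalone toy rendering of that coercion for a few primitive kinds (objects, symbols, etc. omitted); it is not the stub's implementation.

#include <cmath>
#include <string>

// Toy JS-style truthiness over a simplified value type (hypothetical types,
// not V8's object model).
struct ToyValue {
  enum Kind { kUndefined, kNull, kBoolean, kNumber, kString } kind;
  bool boolean;
  double number;
  std::string string;
};

bool ToyToBoolean(const ToyValue& v) {
  switch (v.kind) {
    case ToyValue::kUndefined:
    case ToyValue::kNull:
      return false;
    case ToyValue::kBoolean:
      return v.boolean;
    case ToyValue::kNumber:
      return v.number != 0 && !std::isnan(v.number);  // 0, -0 and NaN are falsy
    case ToyValue::kString:
      return !v.string.empty();  // only the empty string is falsy
  }
  return false;  // unreachable; silences -Wreturn-type
}
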
| 1412 | 1173 // JumpIfToBooleanTrueConstant <idx> |
| 1413 // JumpIfToBooleanTrueConstant <idx8> | |
| 1414 // | 1174 // |
| 1415 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1175 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1416 // if the object referenced by the accumulator is true when the object is cast | 1176 // if the object referenced by the accumulator is true when the object is cast |
| 1417 // to boolean. | 1177 // to boolean. |
| 1418 void Interpreter::DoJumpIfToBooleanTrueConstant( | 1178 void Interpreter::DoJumpIfToBooleanTrueConstant( |
| 1419 InterpreterAssembler* assembler) { | 1179 InterpreterAssembler* assembler) { |
| 1420 Callable callable = CodeFactory::ToBoolean(isolate_); | 1180 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1421 Node* target = __ HeapConstant(callable.code()); | 1181 Node* target = __ HeapConstant(callable.code()); |
| 1422 Node* accumulator = __ GetAccumulator(); | 1182 Node* accumulator = __ GetAccumulator(); |
| 1423 Node* context = __ GetContext(); | 1183 Node* context = __ GetContext(); |
| 1424 Node* to_boolean_value = | 1184 Node* to_boolean_value = |
| 1425 __ CallStub(callable.descriptor(), target, context, accumulator); | 1185 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1426 Node* index = __ BytecodeOperandIdx(0); | 1186 Node* index = __ BytecodeOperandIdx(0); |
| 1427 Node* constant = __ LoadConstantPoolEntry(index); | 1187 Node* constant = __ LoadConstantPoolEntry(index); |
| 1428 Node* relative_jump = __ SmiUntag(constant); | 1188 Node* relative_jump = __ SmiUntag(constant); |
| 1429 Node* true_value = __ BooleanConstant(true); | 1189 Node* true_value = __ BooleanConstant(true); |
| 1430 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); | 1190 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); |
| 1431 } | 1191 } |
| 1432 | 1192 |
| 1433 | 1193 // JumpIfToBooleanFalse <imm> |
| 1434 // JumpIfToBooleanTrueConstantWide <idx16> | |
| 1435 // | |
| 1436 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1437 // if the object referenced by the accumulator is true when the object is cast | |
| 1438 // to boolean. | |
| 1439 void Interpreter::DoJumpIfToBooleanTrueConstantWide( | |
| 1440 InterpreterAssembler* assembler) { | |
| 1441 DoJumpIfToBooleanTrueConstant(assembler); | |
| 1442 } | |
| 1443 | |
| 1444 | |
| 1445 // JumpIfToBooleanFalse <imm8> | |
| 1446 // | 1194 // |
| 1447 // Jump by number of bytes represented by an immediate operand if the object | 1195 // Jump by number of bytes represented by an immediate operand if the object |
| 1448 // referenced by the accumulator is false when the object is cast to boolean. | 1196 // referenced by the accumulator is false when the object is cast to boolean. |
| 1449 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { | 1197 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { |
| 1450 Callable callable = CodeFactory::ToBoolean(isolate_); | 1198 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1451 Node* target = __ HeapConstant(callable.code()); | 1199 Node* target = __ HeapConstant(callable.code()); |
| 1452 Node* accumulator = __ GetAccumulator(); | 1200 Node* accumulator = __ GetAccumulator(); |
| 1453 Node* context = __ GetContext(); | 1201 Node* context = __ GetContext(); |
| 1454 Node* to_boolean_value = | 1202 Node* to_boolean_value = |
| 1455 __ CallStub(callable.descriptor(), target, context, accumulator); | 1203 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1456 Node* relative_jump = __ BytecodeOperandImm(0); | 1204 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1457 Node* false_value = __ BooleanConstant(false); | 1205 Node* false_value = __ BooleanConstant(false); |
| 1458 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); | 1206 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); |
| 1459 } | 1207 } |
| 1460 | 1208 |
| 1461 | 1209 // JumpIfToBooleanFalseConstant <idx> |
| 1462 // JumpIfToBooleanFalseConstant <idx8> | |
| 1463 // | 1210 // |
| 1464 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1211 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1465 // if the object referenced by the accumulator is false when the object is cast | 1212 // if the object referenced by the accumulator is false when the object is cast |
| 1466 // to boolean. | 1213 // to boolean. |
| 1467 void Interpreter::DoJumpIfToBooleanFalseConstant( | 1214 void Interpreter::DoJumpIfToBooleanFalseConstant( |
| 1468 InterpreterAssembler* assembler) { | 1215 InterpreterAssembler* assembler) { |
| 1469 Callable callable = CodeFactory::ToBoolean(isolate_); | 1216 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1470 Node* target = __ HeapConstant(callable.code()); | 1217 Node* target = __ HeapConstant(callable.code()); |
| 1471 Node* accumulator = __ GetAccumulator(); | 1218 Node* accumulator = __ GetAccumulator(); |
| 1472 Node* context = __ GetContext(); | 1219 Node* context = __ GetContext(); |
| 1473 Node* to_boolean_value = | 1220 Node* to_boolean_value = |
| 1474 __ CallStub(callable.descriptor(), target, context, accumulator); | 1221 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1475 Node* index = __ BytecodeOperandIdx(0); | 1222 Node* index = __ BytecodeOperandIdx(0); |
| 1476 Node* constant = __ LoadConstantPoolEntry(index); | 1223 Node* constant = __ LoadConstantPoolEntry(index); |
| 1477 Node* relative_jump = __ SmiUntag(constant); | 1224 Node* relative_jump = __ SmiUntag(constant); |
| 1478 Node* false_value = __ BooleanConstant(false); | 1225 Node* false_value = __ BooleanConstant(false); |
| 1479 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); | 1226 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); |
| 1480 } | 1227 } |
| 1481 | 1228 |
| 1482 | 1229 // JumpIfNull <imm> |
| 1483 // JumpIfToBooleanFalseConstantWide <idx16> | |
| 1484 // | |
| 1485 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1486 // if the object referenced by the accumulator is false when the object is cast | |
| 1487 // to boolean. | |
| 1488 void Interpreter::DoJumpIfToBooleanFalseConstantWide( | |
| 1489 InterpreterAssembler* assembler) { | |
| 1490 DoJumpIfToBooleanFalseConstant(assembler); | |
| 1491 } | |
| 1492 | |
| 1493 | |
| 1494 // JumpIfNull <imm8> | |
| 1495 // | 1230 // |
| 1496 // Jump by number of bytes represented by an immediate operand if the object | 1231 // Jump by number of bytes represented by an immediate operand if the object |
| 1497 // referenced by the accumulator is the null constant. | 1232 // referenced by the accumulator is the null constant. |
| 1498 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { | 1233 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { |
| 1499 Node* accumulator = __ GetAccumulator(); | 1234 Node* accumulator = __ GetAccumulator(); |
| 1500 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 1235 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 1501 Node* relative_jump = __ BytecodeOperandImm(0); | 1236 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1502 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 1237 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 1503 } | 1238 } |
| 1504 | 1239 |
| 1505 | 1240 // JumpIfNullConstant <idx> |
| 1506 // JumpIfNullConstant <idx8> | |
| 1507 // | 1241 // |
| 1508 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1242 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1509 // if the object referenced by the accumulator is the null constant. | 1243 // if the object referenced by the accumulator is the null constant. |
| 1510 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { | 1244 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { |
| 1511 Node* accumulator = __ GetAccumulator(); | 1245 Node* accumulator = __ GetAccumulator(); |
| 1512 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 1246 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 1513 Node* index = __ BytecodeOperandIdx(0); | 1247 Node* index = __ BytecodeOperandIdx(0); |
| 1514 Node* constant = __ LoadConstantPoolEntry(index); | 1248 Node* constant = __ LoadConstantPoolEntry(index); |
| 1515 Node* relative_jump = __ SmiUntag(constant); | 1249 Node* relative_jump = __ SmiUntag(constant); |
| 1516 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 1250 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 1517 } | 1251 } |
| 1518 | 1252 |
| 1519 | 1253 // JumpIfUndefined <imm> |
| 1520 // JumpIfNullConstantWide <idx16> | |
| 1521 // | |
| 1522 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1523 // if the object referenced by the accumulator is the null constant. | |
| 1524 void Interpreter::DoJumpIfNullConstantWide(InterpreterAssembler* assembler) { | |
| 1525 DoJumpIfNullConstant(assembler); | |
| 1526 } | |
| 1527 | |
| 1528 // JumpIfUndefined <imm8> | |
| 1529 // | 1254 // |
| 1530 // Jump by number of bytes represented by an immediate operand if the object | 1255 // Jump by number of bytes represented by an immediate operand if the object |
| 1531 // referenced by the accumulator is the undefined constant. | 1256 // referenced by the accumulator is the undefined constant. |
| 1532 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { | 1257 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { |
| 1533 Node* accumulator = __ GetAccumulator(); | 1258 Node* accumulator = __ GetAccumulator(); |
| 1534 Node* undefined_value = | 1259 Node* undefined_value = |
| 1535 __ HeapConstant(isolate_->factory()->undefined_value()); | 1260 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 1536 Node* relative_jump = __ BytecodeOperandImm(0); | 1261 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1537 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 1262 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 1538 } | 1263 } |
| 1539 | 1264 |
| 1540 | 1265 // JumpIfUndefinedConstant <idx> |
| 1541 // JumpIfUndefinedConstant <idx8> | |
| 1542 // | 1266 // |
| 1543 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1267 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1544 // if the object referenced by the accumulator is the undefined constant. | 1268 // if the object referenced by the accumulator is the undefined constant. |
| 1545 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { | 1269 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { |
| 1546 Node* accumulator = __ GetAccumulator(); | 1270 Node* accumulator = __ GetAccumulator(); |
| 1547 Node* undefined_value = | 1271 Node* undefined_value = |
| 1548 __ HeapConstant(isolate_->factory()->undefined_value()); | 1272 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 1549 Node* index = __ BytecodeOperandIdx(0); | 1273 Node* index = __ BytecodeOperandIdx(0); |
| 1550 Node* constant = __ LoadConstantPoolEntry(index); | 1274 Node* constant = __ LoadConstantPoolEntry(index); |
| 1551 Node* relative_jump = __ SmiUntag(constant); | 1275 Node* relative_jump = __ SmiUntag(constant); |
| 1552 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 1276 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 1553 } | 1277 } |
| 1554 | 1278 |
| 1555 | 1279 // JumpIfNotHole <imm> |
| 1556 // JumpIfUndefinedConstantWide <idx16> | |
| 1557 // | |
| 1558 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1559 // if the object referenced by the accumulator is the undefined constant. | |
| 1560 void Interpreter::DoJumpIfUndefinedConstantWide( | |
| 1561 InterpreterAssembler* assembler) { | |
| 1562 DoJumpIfUndefinedConstant(assembler); | |
| 1563 } | |
| 1564 | |
| 1565 // JumpIfNotHole <imm8> | |
| 1566 // | 1280 // |
| 1567 // Jump by number of bytes represented by an immediate operand if the object | 1281 // Jump by number of bytes represented by an immediate operand if the object |
| 1568 // referenced by the accumulator is not the hole. | 1282 // referenced by the accumulator is not the hole. |
| 1569 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { | 1283 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { |
| 1570 Node* accumulator = __ GetAccumulator(); | 1284 Node* accumulator = __ GetAccumulator(); |
| 1571 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 1285 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 1572 Node* relative_jump = __ BytecodeOperandImm(0); | 1286 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1573 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 1287 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 1574 } | 1288 } |
| 1575 | 1289 |
| 1576 // JumpIfNotHoleConstant <idx8> | 1290 // JumpIfNotHoleConstant <idx> |
| 1577 // | 1291 // |
| 1578 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1292 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1579 // if the object referenced by the accumulator is not the hole constant. | 1293 // if the object referenced by the accumulator is not the hole constant. |
| 1580 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { | 1294 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { |
| 1581 Node* accumulator = __ GetAccumulator(); | 1295 Node* accumulator = __ GetAccumulator(); |
| 1582 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 1296 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 1583 Node* index = __ BytecodeOperandIdx(0); | 1297 Node* index = __ BytecodeOperandIdx(0); |
| 1584 Node* constant = __ LoadConstantPoolEntry(index); | 1298 Node* constant = __ LoadConstantPoolEntry(index); |
| 1585 Node* relative_jump = __ SmiUntag(constant); | 1299 Node* relative_jump = __ SmiUntag(constant); |
| 1586 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 1300 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 1587 } | 1301 } |
| 1588 | 1302 |
| 1589 // JumpIfNotHoleConstantWide <idx16> | |
| 1590 // | |
| 1591 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1592 // if the object referenced by the accumulator is not the hole constant. | |
| 1593 void Interpreter::DoJumpIfNotHoleConstantWide(InterpreterAssembler* assembler) { | |
| 1594 DoJumpIfNotHoleConstant(assembler); | |
| 1595 } | |
| 1596 | |
| 1597 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, | 1303 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, |
| 1598 InterpreterAssembler* assembler) { | 1304 InterpreterAssembler* assembler) { |
| 1599 Node* index = __ BytecodeOperandIdx(0); | 1305 Node* index = __ BytecodeOperandIdx(0); |
| 1600 Node* constant_elements = __ LoadConstantPoolEntry(index); | 1306 Node* constant_elements = __ LoadConstantPoolEntry(index); |
| 1601 Node* literal_index_raw = __ BytecodeOperandIdx(1); | 1307 Node* literal_index_raw = __ BytecodeOperandIdx(1); |
| 1602 Node* literal_index = __ SmiTag(literal_index_raw); | 1308 Node* literal_index = __ SmiTag(literal_index_raw); |
| 1603 Node* flags_raw = __ BytecodeOperandImm(2); | 1309 Node* flags_raw = __ BytecodeOperandFlag(2); |
| 1604 Node* flags = __ SmiTag(flags_raw); | 1310 Node* flags = __ SmiTag(flags_raw); |
| 1605 Node* closure = __ LoadRegister(Register::function_closure()); | 1311 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1606 Node* context = __ GetContext(); | 1312 Node* context = __ GetContext(); |
| 1607 Node* result = __ CallRuntime(function_id, context, closure, literal_index, | 1313 Node* result = __ CallRuntime(function_id, context, closure, literal_index, |
| 1608 constant_elements, flags); | 1314 constant_elements, flags); |
| 1609 __ SetAccumulator(result); | 1315 __ SetAccumulator(result); |
| 1610 __ Dispatch(); | 1316 __ Dispatch(); |
| 1611 } | 1317 } |
| 1612 | 1318 |
| 1613 | 1319 |
| 1614 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> | 1320 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> |
| 1615 // | 1321 // |
| 1616 // Creates a regular expression literal for literal index <literal_idx> with | 1322 // Creates a regular expression literal for literal index <literal_idx> with |
| 1617 // <flags> and the pattern in <pattern_idx>. | 1323 // <flags> and the pattern in <pattern_idx>. |
| 1618 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { | 1324 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { |
| 1619 Callable callable = CodeFactory::FastCloneRegExp(isolate_); | 1325 Callable callable = CodeFactory::FastCloneRegExp(isolate_); |
| 1620 Node* target = __ HeapConstant(callable.code()); | 1326 Node* target = __ HeapConstant(callable.code()); |
| 1621 Node* index = __ BytecodeOperandIdx(0); | 1327 Node* index = __ BytecodeOperandIdx(0); |
| 1622 Node* pattern = __ LoadConstantPoolEntry(index); | 1328 Node* pattern = __ LoadConstantPoolEntry(index); |
| 1623 Node* literal_index_raw = __ BytecodeOperandIdx(1); | 1329 Node* literal_index_raw = __ BytecodeOperandIdx(1); |
| 1624 Node* literal_index = __ SmiTag(literal_index_raw); | 1330 Node* literal_index = __ SmiTag(literal_index_raw); |
| 1625 Node* flags_raw = __ BytecodeOperandImm(2); | 1331 Node* flags_raw = __ BytecodeOperandFlag(2); |
| 1626 Node* flags = __ SmiTag(flags_raw); | 1332 Node* flags = __ SmiTag(flags_raw); |
| 1627 Node* closure = __ LoadRegister(Register::function_closure()); | 1333 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1628 Node* context = __ GetContext(); | 1334 Node* context = __ GetContext(); |
| 1629 Node* result = __ CallStub(callable.descriptor(), target, context, closure, | 1335 Node* result = __ CallStub(callable.descriptor(), target, context, closure, |
| 1630 literal_index, pattern, flags); | 1336 literal_index, pattern, flags); |
| 1631 __ SetAccumulator(result); | 1337 __ SetAccumulator(result); |
| 1632 __ Dispatch(); | 1338 __ Dispatch(); |
| 1633 } | 1339 } |
| 1634 | 1340 |
| 1635 | |
| 1636 // CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags> | |
| 1637 // | |
| 1638 // Creates a regular expression literal for literal index <literal_idx> with | |
| 1639 // <flags> and the pattern in <pattern_idx>. | |
| 1640 void Interpreter::DoCreateRegExpLiteralWide(InterpreterAssembler* assembler) { | |
| 1641 DoCreateRegExpLiteral(assembler); | |
| 1642 } | |
| 1643 | |
| 1644 | |
| 1645 // CreateArrayLiteral <element_idx> <literal_idx> <flags> | 1341 // CreateArrayLiteral <element_idx> <literal_idx> <flags> |
| 1646 // | 1342 // |
| 1647 // Creates an array literal for literal index <literal_idx> with flags <flags> | 1343 // Creates an array literal for literal index <literal_idx> with flags <flags> |
| 1648 // and constant elements in <element_idx>. | 1344 // and constant elements in <element_idx>. |
| 1649 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { | 1345 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { |
| 1650 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); | 1346 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); |
| 1651 } | 1347 } |
| 1652 | 1348 |
| 1653 | |
| 1654 // CreateArrayLiteralWide <element_idx> <literal_idx> <flags> | |
| 1655 // | |
| 1656 // Creates an array literal for literal index <literal_idx> with flags <flags> | |
| 1657 // and constant elements in <element_idx>. | |
| 1658 void Interpreter::DoCreateArrayLiteralWide(InterpreterAssembler* assembler) { | |
| 1659 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); | |
| 1660 } | |
| 1661 | |
| 1662 | |
| 1663 // CreateObjectLiteral <element_idx> <literal_idx> <flags> | 1349 // CreateObjectLiteral <element_idx> <literal_idx> <flags> |
| 1664 // | 1350 // |
| 1665 // Creates an object literal for literal index <literal_idx> with flags <flags> | 1351 // Creates an object literal for literal index <literal_idx> with flags <flags> |
| 1666 // and constant elements in <element_idx>. | 1352 // and constant elements in <element_idx>. |
| 1667 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { | 1353 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { |
| 1668 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); | 1354 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); |
| 1669 } | 1355 } |
| 1670 | 1356 |
| 1671 | |
| 1672 // CreateObjectLiteralWide <element_idx> <literal_idx> <flags> | |
| 1673 // | |
| 1674 // Creates an object literal for literal index <literal_idx> with flags <flags> | |
| 1675 // and constant elements in <element_idx>. | |
| 1676 void Interpreter::DoCreateObjectLiteralWide(InterpreterAssembler* assembler) { | |
| 1677 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); | |
| 1678 } | |
| 1679 | |
| 1680 | |
| 1681 // CreateClosure <index> <tenured> | 1357 // CreateClosure <index> <tenured> |
| 1682 // | 1358 // |
| 1683 // Creates a new closure for SharedFunctionInfo at position |index| in the | 1359 // Creates a new closure for SharedFunctionInfo at position |index| in the |
| 1684 // constant pool and with the PretenureFlag <tenured>. | 1360 // constant pool and with the PretenureFlag <tenured>. |
| 1685 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { | 1361 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { |
| 1686 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of | 1362 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of |
| 1687 // calling into the runtime. | 1363 // calling into the runtime. |
| 1688 Node* index = __ BytecodeOperandIdx(0); | 1364 Node* index = __ BytecodeOperandIdx(0); |
| 1689 Node* shared = __ LoadConstantPoolEntry(index); | 1365 Node* shared = __ LoadConstantPoolEntry(index); |
| 1690 Node* tenured_raw = __ BytecodeOperandImm(1); | 1366 Node* tenured_raw = __ BytecodeOperandFlag(1); |
| 1691 Node* tenured = __ SmiTag(tenured_raw); | 1367 Node* tenured = __ SmiTag(tenured_raw); |
| 1692 Node* context = __ GetContext(); | 1368 Node* context = __ GetContext(); |
| 1693 Node* result = | 1369 Node* result = |
| 1694 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); | 1370 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); |
| 1695 __ SetAccumulator(result); | 1371 __ SetAccumulator(result); |
| 1696 __ Dispatch(); | 1372 __ Dispatch(); |
| 1697 } | 1373 } |
| 1698 | 1374 |
| 1699 | |
| 1700 // CreateClosureWide <index> <tenured> | |
| 1701 // | |
| 1702 // Creates a new closure for SharedFunctionInfo at position |index| in the | |
| 1703 // constant pool and with the PretenureFlag <tenured>. | |
| 1704 void Interpreter::DoCreateClosureWide(InterpreterAssembler* assembler) { | |
| 1705 return DoCreateClosure(assembler); | |
| 1706 } | |
| 1707 | |
| 1708 | |
| 1709 // CreateMappedArguments | 1375 // CreateMappedArguments |
| 1710 // | 1376 // |
| 1711 // Creates a new mapped arguments object. | 1377 // Creates a new mapped arguments object. |
| 1712 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { | 1378 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { |
| 1713 Node* closure = __ LoadRegister(Register::function_closure()); | 1379 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1714 Node* context = __ GetContext(); | 1380 Node* context = __ GetContext(); |
| 1715 Node* result = | 1381 Node* result = |
| 1716 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); | 1382 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); |
| 1717 __ SetAccumulator(result); | 1383 __ SetAccumulator(result); |
| 1718 __ Dispatch(); | 1384 __ Dispatch(); |
| (...skipping 101 matching lines...) |
| 1820 // 0 == cache_type, 1 == cache_array, 2 == cache_length | 1486 // 0 == cache_type, 1 == cache_array, 2 == cache_length |
| 1821 Node* output_register = __ BytecodeOperandReg(0); | 1487 Node* output_register = __ BytecodeOperandReg(0); |
| 1822 for (int i = 0; i < 3; i++) { | 1488 for (int i = 0; i < 3; i++) { |
| 1823 Node* cache_info = __ Projection(i, result_triple); | 1489 Node* cache_info = __ Projection(i, result_triple); |
| 1824 __ StoreRegister(cache_info, output_register); | 1490 __ StoreRegister(cache_info, output_register); |
| 1825 output_register = __ NextRegister(output_register); | 1491 output_register = __ NextRegister(output_register); |
| 1826 } | 1492 } |
| 1827 __ Dispatch(); | 1493 __ Dispatch(); |
| 1828 } | 1494 } |
| 1829 | 1495 |
| 1830 | |
| 1831 // ForInPrepareWide <cache_info_triple> | |
| 1832 // | |
| 1833 // Returns state for for..in loop execution based on the object in the | |
| 1834 // accumulator. The result is output in registers |cache_info_triple| to | |
| 1835 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, | |
| 1836 // and cache_length respectively. | |
| 1837 void Interpreter::DoForInPrepareWide(InterpreterAssembler* assembler) { | |
| 1838 DoForInPrepare(assembler); | |
| 1839 } | |
| 1840 | |
| 1841 | |
| 1842 // ForInNext <receiver> <index> <cache_info_pair> | 1496 // ForInNext <receiver> <index> <cache_info_pair> |
| 1843 // | 1497 // |
| 1844 // Returns the next enumerable property in the accumulator. | 1498 // Returns the next enumerable property in the accumulator. |
| 1845 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { | 1499 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { |
| 1846 Node* receiver_reg = __ BytecodeOperandReg(0); | 1500 Node* receiver_reg = __ BytecodeOperandReg(0); |
| 1847 Node* receiver = __ LoadRegister(receiver_reg); | 1501 Node* receiver = __ LoadRegister(receiver_reg); |
| 1848 Node* index_reg = __ BytecodeOperandReg(1); | 1502 Node* index_reg = __ BytecodeOperandReg(1); |
| 1849 Node* index = __ LoadRegister(index_reg); | 1503 Node* index = __ LoadRegister(index_reg); |
| 1850 Node* cache_type_reg = __ BytecodeOperandReg(2); | 1504 Node* cache_type_reg = __ BytecodeOperandReg(2); |
| 1851 Node* cache_type = __ LoadRegister(cache_type_reg); | 1505 Node* cache_type = __ LoadRegister(cache_type_reg); |
| (...skipping 26 matching lines...) |
| 1878 | 1532 |
| 1879 // Need to filter the {key} for the {receiver}. | 1533 // Need to filter the {key} for the {receiver}. |
| 1880 Node* context = __ GetContext(); | 1534 Node* context = __ GetContext(); |
| 1881 Node* result = | 1535 Node* result = |
| 1882 __ CallRuntime(Runtime::kForInFilter, context, receiver, key); | 1536 __ CallRuntime(Runtime::kForInFilter, context, receiver, key); |
| 1883 __ SetAccumulator(result); | 1537 __ SetAccumulator(result); |
| 1884 __ Dispatch(); | 1538 __ Dispatch(); |
| 1885 } | 1539 } |
| 1886 } | 1540 } |
| 1887 | 1541 |
| 1888 | |
| 1889 // ForInNextWide <receiver> <index> <cache_info_pair> | |
| 1890 // | |
| 1890 // Returns the next enumerable property in the accumulator. | |
| 1892 void Interpreter::DoForInNextWide(InterpreterAssembler* assembler) { | |
| 1893 return DoForInNext(assembler); | |
| 1894 } | |
| 1895 | |
| 1896 | |
| 1897 // ForInDone <index> <cache_length> | 1542 // ForInDone <index> <cache_length> |
| 1898 // | 1543 // |
| 1899 // Returns true if the end of the enumerable properties has been reached. | 1544 // Returns true if the end of the enumerable properties has been reached. |
| 1900 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { | 1545 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { |
| 1901 // TODO(oth): Implement directly rather than making a runtime call. | 1546 // TODO(oth): Implement directly rather than making a runtime call. |
| 1902 Node* index_reg = __ BytecodeOperandReg(0); | 1547 Node* index_reg = __ BytecodeOperandReg(0); |
| 1903 Node* index = __ LoadRegister(index_reg); | 1548 Node* index = __ LoadRegister(index_reg); |
| 1904 Node* cache_length_reg = __ BytecodeOperandReg(1); | 1549 Node* cache_length_reg = __ BytecodeOperandReg(1); |
| 1905 Node* cache_length = __ LoadRegister(cache_length_reg); | 1550 Node* cache_length = __ LoadRegister(cache_length_reg); |
| 1906 Node* context = __ GetContext(); | 1551 Node* context = __ GetContext(); |
| 1907 Node* result = | 1552 Node* result = |
| 1908 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); | 1553 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); |
| 1909 __ SetAccumulator(result); | 1554 __ SetAccumulator(result); |
| 1910 __ Dispatch(); | 1555 __ Dispatch(); |
| 1911 } | 1556 } |
| 1912 | 1557 |
| 1913 | |
| 1914 // ForInStep <index> | 1558 // ForInStep <index> |
| 1915 // | 1559 // |
| 1916 // Increments the loop counter in register |index| and stores the result | 1560 // Increments the loop counter in register |index| and stores the result |
| 1917 // in the accumulator. | 1561 // in the accumulator. |
| 1918 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { | 1562 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { |
| 1919 Node* index_reg = __ BytecodeOperandReg(0); | 1563 Node* index_reg = __ BytecodeOperandReg(0); |
| 1920 Node* index = __ LoadRegister(index_reg); | 1564 Node* index = __ LoadRegister(index_reg); |
| 1921 Node* one = __ SmiConstant(Smi::FromInt(1)); | 1565 Node* one = __ SmiConstant(Smi::FromInt(1)); |
| 1922 Node* result = __ SmiAdd(index, one); | 1566 Node* result = __ SmiAdd(index, one); |
| 1923 __ SetAccumulator(result); | 1567 __ SetAccumulator(result); |
| 1924 __ Dispatch(); | 1568 __ Dispatch(); |
| 1925 } | 1569 } |
| 1926 | 1570 |
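Aside (not part of this CL): ForInPrepare, ForInDone, ForInNext and ForInStep together implement the for..in protocol: build the enumeration cache into three consecutive registers, then loop while the index is below cache_length, fetching (and, if needed, filtering) each key. A toy, self-contained rendering of that shape, with plain C++ containers standing in for the cache registers:

#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
  std::map<std::string, int> receiver = {{"a", 1}, {"b", 2}};

  // ForInPrepare: derive cache_array and cache_length from the receiver.
  std::vector<std::string> cache_array;
  for (const auto& entry : receiver) cache_array.push_back(entry.first);
  const size_t cache_length = cache_array.size();

  // ForInDone / ForInNext / ForInStep as an ordinary loop over the cache.
  for (size_t index = 0; index < cache_length; ++index /* ForInStep */) {
    const std::string& key = cache_array[index];  // ForInNext
    // The real ForInNext also filters out keys removed from the receiver
    // after ForInPrepare ran (the Runtime::kForInFilter path above).
    std::cout << key << "\n";
  }
  return 0;
}
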
| 1571 // Wide |
| 1572 // |
| 1573 // Prefix bytecode indicating next bytecode has wide (16-bit) operands. |
| 1574 void Interpreter::DoWide(InterpreterAssembler* assembler) { |
| 1575 __ DispatchWide(OperandScale::kDouble); |
| 1576 } |
| 1577 |
| 1578 // ExtraWide |
| 1579 // |
| 1580 // Prefix bytecode indicating next bytecode has extra-wide (32-bit) operands. |
| 1581 void Interpreter::DoExtraWide(InterpreterAssembler* assembler) { |
| 1582 __ DispatchWide(OperandScale::kQuadruple); |
| 1583 } |
| 1584 |
| 1585 // Illegal |
| 1586 // |
| 1586 // An invalid bytecode that aborts execution if dispatched. |
| 1588 void Interpreter::DoIllegal(InterpreterAssembler* assembler) { |
| 1589 __ Abort(kInvalidBytecode); |
| 1590 } |
| 1591 |
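Aside (not part of this CL): DoWide and DoExtraWide are the replacement for the per-bytecode *Wide handlers deleted elsewhere in this diff: a prefix bytecode selects an operand scale, and only the immediately following bytecode reads its operands at 2x or 4x width. The toy decoder below (hypothetical ToyOp stream, no sign extension or bounds checks) is only meant to show that shape, not V8's dispatch machinery.

#include <cstdint>
#include <cstdio>
#include <vector>

enum class ToyOp : uint8_t { kWide, kExtraWide, kJump };

// Decode a toy bytecode stream in which kWide/kExtraWide widen the operands
// of the next bytecode only.
void ToyDecode(const std::vector<uint8_t>& stream) {
  int scale = 1;  // single-byte operands by default
  for (size_t pc = 0; pc < stream.size();) {
    ToyOp op = static_cast<ToyOp>(stream[pc++]);
    if (op == ToyOp::kWide) { scale = 2; continue; }       // prefix only
    if (op == ToyOp::kExtraWide) { scale = 4; continue; }  // prefix only
    // kJump: read one little-endian operand at the current scale.
    uint32_t operand = 0;
    for (int i = 0; i < scale; ++i) {
      operand |= static_cast<uint32_t>(stream[pc++]) << (8 * i);
    }
    std::printf("Jump %u (operand width %d bytes)\n",
                static_cast<unsigned>(operand), scale);
    scale = 1;  // the prefix applies to one bytecode only
  }
}

int main() {
  // A Wide-prefixed jump with a 16-bit operand (0x0102), then a plain jump.
  ToyDecode({static_cast<uint8_t>(ToyOp::kWide),
             static_cast<uint8_t>(ToyOp::kJump), 0x02, 0x01,
             static_cast<uint8_t>(ToyOp::kJump), 0x05});
  return 0;
}
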
| 1927 } // namespace interpreter | 1592 } // namespace interpreter |
| 1928 } // namespace internal | 1593 } // namespace internal |
| 1929 } // namespace v8 | 1594 } // namespace v8 |