Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter.h" | 5 #include "src/interpreter/interpreter.h" |
| 6 | 6 |
| 7 #include "src/ast/prettyprinter.h" | 7 #include "src/ast/prettyprinter.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/compiler.h" | 9 #include "src/compiler.h" |
| 10 #include "src/factory.h" | 10 #include "src/factory.h" |
| (...skipping 14 matching lines...) | |
| 25 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { | 25 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { |
| 26 memset(&dispatch_table_, 0, sizeof(dispatch_table_)); | 26 memset(&dispatch_table_, 0, sizeof(dispatch_table_)); |
| 27 } | 27 } |
| 28 | 28 |
| 29 void Interpreter::Initialize() { | 29 void Interpreter::Initialize() { |
| 30 DCHECK(FLAG_ignition); | 30 DCHECK(FLAG_ignition); |
| 31 if (IsDispatchTableInitialized()) return; | 31 if (IsDispatchTableInitialized()) return; |
| 32 Zone zone; | 32 Zone zone; |
| 33 HandleScope scope(isolate_); | 33 HandleScope scope(isolate_); |
| 34 | 34 |
| 35 #define GENERATE_CODE(Name, ...) \ | 35 Handle<Code> invalid_bytecode_handler = MakeInvalidBytecodeHandler(&zone); |
| 36 { \ | 36 |
| 37 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name); \ | 37 for (int i = 0; i < kNumberOfWideVariants; ++i) { |
| 38 Do##Name(&assembler); \ | 38 set_operand_scale(1 << i); |
| 39 Handle<Code> code = assembler.GenerateCode(); \ | 39 #define GENERATE_CODE(Name, ...) \ |
| 40 TraceCodegen(code, #Name); \ | 40 { \ |
| 41 LOG_CODE_EVENT(isolate_, \ | 41 int row = i * (kMaxUInt8 + 1) + Bytecodes::ToByte(Bytecode::k##Name); \ |
rmcilroy 2016/03/10 16:45:38: Could you pull this out into a private helper func

oth 2016/03/11 16:26:12: Done.
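The thread above asks for the macro body to move into a private helper. As a hedged illustration only (not the code that landed), a standalone toy model of that shape could look like the following; `ToyInterpreter`, `InstallBytecodeHandler`, `GenerateHandler`, the plain `int` handlers, and the array sizes are invented stand-ins for the real V8 types.

```cpp
// Toy sketch of the "private helper" suggestion: the GENERATE_CODE macro body
// shrinks to a single call that generates one handler and stores it in the
// dispatch table. All names/types here are hypothetical stand-ins.
#include <array>
#include <cstdint>

enum class Bytecode : uint8_t { kLdaZero, kLdaSmi, kCount };

constexpr int kEntriesPerOperandScale = 256;  // kMaxUInt8 + 1 in the CL
constexpr int kNumberOfWideVariants = 3;      // operand scales 1, 2, 4

class ToyInterpreter {
 public:
  void Initialize() {
    for (int i = 0; i < kNumberOfWideVariants; ++i) {
      const int operand_scale = 1 << i;
      // What BYTECODE_LIST(GENERATE_CODE) would expand to, per bytecode:
      InstallBytecodeHandler(Bytecode::kLdaZero, operand_scale, i);
      InstallBytecodeHandler(Bytecode::kLdaSmi, operand_scale, i);
    }
  }

 private:
  // The helper owns the "generate code and store it in the table" step the
  // review comment suggests pulling out of the macro body.
  void InstallBytecodeHandler(Bytecode bytecode, int operand_scale,
                              int scale_index) {
    const int row =
        scale_index * kEntriesPerOperandScale + static_cast<int>(bytecode);
    dispatch_table_[row] = GenerateHandler(bytecode, operand_scale);
  }

  // Stands in for InterpreterAssembler + Do##Name + GenerateCode().
  int GenerateHandler(Bytecode bytecode, int operand_scale) {
    return static_cast<int>(bytecode) * 10 + operand_scale;
  }

  std::array<int, kNumberOfWideVariants * kEntriesPerOperandScale>
      dispatch_table_{};
};
```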
| 42 CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG, \ | 42 if (operand_scale() == 1 || \ |
| 43 AbstractCode::cast(*code), #Name)); \ | 43 Bytecodes::IsBytecodeWithScalableOperands(Bytecode::k##Name) || \ |
| 44 dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] = *code; \ | 44 Bytecodes::IsDebugBreak(Bytecode::k##Name)) { \ |
rmcilroy 2016/03/10 16:45:38: Is the IsDebugBreak necessary? Isn't it only the D

oth 2016/03/11 16:26:12: Good spot, chaff from an earlier variation on hand
| 45 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name, \ | |
| 46 operand_scale()); \ | |
| 47 Do##Name(&assembler); \ | |
| 48 Handle<Code> code = assembler.GenerateCode(); \ | |
| 49 TraceCodegen(code, #Name); \ | |
| 50 LOG_CODE_EVENT(isolate_, \ | |
| 51 CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG, \ | |
| 52 AbstractCode::cast(*code), #Name)); \ | |
| 53 dispatch_table_[row] = *code; \ | |
| 54 } else { \ | |
| 55 dispatch_table_[row] = *invalid_bytecode_handler; \ | |
| 56 } \ | |
| 45 } | 57 } |
| 46 BYTECODE_LIST(GENERATE_CODE) | 58 BYTECODE_LIST(GENERATE_CODE) |
| 47 #undef GENERATE_CODE | 59 #undef GENERATE_CODE |
| 60 for (int p = static_cast<int>(Bytecode::kLast) + 1; p <= kMaxUInt8; ++p) { | |
| 61 dispatch_table_[p] = *invalid_bytecode_handler; | |
rmcilroy 2016/03/10 16:45:38: How about just setting the whole dispatch table wi

oth 2016/03/11 16:26:12: Done in spirit - with the Illegal bytecode there's
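The comment above suggests pre-filling the whole dispatch table; the reply says this was done "in spirit" via an Illegal bytecode. A minimal sketch of the fill-first pattern, using a plain array and integer handler ids rather than the real `Code*` entries:

```cpp
// Fill-first sketch: seed every slot with the invalid/illegal handler, then
// overwrite only the slots that get a real handler, so no trailing loop over
// unused opcodes is required. Types and values are placeholders.
#include <algorithm>
#include <array>

constexpr int kDispatchTableSize = 3 * 256;

void InitializeDispatchTable(
    std::array<int, kDispatchTableSize>& dispatch_table,
    int invalid_bytecode_handler) {
  std::fill(dispatch_table.begin(), dispatch_table.end(),
            invalid_bytecode_handler);
  dispatch_table[0] = 100;  // e.g. the handler generated for bytecode 0.
  dispatch_table[1] = 101;  // ...and so on for each real bytecode/scale.
}
```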
| 62 } | |
| 63 } | |
| 48 } | 64 } |
| 49 | 65 |
| 50 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode) { | 66 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode, int operand_scale) { |
| 51 DCHECK(IsDispatchTableInitialized()); | 67 DCHECK(IsDispatchTableInitialized()); |
| 52 return dispatch_table_[Bytecodes::ToByte(bytecode)]; | 68 size_t index = Bytecodes::ToByte(bytecode); |
| 69 switch (operand_scale) { | |
rmcilroy 2016/03/10 16:45:38: I don't think you need the switch for this, you sh

oth 2016/03/11 16:26:12: Done.
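Per the exchange above, the switch was dropped; presumably the row offset can be computed directly from the operand scale. One hedged way to write that (the landed code may differ) is sketched below with placeholder constants:

```cpp
// Sketch: map (bytecode, operand_scale) to a dispatch-table index without a
// switch, assuming operand_scale is always a power of two (1, 2 or 4).
#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr size_t kEntriesPerOperandScale = 256;  // kMaxUInt8 + 1

size_t DispatchTableIndex(uint8_t bytecode, int operand_scale) {
  assert(operand_scale == 1 || operand_scale == 2 || operand_scale == 4);
  // log2 of the scale selects the wide-variant block: 1 -> 0, 2 -> 1, 4 -> 2.
  size_t block = 0;
  while ((operand_scale >> block) > 1) ++block;
  return block * kEntriesPerOperandScale + bytecode;
}
```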
| 70 case 1: | |
| 71 break; | |
| 72 case 2: | |
| 73 index += kMaxUInt8 + 1; | |
| 74 break; | |
| 75 case 4: | |
| 76 index += 2 * (kMaxUInt8 + 1); | |
| 77 break; | |
| 78 default: | |
| 79 UNREACHABLE(); | |
| 80 } | |
| 81 DCHECK_LE(index, arraysize(dispatch_table_)); | |
| 82 Code* bytecode_handler = dispatch_table_[index]; | |
| 83 Code* invalid_bytecode_handler = | |
| 84 dispatch_table_[static_cast<int>(Bytecode::kLast) + 1]; | |
| 85 return bytecode_handler != invalid_bytecode_handler ? bytecode_handler | |
rmcilroy 2016/03/10 16:45:38: Why is this necessary? We shouldn't ever be asking

oth 2016/03/11 16:26:12: It's not clear the caller should know which entrie

rmcilroy 2016/03/16 11:55:54: Ok, I still think we should be returning the actua

oth 2016/03/17 13:48:38: The underlying issue is just that the logger wants
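The unresolved question above is whether GetBytecodeHandler should hide invalid entries (returning nullptr, as in this patch set) or hand back whatever is stored and let callers such as the logger filter. A toy sketch of the latter shape, with placeholder types rather than the real `Code*` machinery:

```cpp
// Sketch of the alternative raised by the reviewer: return the stored entry
// unconditionally and let the caller compare against the illegal handler.
struct ToyDispatchTable {
  static constexpr int kSize = 3 * 256;
  int illegal_handler = 0;  // toy id; every slot starts out "illegal"
  int entries[kSize] = {};  // zero-initialized, i.e. equal to illegal_handler

  int GetBytecodeHandler(int index) const { return entries[index]; }

  // A caller such as the bytecode-handler logger can filter for itself:
  bool IsRealHandler(int index) const {
    return GetBytecodeHandler(index) != illegal_handler;
  }
};
```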
| 86 : nullptr; | |
| 87 } | |
| 88 | |
| 89 Handle<Code> Interpreter::MakeInvalidBytecodeHandler(Zone* zone) { | |
| 90 InterpreterAssembler assembler(isolate_, zone); | |
| 91 assembler.Abort(kInvalidBytecode); | |
| 92 return assembler.GenerateCode(); | |
| 53 } | 93 } |
| 54 | 94 |
| 55 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { | 95 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { |
| 56 v->VisitPointers( | 96 v->VisitPointers( |
| 57 reinterpret_cast<Object**>(&dispatch_table_[0]), | 97 reinterpret_cast<Object**>(&dispatch_table_[0]), |
| 58 reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize)); | 98 reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize)); |
| 59 } | 99 } |
| 60 | 100 |
| 61 // static | 101 // static |
| 62 int Interpreter::InterruptBudget() { | 102 int Interpreter::InterruptBudget() { |
| (...skipping 65 matching lines...) | |
| 128 | 168 |
| 129 // LdaZero | 169 // LdaZero |
| 130 // | 170 // |
| 131 // Load literal '0' into the accumulator. | 171 // Load literal '0' into the accumulator. |
| 132 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { | 172 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { |
| 133 Node* zero_value = __ NumberConstant(0.0); | 173 Node* zero_value = __ NumberConstant(0.0); |
| 134 __ SetAccumulator(zero_value); | 174 __ SetAccumulator(zero_value); |
| 135 __ Dispatch(); | 175 __ Dispatch(); |
| 136 } | 176 } |
| 137 | 177 |
| 138 | 178 // LdaSmi <imm> |
| 139 // LdaSmi8 <imm8> | |
| 140 // | 179 // |
| 141 // Load an 8-bit integer literal into the accumulator as a Smi. | 180 // Load an integer literal into the accumulator as a Smi. |
| 142 void Interpreter::DoLdaSmi8(InterpreterAssembler* assembler) { | 181 void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) { |
| 143 Node* raw_int = __ BytecodeOperandImm(0); | 182 Node* raw_int = __ BytecodeOperandImm(0); |
| 144 Node* smi_int = __ SmiTag(raw_int); | 183 Node* smi_int = __ SmiTag(raw_int); |
| 145 __ SetAccumulator(smi_int); | 184 __ SetAccumulator(smi_int); |
| 146 __ Dispatch(); | 185 __ Dispatch(); |
| 147 } | 186 } |
| 148 | 187 |
| 149 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) { | 188 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) { |
| 150 Node* index = __ BytecodeOperandIdx(0); | 189 Node* index = __ BytecodeOperandIdx(0); |
| 151 Node* constant = __ LoadConstantPoolEntry(index); | 190 Node* constant = __ LoadConstantPoolEntry(index); |
| 152 __ SetAccumulator(constant); | 191 __ SetAccumulator(constant); |
| 153 __ Dispatch(); | 192 __ Dispatch(); |
| 154 } | 193 } |
| 155 | 194 |
| 156 | 195 |
| 157 // LdaConstant <idx> | 196 // LdaConstant <idx> |
| 158 // | 197 // |
| 159 // Load constant literal at |idx| in the constant pool into the accumulator. | 198 // Load constant literal at |idx| in the constant pool into the accumulator. |
| 160 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { | 199 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { |
| 161 DoLoadConstant(assembler); | 200 DoLoadConstant(assembler); |
| 162 } | 201 } |
| 163 | 202 |
| 164 | |
| 165 // LdaConstantWide <idx> | |
| 166 // | |
| 167 // Load constant literal at |idx| in the constant pool into the accumulator. | |
| 168 void Interpreter::DoLdaConstantWide(InterpreterAssembler* assembler) { | |
| 169 DoLoadConstant(assembler); | |
| 170 } | |
| 171 | |
| 172 | |
| 173 // LdaUndefined | 203 // LdaUndefined |
| 174 // | 204 // |
| 175 // Load Undefined into the accumulator. | 205 // Load Undefined into the accumulator. |
| 176 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { | 206 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { |
| 177 Node* undefined_value = | 207 Node* undefined_value = |
| 178 __ HeapConstant(isolate_->factory()->undefined_value()); | 208 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 179 __ SetAccumulator(undefined_value); | 209 __ SetAccumulator(undefined_value); |
| 180 __ Dispatch(); | 210 __ Dispatch(); |
| 181 } | 211 } |
| 182 | 212 |
| (...skipping 65 matching lines...) | |
| 248 // Stores the value of register <src> to register <dst>. | 278 // Stores the value of register <src> to register <dst>. |
| 249 void Interpreter::DoMov(InterpreterAssembler* assembler) { | 279 void Interpreter::DoMov(InterpreterAssembler* assembler) { |
| 250 Node* src_index = __ BytecodeOperandReg(0); | 280 Node* src_index = __ BytecodeOperandReg(0); |
| 251 Node* src_value = __ LoadRegister(src_index); | 281 Node* src_value = __ LoadRegister(src_index); |
| 252 Node* dst_index = __ BytecodeOperandReg(1); | 282 Node* dst_index = __ BytecodeOperandReg(1); |
| 253 __ StoreRegister(src_value, dst_index); | 283 __ StoreRegister(src_value, dst_index); |
| 254 __ Dispatch(); | 284 __ Dispatch(); |
| 255 } | 285 } |
| 256 | 286 |
| 257 | 287 |
| 258 // MovWide <src> <dst> | |
| 259 // | |
| 260 // Stores the value of register <src> to register <dst>. | |
| 261 void Interpreter::DoMovWide(InterpreterAssembler* assembler) { | |
| 262 DoMov(assembler); | |
| 263 } | |
| 264 | |
| 265 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { | 288 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { |
| 266 // Get the global object. | 289 // Get the global object. |
| 267 Node* context = __ GetContext(); | 290 Node* context = __ GetContext(); |
| 268 Node* native_context = | 291 Node* native_context = |
| 269 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 292 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 270 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); | 293 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); |
| 271 | 294 |
| 272 // Load the global via the LoadIC. | 295 // Load the global via the LoadIC. |
| 273 Node* code_target = __ HeapConstant(ic.code()); | 296 Node* code_target = __ HeapConstant(ic.code()); |
| 274 Node* constant_index = __ BytecodeOperandIdx(0); | 297 Node* constant_index = __ BytecodeOperandIdx(0); |
| (...skipping 20 matching lines...) | |
| 295 // LdaGlobalInsideTypeof <name_index> <slot> | 318 // LdaGlobalInsideTypeof <name_index> <slot> |
| 296 // | 319 // |
| 297 // Load the global with name in constant pool entry <name_index> into the | 320 // Load the global with name in constant pool entry <name_index> into the |
| 298 // accumulator using FeedBackVector slot <slot> inside of a typeof. | 321 // accumulator using FeedBackVector slot <slot> inside of a typeof. |
| 299 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) { | 322 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) { |
| 300 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | 323 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, |
| 301 UNINITIALIZED); | 324 UNINITIALIZED); |
| 302 DoLoadGlobal(ic, assembler); | 325 DoLoadGlobal(ic, assembler); |
| 303 } | 326 } |
| 304 | 327 |
| 305 // LdaGlobalWide <name_index> <slot> | |
| 306 // | |
| 307 // Load the global with name in constant pool entry <name_index> into the | |
| 308 // accumulator using FeedBackVector slot <slot> outside of a typeof. | |
| 309 void Interpreter::DoLdaGlobalWide(InterpreterAssembler* assembler) { | |
| 310 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | |
| 311 UNINITIALIZED); | |
| 312 DoLoadGlobal(ic, assembler); | |
| 313 } | |
| 314 | |
| 315 // LdaGlobalInsideTypeofWide <name_index> <slot> | |
| 316 // | |
| 317 // Load the global with name in constant pool entry <name_index> into the | |
| 318 // accumulator using FeedBackVector slot <slot> inside of a typeof. | |
| 319 void Interpreter::DoLdaGlobalInsideTypeofWide(InterpreterAssembler* assembler) { | |
| 320 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, | |
| 321 UNINITIALIZED); | |
| 322 DoLoadGlobal(ic, assembler); | |
| 323 } | |
| 324 | |
| 325 | |
| 326 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) { | 328 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) { |
| 327 // Get the global object. | 329 // Get the global object. |
| 328 Node* context = __ GetContext(); | 330 Node* context = __ GetContext(); |
| 329 Node* native_context = | 331 Node* native_context = |
| 330 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); | 332 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); |
| 331 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); | 333 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); |
| 332 | 334 |
| 333 // Store the global via the StoreIC. | 335 // Store the global via the StoreIC. |
| 334 Node* code_target = __ HeapConstant(ic.code()); | 336 Node* code_target = __ HeapConstant(ic.code()); |
| 335 Node* constant_index = __ BytecodeOperandIdx(0); | 337 Node* constant_index = __ BytecodeOperandIdx(0); |
| 336 Node* name = __ LoadConstantPoolEntry(constant_index); | 338 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 337 Node* value = __ GetAccumulator(); | 339 Node* value = __ GetAccumulator(); |
| 338 Node* raw_slot = __ BytecodeOperandIdx(1); | 340 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 339 Node* smi_slot = __ SmiTag(raw_slot); | 341 Node* smi_slot = __ SmiTag(raw_slot); |
| 340 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 342 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 341 __ CallStub(ic.descriptor(), code_target, context, global, name, value, | 343 __ CallStub(ic.descriptor(), code_target, context, global, name, value, |
| 342 smi_slot, type_feedback_vector); | 344 smi_slot, type_feedback_vector); |
| 343 | |
| 344 __ Dispatch(); | 345 __ Dispatch(); |
| 345 } | 346 } |
| 346 | 347 |
| 347 | 348 |
| 348 // StaGlobalSloppy <name_index> <slot> | 349 // StaGlobalSloppy <name_index> <slot> |
| 349 // | 350 // |
| 350 // Store the value in the accumulator into the global with name in constant pool | 351 // Store the value in the accumulator into the global with name in constant pool |
| 351 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | 352 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. |
| 352 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { | 353 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { |
| 353 Callable ic = | 354 Callable ic = |
| 354 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | 355 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); |
| 355 DoStoreGlobal(ic, assembler); | 356 DoStoreGlobal(ic, assembler); |
| 356 } | 357 } |
| 357 | 358 |
| 358 | 359 |
| 359 // StaGlobalStrict <name_index> <slot> | 360 // StaGlobalStrict <name_index> <slot> |
| 360 // | 361 // |
| 361 // Store the value in the accumulator into the global with name in constant pool | 362 // Store the value in the accumulator into the global with name in constant pool |
| 362 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | 363 // entry <name_index> using FeedBackVector slot <slot> in strict mode. |
| 363 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { | 364 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { |
| 364 Callable ic = | 365 Callable ic = |
| 365 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 366 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 366 DoStoreGlobal(ic, assembler); | 367 DoStoreGlobal(ic, assembler); |
| 367 } | 368 } |
| 368 | 369 |
| 369 | |
| 370 // StaGlobalSloppyWide <name_index> <slot> | |
| 371 // | |
| 372 // Store the value in the accumulator into the global with name in constant pool | |
| 373 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | |
| 374 void Interpreter::DoStaGlobalSloppyWide(InterpreterAssembler* assembler) { | |
| 375 Callable ic = | |
| 376 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | |
| 377 DoStoreGlobal(ic, assembler); | |
| 378 } | |
| 379 | |
| 380 | |
| 381 // StaGlobalStrictWide <name_index> <slot> | |
| 382 // | |
| 383 // Store the value in the accumulator into the global with name in constant pool | |
| 384 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | |
| 385 void Interpreter::DoStaGlobalStrictWide(InterpreterAssembler* assembler) { | |
| 386 Callable ic = | |
| 387 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | |
| 388 DoStoreGlobal(ic, assembler); | |
| 389 } | |
| 390 | |
| 391 | |
| 392 // LdaContextSlot <context> <slot_index> | 370 // LdaContextSlot <context> <slot_index> |
| 393 // | 371 // |
| 394 // Load the object in |slot_index| of |context| into the accumulator. | 372 // Load the object in |slot_index| of |context| into the accumulator. |
| 395 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { | 373 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { |
| 396 Node* reg_index = __ BytecodeOperandReg(0); | 374 Node* reg_index = __ BytecodeOperandReg(0); |
| 397 Node* context = __ LoadRegister(reg_index); | 375 Node* context = __ LoadRegister(reg_index); |
| 398 Node* slot_index = __ BytecodeOperandIdx(1); | 376 Node* slot_index = __ BytecodeOperandIdx(1); |
| 399 Node* result = __ LoadContextSlot(context, slot_index); | 377 Node* result = __ LoadContextSlot(context, slot_index); |
| 400 __ SetAccumulator(result); | 378 __ SetAccumulator(result); |
| 401 __ Dispatch(); | 379 __ Dispatch(); |
| 402 } | 380 } |
| 403 | 381 |
| 404 | |
| 405 // LdaContextSlotWide <context> <slot_index> | |
| 406 // | |
| 407 // Load the object in |slot_index| of |context| into the accumulator. | |
| 408 void Interpreter::DoLdaContextSlotWide(InterpreterAssembler* assembler) { | |
| 409 DoLdaContextSlot(assembler); | |
| 410 } | |
| 411 | |
| 412 | |
| 413 // StaContextSlot <context> <slot_index> | 382 // StaContextSlot <context> <slot_index> |
| 414 // | 383 // |
| 415 // Stores the object in the accumulator into |slot_index| of |context|. | 384 // Stores the object in the accumulator into |slot_index| of |context|. |
| 416 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { | 385 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { |
| 417 Node* value = __ GetAccumulator(); | 386 Node* value = __ GetAccumulator(); |
| 418 Node* reg_index = __ BytecodeOperandReg(0); | 387 Node* reg_index = __ BytecodeOperandReg(0); |
| 419 Node* context = __ LoadRegister(reg_index); | 388 Node* context = __ LoadRegister(reg_index); |
| 420 Node* slot_index = __ BytecodeOperandIdx(1); | 389 Node* slot_index = __ BytecodeOperandIdx(1); |
| 421 __ StoreContextSlot(context, slot_index, value); | 390 __ StoreContextSlot(context, slot_index, value); |
| 422 __ Dispatch(); | 391 __ Dispatch(); |
| 423 } | 392 } |
| 424 | 393 |
| 425 | |
| 426 // StaContextSlot <context> <slot_index> | |
| 427 // | |
| 428 // Stores the object in the accumulator into |slot_index| of |context|. | |
| 429 void Interpreter::DoStaContextSlotWide(InterpreterAssembler* assembler) { | |
| 430 DoStaContextSlot(assembler); | |
| 431 } | |
| 432 | |
| 433 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, | 394 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, |
| 434 InterpreterAssembler* assembler) { | 395 InterpreterAssembler* assembler) { |
| 435 Node* index = __ BytecodeOperandIdx(0); | 396 Node* index = __ BytecodeOperandIdx(0); |
| 436 Node* name = __ LoadConstantPoolEntry(index); | 397 Node* name = __ LoadConstantPoolEntry(index); |
| 437 Node* context = __ GetContext(); | 398 Node* context = __ GetContext(); |
| 438 Node* result = __ CallRuntime(function_id, context, name); | 399 Node* result = __ CallRuntime(function_id, context, name); |
| 439 __ SetAccumulator(result); | 400 __ SetAccumulator(result); |
| 440 __ Dispatch(); | 401 __ Dispatch(); |
| 441 } | 402 } |
| 442 | 403 |
| 443 | |
| 444 // LdaLookupSlot <name_index> | 404 // LdaLookupSlot <name_index> |
| 445 // | 405 // |
| 446 // Lookup the object with the name in constant pool entry |name_index| | 406 // Lookup the object with the name in constant pool entry |name_index| |
| 447 // dynamically. | 407 // dynamically. |
| 448 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { | 408 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { |
| 449 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); | 409 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); |
| 450 } | 410 } |
| 451 | 411 |
| 452 | |
| 453 // LdaLookupSlotInsideTypeof <name_index> | 412 // LdaLookupSlotInsideTypeof <name_index> |
| 454 // | 413 // |
| 455 // Lookup the object with the name in constant pool entry |name_index| | 414 // Lookup the object with the name in constant pool entry |name_index| |
| 456 // dynamically without causing a NoReferenceError. | 415 // dynamically without causing a NoReferenceError. |
| 457 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { | 416 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { |
| 458 DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | 417 DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); |
| 459 } | 418 } |
| 460 | 419 |
| 461 | |
| 462 // LdaLookupSlotWide <name_index> | |
| 463 // | |
| 464 // Lookup the object with the name in constant pool entry |name_index| | |
| 465 // dynamically. | |
| 466 void Interpreter::DoLdaLookupSlotWide(InterpreterAssembler* assembler) { | |
| 467 DoLdaLookupSlot(assembler); | |
| 468 } | |
| 469 | |
| 470 | |
| 471 // LdaLookupSlotInsideTypeofWide <name_index> | |
| 472 // | |
| 473 // Lookup the object with the name in constant pool entry |name_index| | |
| 474 // dynamically without causing a NoReferenceError. | |
| 475 void Interpreter::DoLdaLookupSlotInsideTypeofWide( | |
| 476 InterpreterAssembler* assembler) { | |
| 477 DoLdaLookupSlotInsideTypeof(assembler); | |
| 478 } | |
| 479 | |
| 480 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, | 420 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, |
| 481 InterpreterAssembler* assembler) { | 421 InterpreterAssembler* assembler) { |
| 482 Node* value = __ GetAccumulator(); | 422 Node* value = __ GetAccumulator(); |
| 483 Node* index = __ BytecodeOperandIdx(0); | 423 Node* index = __ BytecodeOperandIdx(0); |
| 484 Node* name = __ LoadConstantPoolEntry(index); | 424 Node* name = __ LoadConstantPoolEntry(index); |
| 485 Node* context = __ GetContext(); | 425 Node* context = __ GetContext(); |
| 486 Node* result = __ CallRuntime(is_strict(language_mode) | 426 Node* result = __ CallRuntime(is_strict(language_mode) |
| 487 ? Runtime::kStoreLookupSlot_Strict | 427 ? Runtime::kStoreLookupSlot_Strict |
| 488 : Runtime::kStoreLookupSlot_Sloppy, | 428 : Runtime::kStoreLookupSlot_Sloppy, |
| 489 context, name, value); | 429 context, name, value); |
| 490 __ SetAccumulator(result); | 430 __ SetAccumulator(result); |
| 491 __ Dispatch(); | 431 __ Dispatch(); |
| 492 } | 432 } |
| 493 | 433 |
| 494 | |
| 495 // StaLookupSlotSloppy <name_index> | 434 // StaLookupSlotSloppy <name_index> |
| 496 // | 435 // |
| 497 // Store the object in accumulator to the object with the name in constant | 436 // Store the object in accumulator to the object with the name in constant |
| 498 // pool entry |name_index| in sloppy mode. | 437 // pool entry |name_index| in sloppy mode. |
| 499 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { | 438 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { |
| 500 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); | 439 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); |
| 501 } | 440 } |
| 502 | 441 |
| 503 | 442 |
| 504 // StaLookupSlotStrict <name_index> | 443 // StaLookupSlotStrict <name_index> |
| 505 // | 444 // |
| 506 // Store the object in accumulator to the object with the name in constant | 445 // Store the object in accumulator to the object with the name in constant |
| 507 // pool entry |name_index| in strict mode. | 446 // pool entry |name_index| in strict mode. |
| 508 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { | 447 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { |
| 509 DoStoreLookupSlot(LanguageMode::STRICT, assembler); | 448 DoStoreLookupSlot(LanguageMode::STRICT, assembler); |
| 510 } | 449 } |
| 511 | 450 |
| 512 | |
| 513 // StaLookupSlotSloppyWide <name_index> | |
| 514 // | |
| 515 // Store the object in accumulator to the object with the name in constant | |
| 516 // pool entry |name_index| in sloppy mode. | |
| 517 void Interpreter::DoStaLookupSlotSloppyWide(InterpreterAssembler* assembler) { | |
| 518 DoStaLookupSlotSloppy(assembler); | |
| 519 } | |
| 520 | |
| 521 | |
| 522 // StaLookupSlotStrictWide <name_index> | |
| 523 // | |
| 524 // Store the object in accumulator to the object with the name in constant | |
| 525 // pool entry |name_index| in strict mode. | |
| 526 void Interpreter::DoStaLookupSlotStrictWide(InterpreterAssembler* assembler) { | |
| 527 DoStaLookupSlotStrict(assembler); | |
| 528 } | |
| 529 | |
| 530 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) { | 451 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) { |
| 531 Node* code_target = __ HeapConstant(ic.code()); | 452 Node* code_target = __ HeapConstant(ic.code()); |
| 532 Node* register_index = __ BytecodeOperandReg(0); | 453 Node* register_index = __ BytecodeOperandReg(0); |
| 533 Node* object = __ LoadRegister(register_index); | 454 Node* object = __ LoadRegister(register_index); |
| 534 Node* constant_index = __ BytecodeOperandIdx(1); | 455 Node* constant_index = __ BytecodeOperandIdx(1); |
| 535 Node* name = __ LoadConstantPoolEntry(constant_index); | 456 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 536 Node* raw_slot = __ BytecodeOperandIdx(2); | 457 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 537 Node* smi_slot = __ SmiTag(raw_slot); | 458 Node* smi_slot = __ SmiTag(raw_slot); |
| 538 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 459 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 539 Node* context = __ GetContext(); | 460 Node* context = __ GetContext(); |
| 540 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, | 461 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| 541 name, smi_slot, type_feedback_vector); | 462 name, smi_slot, type_feedback_vector); |
| 542 __ SetAccumulator(result); | 463 __ SetAccumulator(result); |
| 543 __ Dispatch(); | 464 __ Dispatch(); |
| 544 } | 465 } |
| 545 | 466 |
| 546 // LoadIC <object> <name_index> <slot> | 467 // LoadIC <object> <name_index> <slot> |
| 547 // | 468 // |
| 548 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at | 469 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at |
| 549 // constant pool entry <name_index>. | 470 // constant pool entry <name_index>. |
| 550 void Interpreter::DoLoadIC(InterpreterAssembler* assembler) { | 471 void Interpreter::DoLoadIC(InterpreterAssembler* assembler) { |
| 551 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | 472 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, |
| 552 UNINITIALIZED); | 473 UNINITIALIZED); |
| 553 DoLoadIC(ic, assembler); | 474 DoLoadIC(ic, assembler); |
| 554 } | 475 } |
| 555 | 476 |
| 556 // LoadICWide <object> <name_index> <slot> | |
| 557 // | |
| 558 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at | |
| 559 // constant pool entry <name_index>. | |
| 560 void Interpreter::DoLoadICWide(InterpreterAssembler* assembler) { | |
| 561 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, | |
| 562 UNINITIALIZED); | |
| 563 DoLoadIC(ic, assembler); | |
| 564 } | |
| 565 | |
| 566 | |
| 567 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) { | 477 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) { |
| 568 Node* code_target = __ HeapConstant(ic.code()); | 478 Node* code_target = __ HeapConstant(ic.code()); |
| 569 Node* reg_index = __ BytecodeOperandReg(0); | 479 Node* reg_index = __ BytecodeOperandReg(0); |
| 570 Node* object = __ LoadRegister(reg_index); | 480 Node* object = __ LoadRegister(reg_index); |
| 571 Node* name = __ GetAccumulator(); | 481 Node* name = __ GetAccumulator(); |
| 572 Node* raw_slot = __ BytecodeOperandIdx(1); | 482 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 573 Node* smi_slot = __ SmiTag(raw_slot); | 483 Node* smi_slot = __ SmiTag(raw_slot); |
| 574 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 484 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| 575 Node* context = __ GetContext(); | 485 Node* context = __ GetContext(); |
| 576 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, | 486 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| 577 name, smi_slot, type_feedback_vector); | 487 name, smi_slot, type_feedback_vector); |
| 578 __ SetAccumulator(result); | 488 __ SetAccumulator(result); |
| 579 __ Dispatch(); | 489 __ Dispatch(); |
| 580 } | 490 } |
| 581 | 491 |
| 582 // KeyedLoadIC <object> <slot> | 492 // KeyedLoadIC <object> <slot> |
| 583 // | 493 // |
| 584 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key | 494 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key |
| 585 // in the accumulator. | 495 // in the accumulator. |
| 586 void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) { | 496 void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) { |
| 587 Callable ic = | 497 Callable ic = |
| 588 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); | 498 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); |
| 589 DoKeyedLoadIC(ic, assembler); | 499 DoKeyedLoadIC(ic, assembler); |
| 590 } | 500 } |
| 591 | 501 |
| 592 // KeyedLoadICWide <object> <slot> | |
| 593 // | |
| 594 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key | |
| 595 // in the accumulator. | |
| 596 void Interpreter::DoKeyedLoadICWide(InterpreterAssembler* assembler) { | |
| 597 Callable ic = | |
| 598 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); | |
| 599 DoKeyedLoadIC(ic, assembler); | |
| 600 } | |
| 601 | |
| 602 | |
| 603 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { | 502 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { |
| 604 Node* code_target = __ HeapConstant(ic.code()); | 503 Node* code_target = __ HeapConstant(ic.code()); |
| 605 Node* object_reg_index = __ BytecodeOperandReg(0); | 504 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 606 Node* object = __ LoadRegister(object_reg_index); | 505 Node* object = __ LoadRegister(object_reg_index); |
| 607 Node* constant_index = __ BytecodeOperandIdx(1); | 506 Node* constant_index = __ BytecodeOperandIdx(1); |
| 608 Node* name = __ LoadConstantPoolEntry(constant_index); | 507 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 609 Node* value = __ GetAccumulator(); | 508 Node* value = __ GetAccumulator(); |
| 610 Node* raw_slot = __ BytecodeOperandIdx(2); | 509 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 611 Node* smi_slot = __ SmiTag(raw_slot); | 510 Node* smi_slot = __ SmiTag(raw_slot); |
| 612 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 511 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| (...skipping 20 matching lines...) | |
| 633 // | 532 // |
| 634 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and | 533 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and |
| 635 // the name in constant pool entry <name_index> with the value in the | 534 // the name in constant pool entry <name_index> with the value in the |
| 636 // accumulator. | 535 // accumulator. |
| 637 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) { | 536 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) { |
| 638 Callable ic = | 537 Callable ic = |
| 639 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 538 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 640 DoStoreIC(ic, assembler); | 539 DoStoreIC(ic, assembler); |
| 641 } | 540 } |
| 642 | 541 |
| 643 | |
| 644 // StoreICSloppyWide <object> <name_index> <slot> | |
| 645 // | |
| 646 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and | |
| 647 // the name in constant pool entry <name_index> with the value in the | |
| 648 // accumulator. | |
| 649 void Interpreter::DoStoreICSloppyWide(InterpreterAssembler* assembler) { | |
| 650 Callable ic = | |
| 651 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | |
| 652 DoStoreIC(ic, assembler); | |
| 653 } | |
| 654 | |
| 655 | |
| 656 // StoreICStrictWide <object> <name_index> <slot> | |
| 657 // | |
| 658 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and | |
| 659 // the name in constant pool entry <name_index> with the value in the | |
| 660 // accumulator. | |
| 661 void Interpreter::DoStoreICStrictWide(InterpreterAssembler* assembler) { | |
| 662 Callable ic = | |
| 663 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | |
| 664 DoStoreIC(ic, assembler); | |
| 665 } | |
| 666 | |
| 667 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { | 542 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { |
| 668 Node* code_target = __ HeapConstant(ic.code()); | 543 Node* code_target = __ HeapConstant(ic.code()); |
| 669 Node* object_reg_index = __ BytecodeOperandReg(0); | 544 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 670 Node* object = __ LoadRegister(object_reg_index); | 545 Node* object = __ LoadRegister(object_reg_index); |
| 671 Node* name_reg_index = __ BytecodeOperandReg(1); | 546 Node* name_reg_index = __ BytecodeOperandReg(1); |
| 672 Node* name = __ LoadRegister(name_reg_index); | 547 Node* name = __ LoadRegister(name_reg_index); |
| 673 Node* value = __ GetAccumulator(); | 548 Node* value = __ GetAccumulator(); |
| 674 Node* raw_slot = __ BytecodeOperandIdx(2); | 549 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 675 Node* smi_slot = __ SmiTag(raw_slot); | 550 Node* smi_slot = __ SmiTag(raw_slot); |
| 676 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 551 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
| (...skipping 18 matching lines...) | |
| 695 // KeyedStoreICStore <object> <key> <slot> | 570 // KeyedStoreICStore <object> <key> <slot> |
| 696 // | 571 // |
| 697 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object> | 572 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object> |
| 698 // and the key <key> with the value in the accumulator. | 573 // and the key <key> with the value in the accumulator. |
| 699 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) { | 574 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) { |
| 700 Callable ic = | 575 Callable ic = |
| 701 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | 576 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); |
| 702 DoKeyedStoreIC(ic, assembler); | 577 DoKeyedStoreIC(ic, assembler); |
| 703 } | 578 } |
| 704 | 579 |
| 705 | |
| 706 // KeyedStoreICSloppyWide <object> <key> <slot> | |
| 707 // | |
| 708 // Calls the sloppy mode KeyStoreIC at FeedBackVector slot <slot> for <object> | |
| 709 // and the key <key> with the value in the accumulator. | |
| 710 void Interpreter::DoKeyedStoreICSloppyWide(InterpreterAssembler* assembler) { | |
| 711 Callable ic = | |
| 712 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); | |
| 713 DoKeyedStoreIC(ic, assembler); | |
| 714 } | |
| 715 | |
| 716 | |
| 717 // KeyedStoreICStoreWide <object> <key> <slot> | |
| 718 // | |
| 719 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object> | |
| 720 // and the key <key> with the value in the accumulator. | |
| 721 void Interpreter::DoKeyedStoreICStrictWide(InterpreterAssembler* assembler) { | |
| 722 Callable ic = | |
| 723 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); | |
| 724 DoKeyedStoreIC(ic, assembler); | |
| 725 } | |
| 726 | |
| 727 // PushContext <context> | 580 // PushContext <context> |
| 728 // | 581 // |
| 729 // Saves the current context in <context>, and pushes the accumulator as the | 582 // Saves the current context in <context>, and pushes the accumulator as the |
| 730 // new current context. | 583 // new current context. |
| 731 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { | 584 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { |
| 732 Node* reg_index = __ BytecodeOperandReg(0); | 585 Node* reg_index = __ BytecodeOperandReg(0); |
| 733 Node* new_context = __ GetAccumulator(); | 586 Node* new_context = __ GetAccumulator(); |
| 734 Node* old_context = __ GetContext(); | 587 Node* old_context = __ GetContext(); |
| 735 __ StoreRegister(old_context, reg_index); | 588 __ StoreRegister(old_context, reg_index); |
| 736 __ SetContext(new_context); | 589 __ SetContext(new_context); |
| (...skipping 253 matching lines...) | |
| 990 | 843 |
| 991 | 844 |
| 992 // Call <callable> <receiver> <arg_count> | 845 // Call <callable> <receiver> <arg_count> |
| 993 // | 846 // |
| 994 // Call a JSfunction or Callable in |callable| with the |receiver| and | 847 // Call a JSfunction or Callable in |callable| with the |receiver| and |
| 995 // |arg_count| arguments in subsequent registers. | 848 // |arg_count| arguments in subsequent registers. |
| 996 void Interpreter::DoCall(InterpreterAssembler* assembler) { | 849 void Interpreter::DoCall(InterpreterAssembler* assembler) { |
| 997 DoJSCall(assembler, TailCallMode::kDisallow); | 850 DoJSCall(assembler, TailCallMode::kDisallow); |
| 998 } | 851 } |
| 999 | 852 |
| 1000 | |
| 1001 // CallWide <callable> <receiver> <arg_count> | |
| 1002 // | |
| 1003 // Call a JSfunction or Callable in |callable| with the |receiver| and | |
| 1004 // |arg_count| arguments in subsequent registers. | |
| 1005 void Interpreter::DoCallWide(InterpreterAssembler* assembler) { | |
| 1006 DoJSCall(assembler, TailCallMode::kDisallow); | |
| 1007 } | |
| 1008 | |
| 1009 // TailCall <callable> <receiver> <arg_count> | 853 // TailCall <callable> <receiver> <arg_count> |
| 1010 // | 854 // |
| 1011 // Tail call a JSfunction or Callable in |callable| with the |receiver| and | 855 // Tail call a JSfunction or Callable in |callable| with the |receiver| and |
| 1012 // |arg_count| arguments in subsequent registers. | 856 // |arg_count| arguments in subsequent registers. |
| 1013 void Interpreter::DoTailCall(InterpreterAssembler* assembler) { | 857 void Interpreter::DoTailCall(InterpreterAssembler* assembler) { |
| 1014 DoJSCall(assembler, TailCallMode::kAllow); | 858 DoJSCall(assembler, TailCallMode::kAllow); |
| 1015 } | 859 } |
| 1016 | 860 |
| 1017 // TailCallWide <callable> <receiver> <arg_count> | |
| 1018 // | |
| 1019 // Tail call a JSfunction or Callable in |callable| with the |receiver| and | |
| 1020 // |arg_count| arguments in subsequent registers. | |
| 1021 void Interpreter::DoTailCallWide(InterpreterAssembler* assembler) { | |
| 1022 DoJSCall(assembler, TailCallMode::kAllow); | |
| 1023 } | |
| 1024 | |
| 1025 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) { | 861 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) { |
| 1026 Node* function_id = __ BytecodeOperandIdx(0); | 862 Node* function_id = __ BytecodeOperandIdx(0); |
| 1027 Node* first_arg_reg = __ BytecodeOperandReg(1); | 863 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1028 Node* first_arg = __ RegisterLocation(first_arg_reg); | 864 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1029 Node* args_count = __ BytecodeOperandCount(2); | 865 Node* args_count = __ BytecodeOperandCount(2); |
| 1030 Node* context = __ GetContext(); | 866 Node* context = __ GetContext(); |
| 1031 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); | 867 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); |
| 1032 __ SetAccumulator(result); | 868 __ SetAccumulator(result); |
| 1033 __ Dispatch(); | 869 __ Dispatch(); |
| 1034 } | 870 } |
| 1035 | 871 |
| 1036 | 872 |
| 1037 // CallRuntime <function_id> <first_arg> <arg_count> | 873 // CallRuntime <function_id> <first_arg> <arg_count> |
| 1038 // | 874 // |
| 1039 // Call the runtime function |function_id| with the first argument in | 875 // Call the runtime function |function_id| with the first argument in |
| 1040 // register |first_arg| and |arg_count| arguments in subsequent | 876 // register |first_arg| and |arg_count| arguments in subsequent |
| 1041 // registers. | 877 // registers. |
| 1042 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { | 878 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { |
| 1043 DoCallRuntimeCommon(assembler); | 879 DoCallRuntimeCommon(assembler); |
| 1044 } | 880 } |
| 1045 | 881 |
| 1046 | |
| 1047 // CallRuntime <function_id> <first_arg> <arg_count> | |
| 1048 // | |
| 1049 // Call the runtime function |function_id| with the first argument in | |
| 1050 // register |first_arg| and |arg_count| arguments in subsequent | |
| 1051 // registers. | |
| 1052 void Interpreter::DoCallRuntimeWide(InterpreterAssembler* assembler) { | |
| 1053 DoCallRuntimeCommon(assembler); | |
| 1054 } | |
| 1055 | |
| 1056 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) { | 882 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) { |
| 1057 // Call the runtime function. | 883 // Call the runtime function. |
| 1058 Node* function_id = __ BytecodeOperandIdx(0); | 884 Node* function_id = __ BytecodeOperandIdx(0); |
| 1059 Node* first_arg_reg = __ BytecodeOperandReg(1); | 885 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1060 Node* first_arg = __ RegisterLocation(first_arg_reg); | 886 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1061 Node* args_count = __ BytecodeOperandCount(2); | 887 Node* args_count = __ BytecodeOperandCount(2); |
| 1062 Node* context = __ GetContext(); | 888 Node* context = __ GetContext(); |
| 1063 Node* result_pair = | 889 Node* result_pair = |
| 1064 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); | 890 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); |
| 1065 | 891 |
| (...skipping 11 matching lines...) | |
| 1077 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> | 903 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> |
| 1078 // | 904 // |
| 1079 // Call the runtime function |function_id| which returns a pair, with the | 905 // Call the runtime function |function_id| which returns a pair, with the |
| 1080 // first argument in register |first_arg| and |arg_count| arguments in | 906 // first argument in register |first_arg| and |arg_count| arguments in |
| 1081 // subsequent registers. Returns the result in <first_return> and | 907 // subsequent registers. Returns the result in <first_return> and |
| 1082 // <first_return + 1> | 908 // <first_return + 1> |
| 1083 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { | 909 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { |
| 1084 DoCallRuntimeForPairCommon(assembler); | 910 DoCallRuntimeForPairCommon(assembler); |
| 1085 } | 911 } |
| 1086 | 912 |
| 1087 | |
| 1088 // CallRuntimeForPairWide <function_id> <first_arg> <arg_count> <first_return> | |
| 1089 // | |
| 1090 // Call the runtime function |function_id| which returns a pair, with the | |
| 1091 // first argument in register |first_arg| and |arg_count| arguments in | |
| 1092 // subsequent registers. Returns the result in <first_return> and | |
| 1093 // <first_return + 1> | |
| 1094 void Interpreter::DoCallRuntimeForPairWide(InterpreterAssembler* assembler) { | |
| 1095 DoCallRuntimeForPairCommon(assembler); | |
| 1096 } | |
| 1097 | |
| 1098 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { | 913 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { |
| 1099 Node* context_index = __ BytecodeOperandIdx(0); | 914 Node* context_index = __ BytecodeOperandIdx(0); |
| 1100 Node* receiver_reg = __ BytecodeOperandReg(1); | 915 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 1101 Node* first_arg = __ RegisterLocation(receiver_reg); | 916 Node* first_arg = __ RegisterLocation(receiver_reg); |
| 1102 Node* receiver_args_count = __ BytecodeOperandCount(2); | 917 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 1103 Node* receiver_count = __ Int32Constant(1); | 918 Node* receiver_count = __ Int32Constant(1); |
| 1104 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 919 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 1105 | 920 |
| 1106 // Get the function to call from the native context. | 921 // Get the function to call from the native context. |
| 1107 Node* context = __ GetContext(); | 922 Node* context = __ GetContext(); |
| (...skipping 10 matching lines...) | |
| 1118 | 933 |
| 1119 | 934 |
| 1120 // CallJSRuntime <context_index> <receiver> <arg_count> | 935 // CallJSRuntime <context_index> <receiver> <arg_count> |
| 1121 // | 936 // |
| 1122 // Call the JS runtime function that has the |context_index| with the receiver | 937 // Call the JS runtime function that has the |context_index| with the receiver |
| 1123 // in register |receiver| and |arg_count| arguments in subsequent registers. | 938 // in register |receiver| and |arg_count| arguments in subsequent registers. |
| 1124 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { | 939 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { |
| 1125 DoCallJSRuntimeCommon(assembler); | 940 DoCallJSRuntimeCommon(assembler); |
| 1126 } | 941 } |
| 1127 | 942 |
| 1128 | |
| 1129 // CallJSRuntimeWide <context_index> <receiver> <arg_count> | |
| 1130 // | |
| 1131 // Call the JS runtime function that has the |context_index| with the receiver | |
| 1132 // in register |receiver| and |arg_count| arguments in subsequent registers. | |
| 1133 void Interpreter::DoCallJSRuntimeWide(InterpreterAssembler* assembler) { | |
| 1134 DoCallJSRuntimeCommon(assembler); | |
| 1135 } | |
| 1136 | |
| 1137 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { | 943 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { |
| 1138 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); | 944 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); |
| 1139 Node* new_target = __ GetAccumulator(); | 945 Node* new_target = __ GetAccumulator(); |
| 1140 Node* constructor_reg = __ BytecodeOperandReg(0); | 946 Node* constructor_reg = __ BytecodeOperandReg(0); |
| 1141 Node* constructor = __ LoadRegister(constructor_reg); | 947 Node* constructor = __ LoadRegister(constructor_reg); |
| 1142 Node* first_arg_reg = __ BytecodeOperandReg(1); | 948 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 1143 Node* first_arg = __ RegisterLocation(first_arg_reg); | 949 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 1144 Node* args_count = __ BytecodeOperandCount(2); | 950 Node* args_count = __ BytecodeOperandCount(2); |
| 1145 Node* context = __ GetContext(); | 951 Node* context = __ GetContext(); |
| 1146 Node* result = | 952 Node* result = |
| 1147 __ CallConstruct(constructor, context, new_target, first_arg, args_count); | 953 __ CallConstruct(constructor, context, new_target, first_arg, args_count); |
| 1148 __ SetAccumulator(result); | 954 __ SetAccumulator(result); |
| 1149 __ Dispatch(); | 955 __ Dispatch(); |
| 1150 } | 956 } |
| 1151 | 957 |
| 1152 | 958 |
| 1153 // New <constructor> <first_arg> <arg_count> | 959 // New <constructor> <first_arg> <arg_count> |
| 1154 // | 960 // |
| 1155 // Call operator new with |constructor| and the first argument in | 961 // Call operator new with |constructor| and the first argument in |
| 1156 // register |first_arg| and |arg_count| arguments in subsequent | 962 // register |first_arg| and |arg_count| arguments in subsequent |
| 1157 // registers. The new.target is in the accumulator. | 963 // registers. The new.target is in the accumulator. |
| 1158 // | 964 // |
| 1159 void Interpreter::DoNew(InterpreterAssembler* assembler) { | 965 void Interpreter::DoNew(InterpreterAssembler* assembler) { |
| 1160 DoCallConstruct(assembler); | 966 DoCallConstruct(assembler); |
| 1161 } | 967 } |
| 1162 | 968 |
| 1163 | |
| 1164 // NewWide <constructor> <first_arg> <arg_count> | |
| 1165 // | |
| 1166 // Call operator new with |constructor| and the first argument in | |
| 1167 // register |first_arg| and |arg_count| arguments in subsequent | |
| 1168 // registers. The new.target is in the accumulator. | |
| 1169 // | |
| 1170 void Interpreter::DoNewWide(InterpreterAssembler* assembler) { | |
| 1171 DoCallConstruct(assembler); | |
| 1172 } | |
| 1173 | |
| 1174 | |
| 1175 // TestEqual <src> | 969 // TestEqual <src> |
| 1176 // | 970 // |
| 1177 // Test if the value in the <src> register equals the accumulator. | 971 // Test if the value in the <src> register equals the accumulator. |
| 1178 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { | 972 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { |
| 1179 DoBinaryOp(Runtime::kEqual, assembler); | 973 DoBinaryOp(Runtime::kEqual, assembler); |
| 1180 } | 974 } |
| 1181 | 975 |
| 1182 | 976 |
| 1183 // TestNotEqual <src> | 977 // TestNotEqual <src> |
| 1184 // | 978 // |
| (...skipping 89 matching lines...) | |
| 1274 } | 1068 } |
| 1275 | 1069 |
| 1276 | 1070 |
| 1277 // ToObject | 1071 // ToObject |
| 1278 // | 1072 // |
| 1279 // Cast the object referenced by the accumulator to a JSObject. | 1073 // Cast the object referenced by the accumulator to a JSObject. |
| 1280 void Interpreter::DoToObject(InterpreterAssembler* assembler) { | 1074 void Interpreter::DoToObject(InterpreterAssembler* assembler) { |
| 1281 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler); | 1075 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler); |
| 1282 } | 1076 } |
| 1283 | 1077 |
| 1284 | 1078 // Jump <imm> |
| 1285 // Jump <imm8> | |
| 1286 // | 1079 // |
| 1287 // Jump by number of bytes represented by the immediate operand |imm8|. | 1080 // Jump by number of bytes represented by the immediate operand |imm|. |
| 1288 void Interpreter::DoJump(InterpreterAssembler* assembler) { | 1081 void Interpreter::DoJump(InterpreterAssembler* assembler) { |
| 1289 Node* relative_jump = __ BytecodeOperandImm(0); | 1082 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1290 __ Jump(relative_jump); | 1083 __ Jump(relative_jump); |
| 1291 } | 1084 } |
| 1292 | 1085 |
| 1293 | 1086 // JumpConstant <idx> |
| 1294 // JumpConstant <idx8> | |
| 1295 // | 1087 // |
| 1296 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool. | 1088 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool. |
| 1297 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { | 1089 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { |
| 1298 Node* index = __ BytecodeOperandIdx(0); | 1090 Node* index = __ BytecodeOperandIdx(0); |
| 1299 Node* constant = __ LoadConstantPoolEntry(index); | 1091 Node* constant = __ LoadConstantPoolEntry(index); |
| 1300 Node* relative_jump = __ SmiUntag(constant); | 1092 Node* relative_jump = __ SmiUntag(constant); |
| 1301 __ Jump(relative_jump); | 1093 __ Jump(relative_jump); |
| 1302 } | 1094 } |
| 1303 | 1095 |
| 1304 | 1096 // JumpIfTrue <imm> |
| 1305 // JumpConstantWide <idx16> | |
| 1306 // | |
| 1307 // Jump by number of bytes in the Smi in the |idx16| entry in the | |
| 1308 // constant pool. | |
| 1309 void Interpreter::DoJumpConstantWide(InterpreterAssembler* assembler) { | |
| 1310 DoJumpConstant(assembler); | |
| 1311 } | |
| 1312 | |
| 1313 | |
| 1314 // JumpIfTrue <imm8> | |
| 1315 // | 1097 // |
| 1316 // Jump by number of bytes represented by an immediate operand if the | 1098 // Jump by number of bytes represented by an immediate operand if the |
| 1317 // accumulator contains true. | 1099 // accumulator contains true. |
| 1318 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { | 1100 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { |
| 1319 Node* accumulator = __ GetAccumulator(); | 1101 Node* accumulator = __ GetAccumulator(); |
| 1320 Node* relative_jump = __ BytecodeOperandImm(0); | 1102 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1321 Node* true_value = __ BooleanConstant(true); | 1103 Node* true_value = __ BooleanConstant(true); |
| 1322 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 1104 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 1323 } | 1105 } |
| 1324 | 1106 |
| 1325 | 1107 // JumpIfTrueConstant <idx> |
| 1326 // JumpIfTrueConstant <idx8> | |
| 1327 // | 1108 // |
| 1328 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1109 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1329 // if the accumulator contains true. | 1110 // if the accumulator contains true. |
| 1330 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { | 1111 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { |
| 1331 Node* accumulator = __ GetAccumulator(); | 1112 Node* accumulator = __ GetAccumulator(); |
| 1332 Node* index = __ BytecodeOperandIdx(0); | 1113 Node* index = __ BytecodeOperandIdx(0); |
| 1333 Node* constant = __ LoadConstantPoolEntry(index); | 1114 Node* constant = __ LoadConstantPoolEntry(index); |
| 1334 Node* relative_jump = __ SmiUntag(constant); | 1115 Node* relative_jump = __ SmiUntag(constant); |
| 1335 Node* true_value = __ BooleanConstant(true); | 1116 Node* true_value = __ BooleanConstant(true); |
| 1336 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 1117 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 1337 } | 1118 } |
| 1338 | 1119 |
| 1339 | 1120 // JumpIfFalse <imm> |
| 1340 // JumpIfTrueConstantWide <idx16> | |
| 1341 // | |
| 1342 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1343 // if the accumulator contains true. | |
| 1344 void Interpreter::DoJumpIfTrueConstantWide(InterpreterAssembler* assembler) { | |
| 1345 DoJumpIfTrueConstant(assembler); | |
| 1346 } | |
| 1347 | |
| 1348 | |
| 1349 // JumpIfFalse <imm8> | |
| 1350 // | 1121 // |
| 1351 // Jump by number of bytes represented by an immediate operand if the | 1122 // Jump by number of bytes represented by an immediate operand if the |
| 1352 // accumulator contains false. | 1123 // accumulator contains false. |
| 1353 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { | 1124 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { |
| 1354 Node* accumulator = __ GetAccumulator(); | 1125 Node* accumulator = __ GetAccumulator(); |
| 1355 Node* relative_jump = __ BytecodeOperandImm(0); | 1126 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1356 Node* false_value = __ BooleanConstant(false); | 1127 Node* false_value = __ BooleanConstant(false); |
| 1357 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 1128 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 1358 } | 1129 } |
| 1359 | 1130 |
| 1360 | 1131 // JumpIfFalseConstant <idx> |
| 1361 // JumpIfFalseConstant <idx8> | |
| 1362 // | 1132 // |
| 1363 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1133 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1364 // if the accumulator contains false. | 1134 // if the accumulator contains false. |
| 1365 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { | 1135 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { |
| 1366 Node* accumulator = __ GetAccumulator(); | 1136 Node* accumulator = __ GetAccumulator(); |
| 1367 Node* index = __ BytecodeOperandIdx(0); | 1137 Node* index = __ BytecodeOperandIdx(0); |
| 1368 Node* constant = __ LoadConstantPoolEntry(index); | 1138 Node* constant = __ LoadConstantPoolEntry(index); |
| 1369 Node* relative_jump = __ SmiUntag(constant); | 1139 Node* relative_jump = __ SmiUntag(constant); |
| 1370 Node* false_value = __ BooleanConstant(false); | 1140 Node* false_value = __ BooleanConstant(false); |
| 1371 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 1141 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 1372 } | 1142 } |
| 1373 | 1143 |
| 1374 | 1144 // JumpIfToBooleanTrue <imm> |
| 1375 // JumpIfFalseConstantWide <idx16> | |
| 1376 // | |
| 1377 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1378 // if the accumulator contains false. | |
| 1379 void Interpreter::DoJumpIfFalseConstantWide(InterpreterAssembler* assembler) { | |
| 1380 DoJumpIfFalseConstant(assembler); | |
| 1381 } | |
| 1382 | |
| 1383 | |
| 1384 // JumpIfToBooleanTrue <imm8> | |
| 1385 // | 1145 // |
| 1386 // Jump by number of bytes represented by an immediate operand if the object | 1146 // Jump by number of bytes represented by an immediate operand if the object |
| 1387 // referenced by the accumulator is true when the object is cast to boolean. | 1147 // referenced by the accumulator is true when the object is cast to boolean. |
| 1388 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { | 1148 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { |
| 1389 Callable callable = CodeFactory::ToBoolean(isolate_); | 1149 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1390 Node* target = __ HeapConstant(callable.code()); | 1150 Node* target = __ HeapConstant(callable.code()); |
| 1391 Node* accumulator = __ GetAccumulator(); | 1151 Node* accumulator = __ GetAccumulator(); |
| 1392 Node* context = __ GetContext(); | 1152 Node* context = __ GetContext(); |
| 1393 Node* to_boolean_value = | 1153 Node* to_boolean_value = |
| 1394 __ CallStub(callable.descriptor(), target, context, accumulator); | 1154 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1395 Node* relative_jump = __ BytecodeOperandImm(0); | 1155 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1396 Node* true_value = __ BooleanConstant(true); | 1156 Node* true_value = __ BooleanConstant(true); |
| 1397 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); | 1157 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); |
| 1398 } | 1158 } |
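Unlike DoJumpIfTrue, which compares the accumulator word against the canonical true value, DoJumpIfToBooleanTrue first runs the accumulator through the ToBoolean stub and branches on the coerced result, so truthy-but-not-true values also take the jump. As a hedged, stand-alone illustration of that difference, the snippet below implements ECMAScript-style ToBoolean over a toy tagged value; the JsValue type and its fields are invented and unrelated to V8's object model.

```cpp
// Illustrative ToBoolean over a toy tagged value (not V8's object model).
#include <cassert>
#include <cmath>
#include <string>

struct JsValue {
  enum Kind { kUndefined, kNull, kBoolean, kNumber, kString, kObject } kind;
  bool boolean = false;
  double number = 0;
  std::string string;
};

// Roughly follows the ECMAScript ToBoolean table.
bool ToBoolean(const JsValue& v) {
  switch (v.kind) {
    case JsValue::kUndefined:
    case JsValue::kNull:
      return false;
    case JsValue::kBoolean:
      return v.boolean;
    case JsValue::kNumber:
      return v.number != 0 && !std::isnan(v.number);
    case JsValue::kString:
      return !v.string.empty();
    case JsValue::kObject:
      return true;
  }
  return false;
}

int main() {
  JsValue one{JsValue::kNumber};
  one.number = 1;
  // JumpIfTrue-style check: identity with boolean true -> would not jump here.
  bool is_literal_true = (one.kind == JsValue::kBoolean && one.boolean);
  // JumpIfToBooleanTrue-style check: coerce first -> would jump.
  bool is_truthy = ToBoolean(one);
  assert(!is_literal_true && is_truthy);
  return 0;
}
```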
| 1399 | 1159 |
| 1400 | 1160 // JumpIfToBooleanTrueConstant <idx> |
| 1401 // JumpIfToBooleanTrueConstant <idx8> | |
| 1402 // | 1161 // |
| 1403 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1162 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1404 // if the object referenced by the accumulator is true when the object is cast | 1163 // if the object referenced by the accumulator is true when the object is cast |
| 1405 // to boolean. | 1164 // to boolean. |
| 1406 void Interpreter::DoJumpIfToBooleanTrueConstant( | 1165 void Interpreter::DoJumpIfToBooleanTrueConstant( |
| 1407 InterpreterAssembler* assembler) { | 1166 InterpreterAssembler* assembler) { |
| 1408 Callable callable = CodeFactory::ToBoolean(isolate_); | 1167 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1409 Node* target = __ HeapConstant(callable.code()); | 1168 Node* target = __ HeapConstant(callable.code()); |
| 1410 Node* accumulator = __ GetAccumulator(); | 1169 Node* accumulator = __ GetAccumulator(); |
| 1411 Node* context = __ GetContext(); | 1170 Node* context = __ GetContext(); |
| 1412 Node* to_boolean_value = | 1171 Node* to_boolean_value = |
| 1413 __ CallStub(callable.descriptor(), target, context, accumulator); | 1172 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1414 Node* index = __ BytecodeOperandIdx(0); | 1173 Node* index = __ BytecodeOperandIdx(0); |
| 1415 Node* constant = __ LoadConstantPoolEntry(index); | 1174 Node* constant = __ LoadConstantPoolEntry(index); |
| 1416 Node* relative_jump = __ SmiUntag(constant); | 1175 Node* relative_jump = __ SmiUntag(constant); |
| 1417 Node* true_value = __ BooleanConstant(true); | 1176 Node* true_value = __ BooleanConstant(true); |
| 1418 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); | 1177 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); |
| 1419 } | 1178 } |
| 1420 | 1179 |
| 1421 | 1180 // JumpIfToBooleanFalse <imm> |
| 1422 // JumpIfToBooleanTrueConstantWide <idx16> | |
| 1423 // | |
| 1424 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1425 // if the object referenced by the accumulator is true when the object is cast | |
| 1426 // to boolean. | |
| 1427 void Interpreter::DoJumpIfToBooleanTrueConstantWide( | |
| 1428 InterpreterAssembler* assembler) { | |
| 1429 DoJumpIfToBooleanTrueConstant(assembler); | |
| 1430 } | |
| 1431 | |
| 1432 | |
| 1433 // JumpIfToBooleanFalse <imm8> | |
| 1434 // | 1181 // |
| 1435 // Jump by number of bytes represented by an immediate operand if the object | 1182 // Jump by number of bytes represented by an immediate operand if the object |
| 1436 // referenced by the accumulator is false when the object is cast to boolean. | 1183 // referenced by the accumulator is false when the object is cast to boolean. |
| 1437 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { | 1184 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { |
| 1438 Callable callable = CodeFactory::ToBoolean(isolate_); | 1185 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1439 Node* target = __ HeapConstant(callable.code()); | 1186 Node* target = __ HeapConstant(callable.code()); |
| 1440 Node* accumulator = __ GetAccumulator(); | 1187 Node* accumulator = __ GetAccumulator(); |
| 1441 Node* context = __ GetContext(); | 1188 Node* context = __ GetContext(); |
| 1442 Node* to_boolean_value = | 1189 Node* to_boolean_value = |
| 1443 __ CallStub(callable.descriptor(), target, context, accumulator); | 1190 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1444 Node* relative_jump = __ BytecodeOperandImm(0); | 1191 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1445 Node* false_value = __ BooleanConstant(false); | 1192 Node* false_value = __ BooleanConstant(false); |
| 1446 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); | 1193 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); |
| 1447 } | 1194 } |
| 1448 | 1195 |
| 1449 | 1196 // JumpIfToBooleanFalseConstant <idx> |
| 1450 // JumpIfToBooleanFalseConstant <idx8> | |
| 1451 // | 1197 // |
| 1452 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1198 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1453 // if the object referenced by the accumulator is false when the object is cast | 1199 // if the object referenced by the accumulator is false when the object is cast |
| 1454 // to boolean. | 1200 // to boolean. |
| 1455 void Interpreter::DoJumpIfToBooleanFalseConstant( | 1201 void Interpreter::DoJumpIfToBooleanFalseConstant( |
| 1456 InterpreterAssembler* assembler) { | 1202 InterpreterAssembler* assembler) { |
| 1457 Callable callable = CodeFactory::ToBoolean(isolate_); | 1203 Callable callable = CodeFactory::ToBoolean(isolate_); |
| 1458 Node* target = __ HeapConstant(callable.code()); | 1204 Node* target = __ HeapConstant(callable.code()); |
| 1459 Node* accumulator = __ GetAccumulator(); | 1205 Node* accumulator = __ GetAccumulator(); |
| 1460 Node* context = __ GetContext(); | 1206 Node* context = __ GetContext(); |
| 1461 Node* to_boolean_value = | 1207 Node* to_boolean_value = |
| 1462 __ CallStub(callable.descriptor(), target, context, accumulator); | 1208 __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1463 Node* index = __ BytecodeOperandIdx(0); | 1209 Node* index = __ BytecodeOperandIdx(0); |
| 1464 Node* constant = __ LoadConstantPoolEntry(index); | 1210 Node* constant = __ LoadConstantPoolEntry(index); |
| 1465 Node* relative_jump = __ SmiUntag(constant); | 1211 Node* relative_jump = __ SmiUntag(constant); |
| 1466 Node* false_value = __ BooleanConstant(false); | 1212 Node* false_value = __ BooleanConstant(false); |
| 1467 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); | 1213 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); |
| 1468 } | 1214 } |
| 1469 | 1215 |
| 1470 | 1216 // JumpIfNull <imm> |
| 1471 // JumpIfToBooleanFalseConstantWide <idx16> | |
| 1472 // | |
| 1473 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1474 // if the object referenced by the accumulator is false when the object is cast | |
| 1475 // to boolean. | |
| 1476 void Interpreter::DoJumpIfToBooleanFalseConstantWide( | |
| 1477 InterpreterAssembler* assembler) { | |
| 1478 DoJumpIfToBooleanFalseConstant(assembler); | |
| 1479 } | |
| 1480 | |
| 1481 | |
| 1482 // JumpIfNull <imm8> | |
| 1483 // | 1217 // |
| 1484 // Jump by number of bytes represented by an immediate operand if the object | 1218 // Jump by number of bytes represented by an immediate operand if the object |
| 1485 // referenced by the accumulator is the null constant. | 1219 // referenced by the accumulator is the null constant. |
| 1486 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { | 1220 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { |
| 1487 Node* accumulator = __ GetAccumulator(); | 1221 Node* accumulator = __ GetAccumulator(); |
| 1488 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 1222 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 1489 Node* relative_jump = __ BytecodeOperandImm(0); | 1223 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1490 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 1224 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 1491 } | 1225 } |
| 1492 | 1226 |
| 1493 | 1227 // JumpIfNullConstant <idx> |
| 1494 // JumpIfNullConstant <idx8> | |
| 1495 // | 1228 // |
| 1496 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1229 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1497 // if the object referenced by the accumulator is the null constant. | 1230 // if the object referenced by the accumulator is the null constant. |
| 1498 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { | 1231 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { |
| 1499 Node* accumulator = __ GetAccumulator(); | 1232 Node* accumulator = __ GetAccumulator(); |
| 1500 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 1233 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 1501 Node* index = __ BytecodeOperandIdx(0); | 1234 Node* index = __ BytecodeOperandIdx(0); |
| 1502 Node* constant = __ LoadConstantPoolEntry(index); | 1235 Node* constant = __ LoadConstantPoolEntry(index); |
| 1503 Node* relative_jump = __ SmiUntag(constant); | 1236 Node* relative_jump = __ SmiUntag(constant); |
| 1504 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 1237 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 1505 } | 1238 } |
| 1506 | 1239 |
| 1507 | 1240 // JumpIfUndefined <imm> |
| 1508 // JumpIfNullConstantWide <idx16> | |
| 1509 // | |
| 1510 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1511 // if the object referenced by the accumulator is the null constant. | |
| 1512 void Interpreter::DoJumpIfNullConstantWide(InterpreterAssembler* assembler) { | |
| 1513 DoJumpIfNullConstant(assembler); | |
| 1514 } | |
| 1515 | |
| 1516 // JumpIfUndefined <imm8> | |
| 1517 // | 1241 // |
| 1518 // Jump by number of bytes represented by an immediate operand if the object | 1242 // Jump by number of bytes represented by an immediate operand if the object |
| 1519 // referenced by the accumulator is the undefined constant. | 1243 // referenced by the accumulator is the undefined constant. |
| 1520 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { | 1244 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { |
| 1521 Node* accumulator = __ GetAccumulator(); | 1245 Node* accumulator = __ GetAccumulator(); |
| 1522 Node* undefined_value = | 1246 Node* undefined_value = |
| 1523 __ HeapConstant(isolate_->factory()->undefined_value()); | 1247 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 1524 Node* relative_jump = __ BytecodeOperandImm(0); | 1248 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1525 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 1249 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 1526 } | 1250 } |
| 1527 | 1251 |
| 1528 | 1252 // JumpIfUndefinedConstant <idx> |
| 1529 // JumpIfUndefinedConstant <idx8> | |
| 1530 // | 1253 // |
| 1531 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1254 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1532 // if the object referenced by the accumulator is the undefined constant. | 1255 // if the object referenced by the accumulator is the undefined constant. |
| 1533 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { | 1256 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { |
| 1534 Node* accumulator = __ GetAccumulator(); | 1257 Node* accumulator = __ GetAccumulator(); |
| 1535 Node* undefined_value = | 1258 Node* undefined_value = |
| 1536 __ HeapConstant(isolate_->factory()->undefined_value()); | 1259 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 1537 Node* index = __ BytecodeOperandIdx(0); | 1260 Node* index = __ BytecodeOperandIdx(0); |
| 1538 Node* constant = __ LoadConstantPoolEntry(index); | 1261 Node* constant = __ LoadConstantPoolEntry(index); |
| 1539 Node* relative_jump = __ SmiUntag(constant); | 1262 Node* relative_jump = __ SmiUntag(constant); |
| 1540 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 1263 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 1541 } | 1264 } |
| 1542 | 1265 |
| 1543 | 1266 // JumpIfNotHole <imm> |
| 1544 // JumpIfUndefinedConstantWide <idx16> | |
| 1545 // | |
| 1546 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1547 // if the object referenced by the accumulator is the undefined constant. | |
| 1548 void Interpreter::DoJumpIfUndefinedConstantWide( | |
| 1549 InterpreterAssembler* assembler) { | |
| 1550 DoJumpIfUndefinedConstant(assembler); | |
| 1551 } | |
| 1552 | |
| 1553 // JumpIfNotHole <imm8> | |
| 1554 // | 1267 // |
| 1555 // Jump by number of bytes represented by an immediate operand if the object | 1268 // Jump by number of bytes represented by an immediate operand if the object |
| 1556 // referenced by the accumulator is not the hole. | 1269 // referenced by the accumulator is not the hole. |
| 1557 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { | 1270 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { |
| 1558 Node* accumulator = __ GetAccumulator(); | 1271 Node* accumulator = __ GetAccumulator(); |
| 1559 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 1272 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 1560 Node* relative_jump = __ BytecodeOperandImm(0); | 1273 Node* relative_jump = __ BytecodeOperandImm(0); |
| 1561 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 1274 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 1562 } | 1275 } |
| 1563 | 1276 |
| 1564 // JumpIfNotHoleConstant <idx8> | 1277 // JumpIfNotHoleConstant <idx> |
| 1565 // | 1278 // |
| 1566 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool | 1279 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 1567 // if the object referenced by the accumulator is not the hole constant. | 1280 // if the object referenced by the accumulator is not the hole constant. |
| 1568 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { | 1281 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { |
| 1569 Node* accumulator = __ GetAccumulator(); | 1282 Node* accumulator = __ GetAccumulator(); |
| 1570 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 1283 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 1571 Node* index = __ BytecodeOperandIdx(0); | 1284 Node* index = __ BytecodeOperandIdx(0); |
| 1572 Node* constant = __ LoadConstantPoolEntry(index); | 1285 Node* constant = __ LoadConstantPoolEntry(index); |
| 1573 Node* relative_jump = __ SmiUntag(constant); | 1286 Node* relative_jump = __ SmiUntag(constant); |
| 1574 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 1287 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 1575 } | 1288 } |
| 1576 | 1289 |
| 1577 // JumpIfNotHoleConstantWide <idx16> | |
| 1578 // | |
| 1579 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool | |
| 1580 // if the object referenced by the accumulator is not the hole constant. | |
| 1581 void Interpreter::DoJumpIfNotHoleConstantWide(InterpreterAssembler* assembler) { | |
| 1582 DoJumpIfNotHoleConstant(assembler); | |
| 1583 } | |
| 1584 | |
| 1585 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, | 1290 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, |
| 1586 InterpreterAssembler* assembler) { | 1291 InterpreterAssembler* assembler) { |
| 1587 Node* index = __ BytecodeOperandIdx(0); | 1292 Node* index = __ BytecodeOperandIdx(0); |
| 1588 Node* constant_elements = __ LoadConstantPoolEntry(index); | 1293 Node* constant_elements = __ LoadConstantPoolEntry(index); |
| 1589 Node* literal_index_raw = __ BytecodeOperandIdx(1); | 1294 Node* literal_index_raw = __ BytecodeOperandIdx(1); |
| 1590 Node* literal_index = __ SmiTag(literal_index_raw); | 1295 Node* literal_index = __ SmiTag(literal_index_raw); |
| 1591 Node* flags_raw = __ BytecodeOperandImm(2); | 1296 Node* flags_raw = __ BytecodeOperandFlag(2); |
| 1592 Node* flags = __ SmiTag(flags_raw); | 1297 Node* flags = __ SmiTag(flags_raw); |
| 1593 Node* closure = __ LoadRegister(Register::function_closure()); | 1298 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1594 Node* context = __ GetContext(); | 1299 Node* context = __ GetContext(); |
| 1595 Node* result = __ CallRuntime(function_id, context, closure, literal_index, | 1300 Node* result = __ CallRuntime(function_id, context, closure, literal_index, |
| 1596 constant_elements, flags); | 1301 constant_elements, flags); |
| 1597 __ SetAccumulator(result); | 1302 __ SetAccumulator(result); |
| 1598 __ Dispatch(); | 1303 __ Dispatch(); |
| 1599 } | 1304 } |
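DoCreateLiteral tags its raw index and flag operands as Smis before handing them to the runtime, mirroring the SmiUntag calls the jump handlers above use in the opposite direction. The snippet below shows a shift-based small-integer tagging scheme in isolation; the one-bit tag layout is a common convention chosen for illustration and is not meant to document V8's exact Smi representation on every architecture.

```cpp
// Minimal small-integer (Smi-style) tagging: value stored shifted left by one,
// with the low bit clear. Illustrative only; real tag layouts vary by platform.
#include <cassert>
#include <cstdint>

constexpr intptr_t kTagSize = 1;

intptr_t SmiTag(intptr_t value) { return value << kTagSize; }
intptr_t SmiUntag(intptr_t tagged) { return tagged >> kTagSize; }
bool IsSmi(intptr_t word) { return (word & 1) == 0; }

int main() {
  intptr_t literal_index_raw = 3;                       // raw bytecode operand
  intptr_t literal_index = SmiTag(literal_index_raw);   // what the runtime sees
  assert(IsSmi(literal_index));
  assert(SmiUntag(literal_index) == literal_index_raw);
  return 0;
}
```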
| 1600 | 1305 |
| 1601 | 1306 |
| 1602 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> | 1307 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> |
| 1603 // | 1308 // |
| 1604 // Creates a regular expression literal for literal index <literal_idx> with | 1309 // Creates a regular expression literal for literal index <literal_idx> with |
| 1605 // <flags> and the pattern in <pattern_idx>. | 1310 // <flags> and the pattern in <pattern_idx>. |
| 1606 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { | 1311 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { |
| 1607 Callable callable = CodeFactory::FastCloneRegExp(isolate_); | 1312 Callable callable = CodeFactory::FastCloneRegExp(isolate_); |
| 1608 Node* target = __ HeapConstant(callable.code()); | 1313 Node* target = __ HeapConstant(callable.code()); |
| 1609 Node* index = __ BytecodeOperandIdx(0); | 1314 Node* index = __ BytecodeOperandIdx(0); |
| 1610 Node* pattern = __ LoadConstantPoolEntry(index); | 1315 Node* pattern = __ LoadConstantPoolEntry(index); |
| 1611 Node* literal_index_raw = __ BytecodeOperandIdx(1); | 1316 Node* literal_index_raw = __ BytecodeOperandIdx(1); |
| 1612 Node* literal_index = __ SmiTag(literal_index_raw); | 1317 Node* literal_index = __ SmiTag(literal_index_raw); |
| 1613 Node* flags_raw = __ BytecodeOperandImm(2); | 1318 Node* flags_raw = __ BytecodeOperandFlag(2); |
| 1614 Node* flags = __ SmiTag(flags_raw); | 1319 Node* flags = __ SmiTag(flags_raw); |
| 1615 Node* closure = __ LoadRegister(Register::function_closure()); | 1320 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1616 Node* context = __ GetContext(); | 1321 Node* context = __ GetContext(); |
| 1617 Node* result = __ CallStub(callable.descriptor(), target, context, closure, | 1322 Node* result = __ CallStub(callable.descriptor(), target, context, closure, |
| 1618 literal_index, pattern, flags); | 1323 literal_index, pattern, flags); |
| 1619 __ SetAccumulator(result); | 1324 __ SetAccumulator(result); |
| 1620 __ Dispatch(); | 1325 __ Dispatch(); |
| 1621 } | 1326 } |
| 1622 | 1327 |
| 1623 | |
| 1624 // CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags> | |
| 1625 // | |
| 1626 // Creates a regular expression literal for literal index <literal_idx> with | |
| 1627 // <flags> and the pattern in <pattern_idx>. | |
| 1628 void Interpreter::DoCreateRegExpLiteralWide(InterpreterAssembler* assembler) { | |
| 1629 DoCreateRegExpLiteral(assembler); | |
| 1630 } | |
| 1631 | |
| 1632 | |
| 1633 // CreateArrayLiteral <element_idx> <literal_idx> <flags> | 1328 // CreateArrayLiteral <element_idx> <literal_idx> <flags> |
| 1634 // | 1329 // |
| 1635 // Creates an array literal for literal index <literal_idx> with flags <flags> | 1330 // Creates an array literal for literal index <literal_idx> with flags <flags> |
| 1636 // and constant elements in <element_idx>. | 1331 // and constant elements in <element_idx>. |
| 1637 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { | 1332 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { |
| 1638 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); | 1333 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); |
| 1639 } | 1334 } |
| 1640 | 1335 |
| 1641 | |
| 1642 // CreateArrayLiteralWide <element_idx> <literal_idx> <flags> | |
| 1643 // | |
| 1644 // Creates an array literal for literal index <literal_idx> with flags <flags> | |
| 1645 // and constant elements in <element_idx>. | |
| 1646 void Interpreter::DoCreateArrayLiteralWide(InterpreterAssembler* assembler) { | |
| 1647 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); | |
| 1648 } | |
| 1649 | |
| 1650 | |
| 1651 // CreateObjectLiteral <element_idx> <literal_idx> <flags> | 1336 // CreateObjectLiteral <element_idx> <literal_idx> <flags> |
| 1652 // | 1337 // |
| 1653 // Creates an object literal for literal index <literal_idx> with flags <flags> | 1338 // Creates an object literal for literal index <literal_idx> with flags <flags> |
| 1654 // and constant elements in <element_idx>. | 1339 // and constant elements in <element_idx>. |
| 1655 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { | 1340 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { |
| 1656 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); | 1341 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); |
| 1657 } | 1342 } |
| 1658 | 1343 |
| 1659 | |
| 1660 // CreateObjectLiteralWide <element_idx> <literal_idx> <flags> | |
| 1661 // | |
| 1662 // Creates an object literal for literal index <literal_idx> with flags <flags> | |
| 1663 // and constant elements in <element_idx>. | |
| 1664 void Interpreter::DoCreateObjectLiteralWide(InterpreterAssembler* assembler) { | |
| 1665 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); | |
| 1666 } | |
| 1667 | |
| 1668 | |
| 1669 // CreateClosure <index> <tenured> | 1344 // CreateClosure <index> <tenured> |
| 1670 // | 1345 // |
| 1671 // Creates a new closure for SharedFunctionInfo at position |index| in the | 1346 // Creates a new closure for SharedFunctionInfo at position |index| in the |
| 1672 // constant pool and with the PretenureFlag <tenured>. | 1347 // constant pool and with the PretenureFlag <tenured>. |
| 1673 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { | 1348 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { |
| 1674 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of | 1349 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of |
| 1675 // calling into the runtime. | 1350 // calling into the runtime. |
| 1676 Node* index = __ BytecodeOperandIdx(0); | 1351 Node* index = __ BytecodeOperandIdx(0); |
| 1677 Node* shared = __ LoadConstantPoolEntry(index); | 1352 Node* shared = __ LoadConstantPoolEntry(index); |
| 1678 Node* tenured_raw = __ BytecodeOperandImm(1); | 1353 Node* tenured_raw = __ BytecodeOperandFlag(1); |
| 1679 Node* tenured = __ SmiTag(tenured_raw); | 1354 Node* tenured = __ SmiTag(tenured_raw); |
| 1680 Node* context = __ GetContext(); | 1355 Node* context = __ GetContext(); |
| 1681 Node* result = | 1356 Node* result = |
| 1682 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); | 1357 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); |
| 1683 __ SetAccumulator(result); | 1358 __ SetAccumulator(result); |
| 1684 __ Dispatch(); | 1359 __ Dispatch(); |
| 1685 } | 1360 } |
| 1686 | 1361 |
| 1687 | |
| 1688 // CreateClosureWide <index> <tenured> | |
| 1689 // | |
| 1690 // Creates a new closure for SharedFunctionInfo at position |index| in the | |
| 1691 // constant pool and with the PretenureFlag <tenured>. | |
| 1692 void Interpreter::DoCreateClosureWide(InterpreterAssembler* assembler) { | |
| 1693 return DoCreateClosure(assembler); | |
| 1694 } | |
| 1695 | |
| 1696 | |
| 1697 // CreateMappedArguments | 1362 // CreateMappedArguments |
| 1698 // | 1363 // |
| 1699 // Creates a new mapped arguments object. | 1364 // Creates a new mapped arguments object. |
| 1700 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { | 1365 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { |
| 1701 Node* closure = __ LoadRegister(Register::function_closure()); | 1366 Node* closure = __ LoadRegister(Register::function_closure()); |
| 1702 Node* context = __ GetContext(); | 1367 Node* context = __ GetContext(); |
| 1703 Node* result = | 1368 Node* result = |
| 1704 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); | 1369 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); |
| 1705 __ SetAccumulator(result); | 1370 __ SetAccumulator(result); |
| 1706 __ Dispatch(); | 1371 __ Dispatch(); |
| (...skipping 101 matching lines...) | |
| 1808 // 0 == cache_type, 1 == cache_array, 2 == cache_length | 1473 // 0 == cache_type, 1 == cache_array, 2 == cache_length |
| 1809 Node* output_register = __ BytecodeOperandReg(0); | 1474 Node* output_register = __ BytecodeOperandReg(0); |
| 1810 for (int i = 0; i < 3; i++) { | 1475 for (int i = 0; i < 3; i++) { |
| 1811 Node* cache_info = __ Projection(i, result_triple); | 1476 Node* cache_info = __ Projection(i, result_triple); |
| 1812 __ StoreRegister(cache_info, output_register); | 1477 __ StoreRegister(cache_info, output_register); |
| 1813 output_register = __ NextRegister(output_register); | 1478 output_register = __ NextRegister(output_register); |
| 1814 } | 1479 } |
| 1815 __ Dispatch(); | 1480 __ Dispatch(); |
| 1816 } | 1481 } |
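The tail of DoForInPrepare writes its three results into consecutive registers, starting at the register operand and advancing with NextRegister after each store. A simple way to picture that, assuming a register file modeled as a flat array, is the hedged sketch below; the RegisterFile type and register numbering are made up for illustration.

```cpp
// Toy register file: a ForInPrepare-style triple written to consecutive slots.
#include <array>
#include <cassert>

struct RegisterFile {
  std::array<int, 16> slots{};  // stand-in for interpreter registers
};

// Stores |triple| into registers base, base + 1, base + 2.
void StoreTriple(RegisterFile* regs, int base, const std::array<int, 3>& triple) {
  int output_register = base;
  for (int i = 0; i < 3; i++) {
    regs->slots[output_register] = triple[i];  // StoreRegister(...)
    output_register += 1;                      // NextRegister(...)
  }
}

int main() {
  RegisterFile regs;
  StoreTriple(&regs, 4, {/*cache_type=*/1, /*cache_array=*/2, /*cache_length=*/3});
  assert(regs.slots[4] == 1 && regs.slots[5] == 2 && regs.slots[6] == 3);
  return 0;
}
```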
| 1817 | 1482 |
| 1818 | |
| 1819 // ForInPrepareWide <cache_info_triple> | |
| 1820 // | |
| 1821 // Returns state for for..in loop execution based on the object in the | |
| 1822 // accumulator. The result is output in registers |cache_info_triple| to | |
| 1823 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, | |
| 1824 // and cache_length respectively. | |
| 1825 void Interpreter::DoForInPrepareWide(InterpreterAssembler* assembler) { | |
| 1826 DoForInPrepare(assembler); | |
| 1827 } | |
| 1828 | |
| 1829 | |
| 1830 // ForInNext <receiver> <index> <cache_info_pair> | 1483 // ForInNext <receiver> <index> <cache_info_pair> |
| 1831 // | 1484 // |
| 1832 // Returns the next enumerable property in the accumulator. | 1485 // Returns the next enumerable property in the accumulator. |
| 1833 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { | 1486 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { |
| 1834 Node* receiver_reg = __ BytecodeOperandReg(0); | 1487 Node* receiver_reg = __ BytecodeOperandReg(0); |
| 1835 Node* receiver = __ LoadRegister(receiver_reg); | 1488 Node* receiver = __ LoadRegister(receiver_reg); |
| 1836 Node* index_reg = __ BytecodeOperandReg(1); | 1489 Node* index_reg = __ BytecodeOperandReg(1); |
| 1837 Node* index = __ LoadRegister(index_reg); | 1490 Node* index = __ LoadRegister(index_reg); |
| 1838 Node* cache_type_reg = __ BytecodeOperandReg(2); | 1491 Node* cache_type_reg = __ BytecodeOperandReg(2); |
| 1839 Node* cache_type = __ LoadRegister(cache_type_reg); | 1492 Node* cache_type = __ LoadRegister(cache_type_reg); |
| (...skipping 26 matching lines...) | |
| 1866 | 1519 |
| 1867 // Need to filter the {key} for the {receiver}. | 1520 // Need to filter the {key} for the {receiver}. |
| 1868 Node* context = __ GetContext(); | 1521 Node* context = __ GetContext(); |
| 1869 Node* result = | 1522 Node* result = |
| 1870 __ CallRuntime(Runtime::kForInFilter, context, receiver, key); | 1523 __ CallRuntime(Runtime::kForInFilter, context, receiver, key); |
| 1871 __ SetAccumulator(result); | 1524 __ SetAccumulator(result); |
| 1872 __ Dispatch(); | 1525 __ Dispatch(); |
| 1873 } | 1526 } |
| 1874 } | 1527 } |
| 1875 | 1528 |
| 1876 | |
| 1877 // ForInNextWide <receiver> <index> <cache_info_pair> | |
| 1878 // | |
| 1879 // Returns the next enumerable property in the accumulator. | |
| 1880 void Interpreter::DoForInNextWide(InterpreterAssembler* assembler) { | |
| 1881 return DoForInNext(assembler); | |
| 1882 } | |
| 1883 | |
| 1884 | |
| 1885 // ForInDone <index> <cache_length> | 1529 // ForInDone <index> <cache_length> |
| 1886 // | 1530 // |
| 1887 // Returns true if the end of the enumerable properties has been reached. | 1531 // Returns true if the end of the enumerable properties has been reached. |
| 1888 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { | 1532 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { |
| 1889 // TODO(oth): Implement directly rather than making a runtime call. | 1533 // TODO(oth): Implement directly rather than making a runtime call. |
| 1890 Node* index_reg = __ BytecodeOperandReg(0); | 1534 Node* index_reg = __ BytecodeOperandReg(0); |
| 1891 Node* index = __ LoadRegister(index_reg); | 1535 Node* index = __ LoadRegister(index_reg); |
| 1892 Node* cache_length_reg = __ BytecodeOperandReg(1); | 1536 Node* cache_length_reg = __ BytecodeOperandReg(1); |
| 1893 Node* cache_length = __ LoadRegister(cache_length_reg); | 1537 Node* cache_length = __ LoadRegister(cache_length_reg); |
| 1894 Node* context = __ GetContext(); | 1538 Node* context = __ GetContext(); |
| 1895 Node* result = | 1539 Node* result = |
| 1896 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); | 1540 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); |
| 1897 __ SetAccumulator(result); | 1541 __ SetAccumulator(result); |
| 1898 __ Dispatch(); | 1542 __ Dispatch(); |
| 1899 } | 1543 } |
| 1900 | 1544 |
| 1901 | |
| 1902 // ForInStep <index> | 1545 // ForInStep <index> |
| 1903 // | 1546 // |
| 1904 // Increments the loop counter in register |index| and stores the result | 1547 // Increments the loop counter in register |index| and stores the result |
| 1905 // in the accumulator. | 1548 // in the accumulator. |
| 1906 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { | 1549 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { |
| 1907 Node* index_reg = __ BytecodeOperandReg(0); | 1550 Node* index_reg = __ BytecodeOperandReg(0); |
| 1908 Node* index = __ LoadRegister(index_reg); | 1551 Node* index = __ LoadRegister(index_reg); |
| 1909 Node* one = __ SmiConstant(Smi::FromInt(1)); | 1552 Node* one = __ SmiConstant(Smi::FromInt(1)); |
| 1910 Node* result = __ SmiAdd(index, one); | 1553 Node* result = __ SmiAdd(index, one); |
| 1911 __ SetAccumulator(result); | 1554 __ SetAccumulator(result); |
| 1912 __ Dispatch(); | 1555 __ Dispatch(); |
| 1913 } | 1556 } |
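Taken together, ForInPrepare, ForInDone, ForInNext, and ForInStep make up the interpreter's for..in protocol: snapshot the enumerable keys, loop while the index is below the cache length, fetch the next key (filtering it if the receiver may have changed), and bump the index. The stand-alone loop below mirrors that shape over a plain map; the ToyObject type and helper names are invented, and the filter step is reduced to a membership check.

```cpp
// Stand-alone sketch of the ForInPrepare / ForInDone / ForInNext / ForInStep
// protocol over a toy object. Names and types here are illustrative only.
#include <cstdio>
#include <map>
#include <string>
#include <vector>

using ToyObject = std::map<std::string, int>;

// ForInPrepare: snapshot the enumerable keys ("cache_array" / "cache_length").
std::vector<std::string> ForInPrepare(const ToyObject& receiver) {
  std::vector<std::string> keys;
  for (const auto& entry : receiver) keys.push_back(entry.first);
  return keys;
}

int main() {
  ToyObject receiver = {{"a", 1}, {"b", 2}, {"c", 3}};
  std::vector<std::string> cache = ForInPrepare(receiver);

  size_t index = 0;
  while (index < cache.size()) {            // ForInDone
    const std::string& key = cache[index];  // ForInNext
    // ForInFilter stand-in: skip keys removed since the snapshot was taken.
    if (receiver.count(key) != 0) {
      std::printf("%s -> %d\n", key.c_str(), receiver[key]);
    }
    index += 1;                             // ForInStep
  }
  return 0;
}
```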
| 1914 | 1557 |
| 1558 void Interpreter::DoWide(InterpreterAssembler* assembler) { | |
| 1559 __ RedispatchWide(); | |
| 1560 } | |
| 1561 | |
| 1562 void Interpreter::DoExtraWide(InterpreterAssembler* assembler) { | |
| 1563 __ RedispatchWide(); | |
| 1564 } | |
| 1565 | |
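DoWide and DoExtraWide are prefix handlers: rather than performing an operation themselves, they re-dispatch on the bytecode that follows with a larger operand width. One way to picture the mechanism, assuming a dispatch table indexed by both operand scale and opcode, is the toy redispatch below; the table shape, scale values, and handler signature are assumptions for illustration, not a description of what RedispatchWide actually does.

```cpp
// Toy prefix-based redispatch: a Wide/ExtraWide prefix selects a row of the
// dispatch table whose handlers read wider operands. Illustration only.
#include <cstdint>
#include <cstdio>
#include <vector>

using Handler = void (*)(const std::vector<uint8_t>& code, size_t operand_offset,
                         int operand_scale);

void HandleLdaSmi(const std::vector<uint8_t>& code, size_t operand_offset,
                  int operand_scale) {
  // Read a little-endian operand whose width depends on the prefix.
  int32_t value = 0;
  for (int i = 0; i < operand_scale; i++) {
    value |= static_cast<int32_t>(code[operand_offset + i]) << (8 * i);
  }
  std::printf("LdaSmi %d (operand scale %d)\n", static_cast<int>(value),
              operand_scale);
}

int main() {
  enum : uint8_t { kWide = 0, kExtraWide = 1, kLdaSmi = 2 };

  // dispatch[scale_row][opcode]; rows correspond to 1-, 2-, and 4-byte operands.
  Handler dispatch[3][3] = {};
  for (int row = 0; row < 3; row++) dispatch[row][kLdaSmi] = HandleLdaSmi;
  const int kScaleForRow[3] = {1, 2, 4};

  // "Wide LdaSmi 0x0102": prefix, then the real opcode, then a 2-byte operand.
  std::vector<uint8_t> code = {kWide, kLdaSmi, 0x02, 0x01};

  size_t offset = 0;
  int row = 0;  // default row: 1-byte operands
  if (code[offset] == kWide) {
    row = 1;                                                 // DoWide
    offset++;
  } else if (code[offset] == kExtraWide) {
    row = 2;                                                 // DoExtraWide
    offset++;
  }
  uint8_t opcode = code[offset++];
  dispatch[row][opcode](code, offset, kScaleForRow[row]);    // prints LdaSmi 258
  return 0;
}
```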
| 1915 } // namespace interpreter | 1566 } // namespace interpreter |
| 1916 } // namespace internal | 1567 } // namespace internal |
| 1917 } // namespace v8 | 1568 } // namespace v8 |