OLD | NEW |
---|---|
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/interpreter/interpreter.h" | 5 #include "src/interpreter/interpreter.h" |
6 | 6 |
7 #include <fstream> | 7 #include <fstream> |
8 #include <memory> | 8 #include <memory> |
9 | 9 |
10 #include "src/ast/prettyprinter.h" | 10 #include "src/ast/prettyprinter.h" |
(...skipping 403 matching lines...) | |
414 // | 414 // |
415 // Stores the value of register <src> to register <dst>. | 415 // Stores the value of register <src> to register <dst>. |
416 void Interpreter::DoMov(InterpreterAssembler* assembler) { | 416 void Interpreter::DoMov(InterpreterAssembler* assembler) { |
417 Node* src_index = __ BytecodeOperandReg(0); | 417 Node* src_index = __ BytecodeOperandReg(0); |
418 Node* src_value = __ LoadRegister(src_index); | 418 Node* src_value = __ LoadRegister(src_index); |
419 Node* dst_index = __ BytecodeOperandReg(1); | 419 Node* dst_index = __ BytecodeOperandReg(1); |
420 __ StoreRegister(src_value, dst_index); | 420 __ StoreRegister(src_value, dst_index); |
421 __ Dispatch(); | 421 __ Dispatch(); |
422 } | 422 } |
423 | 423 |
424 Node* Interpreter::BuildLoadGlobal(Callable ic, | 424 Node* Interpreter::BuildLoadGlobal(Callable ic, Node* context, Node* raw_slot, |
rmcilroy
2016/09/19 09:04:29
nit - rename raw_slot to feedback_slot (in header
Leszek Swirski
2016/09/19 10:34:51
Done.
| |
425 InterpreterAssembler* assembler) { | 425 InterpreterAssembler* assembler) { |
426 typedef LoadGlobalWithVectorDescriptor Descriptor; | 426 typedef LoadGlobalWithVectorDescriptor Descriptor; |
427 // Get the global object. | |
428 Node* context = __ GetContext(); | |
429 | 427 |
430 // Load the global via the LoadGlobalIC. | 428 // Load the global via the LoadGlobalIC. |
431 Node* code_target = __ HeapConstant(ic.code()); | 429 Node* code_target = __ HeapConstant(ic.code()); |
432 Node* raw_slot = __ BytecodeOperandIdx(0); | |
433 Node* smi_slot = __ SmiTag(raw_slot); | 430 Node* smi_slot = __ SmiTag(raw_slot); |
434 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); | 431 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); |
435 return __ CallStub(ic.descriptor(), code_target, context, | 432 return __ CallStub(ic.descriptor(), code_target, context, |
436 Arg(Descriptor::kSlot, smi_slot), | 433 Arg(Descriptor::kSlot, smi_slot), |
437 Arg(Descriptor::kVector, type_feedback_vector)); | 434 Arg(Descriptor::kVector, type_feedback_vector)); |
438 } | 435 } |
439 | 436 |
437 Node* Interpreter::BuildLoadGlobal(Callable ic, | |
rmcilroy
2016/09/19 09:04:29
Not sure it's worth having this helper - would pro
Leszek Swirski
2016/09/19 10:34:51
Fair enough, done (or rather, undone).
| |
438 InterpreterAssembler* assembler) { | |
439 // Get the global object. | |
440 Node* context = __ GetContext(); | |
441 | |
442 Node* raw_slot = __ BytecodeOperandIdx(0); | |
443 return BuildLoadGlobal(ic, context, raw_slot, assembler); | |
444 } | |
445 | |
440 // LdaGlobal <slot> | 446 // LdaGlobal <slot> |
441 // | 447 // |
442 // Load the global with name in constant pool entry <name_index> into the | 448 // Load the global with name in constant pool entry <name_index> into the |
443 // accumulator using FeedBackVector slot <slot> outside of a typeof. | 449 // accumulator using FeedBackVector slot <slot> outside of a typeof. |
444 void Interpreter::DoLdaGlobal(InterpreterAssembler* assembler) { | 450 void Interpreter::DoLdaGlobal(InterpreterAssembler* assembler) { |
445 Callable ic = | 451 Callable ic = |
446 CodeFactory::LoadGlobalICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF); | 452 CodeFactory::LoadGlobalICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF); |
447 Node* result = BuildLoadGlobal(ic, assembler); | 453 Node* result = BuildLoadGlobal(ic, assembler); |
448 __ SetAccumulator(result); | 454 __ SetAccumulator(result); |
449 __ Dispatch(); | 455 __ Dispatch(); |
(...skipping 104 matching lines...) | |
554 Node* value = __ GetAccumulator(); | 560 Node* value = __ GetAccumulator(); |
555 Node* reg_index = __ BytecodeOperandReg(0); | 561 Node* reg_index = __ BytecodeOperandReg(0); |
556 Node* context = __ LoadRegister(reg_index); | 562 Node* context = __ LoadRegister(reg_index); |
557 Node* slot_index = __ BytecodeOperandIdx(1); | 563 Node* slot_index = __ BytecodeOperandIdx(1); |
558 Node* depth = __ BytecodeOperandUImm(2); | 564 Node* depth = __ BytecodeOperandUImm(2); |
559 Node* slot_context = __ GetContextAtDepth(context, depth); | 565 Node* slot_context = __ GetContextAtDepth(context, depth); |
560 __ StoreContextSlot(slot_context, slot_index, value); | 566 __ StoreContextSlot(slot_context, slot_index, value); |
561 __ Dispatch(); | 567 __ Dispatch(); |
562 } | 568 } |
563 | 569 |
570 void Interpreter::DoLdaLookupSlowPath(Runtime::FunctionId function_id, | |
571 Node* name_index, Node* context, | |
572 InterpreterAssembler* assembler) { | |
573 Node* name = __ LoadConstantPoolEntry(name_index); | |
574 Node* result = __ CallRuntime(function_id, context, name); | |
575 __ SetAccumulator(result); | |
576 __ Dispatch(); | |
rmcilroy
2016/09/19 09:04:29
I'd prefer you made this BuildLdaLookupSlowPath an
Leszek Swirski
2016/09/19 10:34:51
Done.
| |
577 } | |
578 | |
564 void Interpreter::DoLdaLookupSlot(Runtime::FunctionId function_id, | 579 void Interpreter::DoLdaLookupSlot(Runtime::FunctionId function_id, |
565 InterpreterAssembler* assembler) { | 580 InterpreterAssembler* assembler) { |
566 Node* index = __ BytecodeOperandIdx(0); | 581 Node* index = __ BytecodeOperandIdx(0); |
567 Node* name = __ LoadConstantPoolEntry(index); | |
568 Node* context = __ GetContext(); | 582 Node* context = __ GetContext(); |
569 Node* result = __ CallRuntime(function_id, context, name); | 583 DoLdaLookupSlowPath(function_id, index, context, assembler); |
570 __ SetAccumulator(result); | |
571 __ Dispatch(); | |
572 } | 584 } |
573 | 585 |
574 // LdaLookupSlot <name_index> | 586 // LdaLookupSlot <name_index> |
575 // | 587 // |
576 // Lookup the object with the name in constant pool entry |name_index| | 588 // Lookup the object with the name in constant pool entry |name_index| |
577 // dynamically. | 589 // dynamically. |
578 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { | 590 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { |
579 DoLdaLookupSlot(Runtime::kLoadLookupSlot, assembler); | 591 DoLdaLookupSlot(Runtime::kLoadLookupSlot, assembler); |
580 } | 592 } |
581 | 593 |
(...skipping 20 matching lines...) | |
602 // Fast path does a normal load context. | 614 // Fast path does a normal load context. |
603 { | 615 { |
604 Node* slot_context = __ GetContextAtDepth(context, depth); | 616 Node* slot_context = __ GetContextAtDepth(context, depth); |
605 Node* result = __ LoadContextSlot(slot_context, slot_index); | 617 Node* result = __ LoadContextSlot(slot_context, slot_index); |
606 __ SetAccumulator(result); | 618 __ SetAccumulator(result); |
607 __ Dispatch(); | 619 __ Dispatch(); |
608 } | 620 } |
609 | 621 |
610 // Slow path when we have to call out to the runtime. | 622 // Slow path when we have to call out to the runtime. |
611 __ Bind(&slowpath); | 623 __ Bind(&slowpath); |
612 { | 624 { DoLdaLookupSlowPath(function_id, name_index, context, assembler); } |
613 Node* name = __ LoadConstantPoolEntry(name_index); | |
614 Node* result = __ CallRuntime(function_id, context, name); | |
615 __ SetAccumulator(result); | |
616 __ Dispatch(); | |
617 } | |
618 } | 625 } |
619 | 626 |
620 // LdaLookupContextSlot <name_index> | 627 // LdaLookupContextSlot <name_index> |
621 // | 628 // |
622 // Lookup the object with the name in constant pool entry |name_index| | 629 // Lookup the object with the name in constant pool entry |name_index| |
623 // dynamically. | 630 // dynamically. |
624 void Interpreter::DoLdaLookupContextSlot(InterpreterAssembler* assembler) { | 631 void Interpreter::DoLdaLookupContextSlot(InterpreterAssembler* assembler) { |
625 DoLdaLookupContextSlot(Runtime::kLoadLookupSlot, assembler); | 632 DoLdaLookupContextSlot(Runtime::kLoadLookupSlot, assembler); |
626 } | 633 } |
627 | 634 |
628 // LdaLookupContextSlotInsideTypeof <name_index> | 635 // LdaLookupContextSlotInsideTypeof <name_index> |
629 // | 636 // |
630 // Lookup the object with the name in constant pool entry |name_index| | 637 // Lookup the object with the name in constant pool entry |name_index| |
631 // dynamically without causing a NoReferenceError. | 638 // dynamically without causing a NoReferenceError. |
632 void Interpreter::DoLdaLookupContextSlotInsideTypeof( | 639 void Interpreter::DoLdaLookupContextSlotInsideTypeof( |
633 InterpreterAssembler* assembler) { | 640 InterpreterAssembler* assembler) { |
634 DoLdaLookupContextSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | 641 DoLdaLookupContextSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); |
635 } | 642 } |
636 | 643 |
644 void Interpreter::DoLdaLookupGlobalSlot(Runtime::FunctionId function_id, | |
645 InterpreterAssembler* assembler) { | |
646 Node* context = __ GetContext(); | |
647 Node* name_index = __ BytecodeOperandIdx(0); | |
648 Node* raw_slot = __ BytecodeOperandIdx(1); | |
rmcilroy
2016/09/19 09:04:29
nit - slot_index
Leszek Swirski
2016/09/19 10:34:51
Changed to feedback_slot, to match the parameter n
| |
649 Node* depth = __ BytecodeOperandUImm(2); | |
650 | |
651 Label slowpath(assembler, Label::kDeferred); | |
652 | |
653 // Check for context extensions to allow the fast path | |
654 __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath); | |
655 | |
656 // Fast path does a normal load global | |
657 { | |
658 Callable ic = CodeFactory::LoadGlobalICInOptimizedCode( | |
659 isolate_, function_id == Runtime::kLoadLookupSlotInsideTypeof | |
660 ? INSIDE_TYPEOF | |
661 : NOT_INSIDE_TYPEOF); | |
662 Node* result = BuildLoadGlobal(ic, context, raw_slot, assembler); | |
663 __ SetAccumulator(result); | |
664 __ Dispatch(); | |
665 } | |
666 | |
667 // Slow path when we have to call out to the runtime | |
668 __ Bind(&slowpath); | |
669 { DoLdaLookupSlowPath(function_id, name_index, context, assembler); } | |
670 } | |
671 | |
672 // LdaLookupGlobalSlot <name_index> | |
673 // | |
674 // Lookup the object with the name in constant pool entry |name_index| | |
675 // dynamically. | |
676 void Interpreter::DoLdaLookupGlobalSlot(InterpreterAssembler* assembler) { | |
677 DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlot, assembler); | |
678 } | |
679 | |
680 // LdaLookupGlobalSlotInsideTypeof <name_index> | |
681 // | |
682 // Lookup the object with the name in constant pool entry |name_index| | |
683 // dynamically without causing a NoReferenceError. | |
684 void Interpreter::DoLdaLookupGlobalSlotInsideTypeof( | |
685 InterpreterAssembler* assembler) { | |
686 DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | |
687 } | |
688 | |
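Background note (not part of the patch): the fast/slow split in DoLdaLookupGlobalSlot targets dynamic lookups that normally resolve to a global. A rough JavaScript sketch of the behaviour, assuming the usual sloppy-eval scoping rules (names here are illustrative only):

    function probe(code) {
      eval(code);     // sloppy direct eval forces 'counter' to be a dynamic lookup
      return counter; // with this patch, such a load can use the lookup-global fast path
    }

    var counter = 1;
    probe("");                 // no context extension up the chain -> fast path via LoadGlobalIC
    probe("var counter = 2");  // eval introduces an extension -> slow path via Runtime::kLoadLookupSlot

The check is dynamic (GotoIfHasContextExtensionUpToDepth above), so the same bytecode can take either path on different executions.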
637 void Interpreter::DoStaLookupSlot(LanguageMode language_mode, | 689 void Interpreter::DoStaLookupSlot(LanguageMode language_mode, |
638 InterpreterAssembler* assembler) { | 690 InterpreterAssembler* assembler) { |
639 Node* value = __ GetAccumulator(); | 691 Node* value = __ GetAccumulator(); |
640 Node* index = __ BytecodeOperandIdx(0); | 692 Node* index = __ BytecodeOperandIdx(0); |
641 Node* name = __ LoadConstantPoolEntry(index); | 693 Node* name = __ LoadConstantPoolEntry(index); |
642 Node* context = __ GetContext(); | 694 Node* context = __ GetContext(); |
643 Node* result = __ CallRuntime(is_strict(language_mode) | 695 Node* result = __ CallRuntime(is_strict(language_mode) |
644 ? Runtime::kStoreLookupSlot_Strict | 696 ? Runtime::kStoreLookupSlot_Strict |
645 : Runtime::kStoreLookupSlot_Sloppy, | 697 : Runtime::kStoreLookupSlot_Sloppy, |
646 context, name, value); | 698 context, name, value); |
(...skipping 1866 matching lines...) | |
2513 __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset, | 2565 __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset, |
2514 __ SmiTag(new_state)); | 2566 __ SmiTag(new_state)); |
2515 __ SetAccumulator(old_state); | 2567 __ SetAccumulator(old_state); |
2516 | 2568 |
2517 __ Dispatch(); | 2569 __ Dispatch(); |
2518 } | 2570 } |
2519 | 2571 |
2520 } // namespace interpreter | 2572 } // namespace interpreter |
2521 } // namespace internal | 2573 } // namespace internal |
2522 } // namespace v8 | 2574 } // namespace v8 |