Chromium Code Reviews

Side by Side Diff: src/x64/codegen-x64.cc

Issue 487017: Refactor Reference so that SetValue and GetValue pop the reference state. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 10 months ago
OLD | NEW
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 635 matching lines...)
647 // instruction that gets patched and coverage code gets in the way. 647 // instruction that gets patched and coverage code gets in the way.
648 masm_->testl(rax, Immediate(-delta_to_patch_site)); 648 masm_->testl(rax, Immediate(-delta_to_patch_site));
649 // Restore value (returned from store IC), key and receiver 649 // Restore value (returned from store IC), key and receiver
650 // registers. 650 // registers.
651 if (!value_.is(rax)) __ movq(value_, rax); 651 if (!value_.is(rax)) __ movq(value_, rax);
652 __ pop(key_); 652 __ pop(key_);
653 __ pop(receiver_); 653 __ pop(receiver_);
654 } 654 }
655 655
656 656
657 void CodeGenerator::CallApplyLazy(Property* apply, 657 void CodeGenerator::CallApplyLazy(Expression* applicand,
658 Expression* receiver, 658 Expression* receiver,
659 VariableProxy* arguments, 659 VariableProxy* arguments,
660 int position) { 660 int position) {
661 // An optimized implementation of expressions of the form
662 // x.apply(y, arguments).
663 // If the arguments object of the scope has not been allocated,
664 // and x.apply is Function.prototype.apply, this optimization
665 // just copies y and the arguments of the current function on the
666 // stack, as receiver and arguments, and calls x.
667 // In the implementation comments, we call x the applicand
668 // and y the receiver.
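// Illustrative example (editorial, not part of this patch): a typical
// source pattern that takes this fast path is plain argument forwarding,
//
//   function forward(y) { return x.apply(y, arguments); }
//
// compiled while the enclosing function has not materialized its
// arguments object, so the actual arguments are copied straight from
// the stack frame instead.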
661 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); 669 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
662 ASSERT(arguments->IsArguments()); 670 ASSERT(arguments->IsArguments());
663 671
664 JumpTarget slow, done; 672 // Load applicand.apply onto the stack. This will usually
665
666 // Load the apply function onto the stack. This will usually
667 // give us a megamorphic load site. Not super, but it works. 673 // give us a megamorphic load site. Not super, but it works.
668 Reference ref(this, apply); 674 Load(applicand);
669 ref.GetValue(); 675 Handle<String> name = Factory::LookupAsciiSymbol("apply");
670 ASSERT(ref.type() == Reference::NAMED); 676 frame()->Push(name);
677 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
678 __ nop();
679 frame()->Push(&answer);
671 680
672 // Load the receiver and the existing arguments object onto the 681 // Load the receiver and the existing arguments object onto the
673 // expression stack. Avoid allocating the arguments object here. 682 // expression stack. Avoid allocating the arguments object here.
674 Load(receiver); 683 Load(receiver);
675 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); 684 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
676 685
677 // Emit the source position information after having loaded the 686 // Emit the source position information after having loaded the
678 // receiver and the arguments. 687 // receiver and the arguments.
679 CodeForSourcePosition(position); 688 CodeForSourcePosition(position);
689 // Contents of frame at this point:
690 // Frame[0]: arguments object of the current function or the hole.
691 // Frame[1]: receiver
692 // Frame[2]: applicand.apply
693 // Frame[3]: applicand.
680 694
681 // Check if the arguments object has been lazily allocated 695 // Check if the arguments object has been lazily allocated
682 // already. If so, just use that instead of copying the arguments 696 // already. If so, just use that instead of copying the arguments
683 // from the stack. This also deals with cases where a local variable 697 // from the stack. This also deals with cases where a local variable
684 // named 'arguments' has been introduced. 698 // named 'arguments' has been introduced.
685 frame_->Dup(); 699 frame_->Dup();
686 Result probe = frame_->Pop(); 700 Result probe = frame_->Pop();
687 bool try_lazy = true; 701 { VirtualFrame::SpilledScope spilled_scope;
688 if (probe.is_constant()) { 702 Label slow, done;
689 try_lazy = probe.handle()->IsTheHole(); 703 bool try_lazy = true;
690 } else { 704 if (probe.is_constant()) {
691 __ Cmp(probe.reg(), Factory::the_hole_value()); 705 try_lazy = probe.handle()->IsTheHole();
692 probe.Unuse(); 706 } else {
693 slow.Branch(not_equal); 707 __ CompareRoot(probe.reg(), Heap::kTheHoleValueRootIndex);
694 } 708 probe.Unuse();
709 __ j(not_equal, &slow);
710 }
695 711
696 if (try_lazy) { 712 if (try_lazy) {
697 JumpTarget build_args; 713 Label build_args;
714 // Get rid of the arguments object probe.
715 frame_->Drop(); // Can be called on a spilled frame.
716 // Stack now has 3 elements on it.
717 // Contents of stack at this point:
718 // rsp[0]: receiver
719 // rsp[1]: applicand.apply
720 // rsp[2]: applicand.
698 721
699 // Get rid of the arguments object probe. 722 // Check that the receiver really is a JavaScript object.
700 frame_->Drop(); 723 __ movq(rax, Operand(rsp, 0));
701 724 Condition is_smi = masm_->CheckSmi(rax);
702 // Before messing with the execution stack, we sync all 725 __ j(is_smi, &build_args);
703 // elements. This is bound to happen anyway because we're
704 // about to call a function.
705 frame_->SyncRange(0, frame_->element_count() - 1);
706
707 // Check that the receiver really is a JavaScript object.
708 {
709 frame_->PushElementAt(0);
710 Result receiver = frame_->Pop();
711 receiver.ToRegister();
712 Condition is_smi = masm_->CheckSmi(receiver.reg());
713 build_args.Branch(is_smi);
714 // We allow all JSObjects including JSFunctions. As long as 726 // We allow all JSObjects including JSFunctions. As long as
715 // JS_FUNCTION_TYPE is the last instance type and it is right 727 // JS_FUNCTION_TYPE is the last instance type and it is right
716 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper 728 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
717 // bound. 729 // bound.
718 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 730 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
719 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); 731 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
720 __ CmpObjectType(receiver.reg(), FIRST_JS_OBJECT_TYPE, kScratchRegister); 732 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
721 build_args.Branch(below); 733 __ j(below, &build_args);
734
735 // Check that applicand.apply is Function.prototype.apply.
736 __ movq(rax, Operand(rsp, kPointerSize));
737 is_smi = masm_->CheckSmi(rax);
738 __ j(is_smi, &build_args);
739 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx);
740 __ j(not_equal, &build_args);
741 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
742 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
743 __ Cmp(FieldOperand(rax, SharedFunctionInfo::kCodeOffset), apply_code);
744 __ j(not_equal, &build_args);
745
746 // Check that applicand is a function.
747 __ movq(rdi, Operand(rsp, 2 * kPointerSize));
748 is_smi = masm_->CheckSmi(rdi);
749 __ j(is_smi, &build_args);
750 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
751 __ j(not_equal, &build_args);
752
753 // Copy the arguments to this function possibly from the
754 // adaptor frame below it.
755 Label invoke, adapted;
756 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
757 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
758 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
759 __ j(equal, &adapted);
760
761 // No arguments adaptor frame. Copy fixed number of arguments.
762 __ movq(rax, Immediate(scope_->num_parameters()));
763 for (int i = 0; i < scope_->num_parameters(); i++) {
764 __ push(frame_->ParameterAt(i));
765 }
766 __ jmp(&invoke);
767
768 // Arguments adaptor frame present. Copy arguments from there, but
769 // avoid copying too many arguments to avoid stack overflows.
770 __ bind(&adapted);
771 static const uint32_t kArgumentsLimit = 1 * KB;
772 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
773 __ SmiToInteger32(rax, rax);
774 __ movq(rcx, rax);
775 __ cmpq(rax, Immediate(kArgumentsLimit));
776 __ j(above, &build_args);
777
778 // Loop through the arguments pushing them onto the execution
779 // stack. We don't inform the virtual frame of the push, so we don't
780 // have to worry about getting rid of the elements from the virtual
781 // frame.
782 Label loop;
783 // rcx is a small non-negative integer, due to the test above.
784 __ testl(rcx, rcx);
785 __ j(zero, &invoke);
786 __ bind(&loop);
787 __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize));
788 __ decl(rcx);
789 __ j(not_zero, &loop);
790
791 // Invoke the function.
792 __ bind(&invoke);
793 ParameterCount actual(rax);
794 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
795 // Drop applicand.apply and applicand from the stack, and push
796 // the result of the function call, but leave the spilled frame
797 // unchanged, with 3 elements, so it is correct when we compile the
798 // slow-case code.
799 __ addq(rsp, Immediate(2 * kPointerSize));
800 __ push(rax);
801 // Stack now has 1 element:
802 // rsp[0]: result
803 __ jmp(&done);
804
805 // Slow-case: Allocate the arguments object since we know it isn't
806 // there, and fall-through to the slow-case where we call
807 // applicand.apply.
808 __ bind(&build_args);
809 // Stack now has 3 elements, because we have jumped from where:
810 // rsp[0]: receiver
811 // rsp[1]: applicand.apply
812 // rsp[2]: applicand.
813
814 // StoreArgumentsObject requires a correct frame, and may modify it.
815 Result arguments_object = StoreArgumentsObject(false);
816 frame_->SpillAll();
817 arguments_object.ToRegister();
818 frame_->EmitPush(arguments_object.reg());
819 arguments_object.Unuse();
820 // Stack and frame now have 4 elements.
821 __ bind(&slow);
722 } 822 }
723 823
724 // Verify that we're invoking Function.prototype.apply. 824 // Generic computation of x.apply(y, args) with no special optimization.
725 { 825 // Flip applicand.apply and applicand on the stack, so
726 frame_->PushElementAt(1); 826 // applicand looks like the receiver of the applicand.apply call.
727 Result apply = frame_->Pop(); 827 // Then process it as a normal function call.
728 apply.ToRegister(); 828 __ movq(rax, Operand(rsp, 3 * kPointerSize));
729 Condition is_smi = masm_->CheckSmi(apply.reg()); 829 __ movq(rbx, Operand(rsp, 2 * kPointerSize));
730 build_args.Branch(is_smi); 830 __ movq(Operand(rsp, 2 * kPointerSize), rax);
731 Result tmp = allocator_->Allocate(); 831 __ movq(Operand(rsp, 3 * kPointerSize), rbx);
732 __ CmpObjectType(apply.reg(), JS_FUNCTION_TYPE, tmp.reg());
733 build_args.Branch(not_equal);
734 __ movq(tmp.reg(),
735 FieldOperand(apply.reg(), JSFunction::kSharedFunctionInfoOffset));
736 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
737 __ Cmp(FieldOperand(tmp.reg(), SharedFunctionInfo::kCodeOffset),
738 apply_code);
739 build_args.Branch(not_equal);
740 }
741 832
742 // Get the function receiver from the stack. Check that it 833 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
743 // really is a function. 834 Result res = frame_->CallStub(&call_function, 3);
744 __ movq(rdi, Operand(rsp, 2 * kPointerSize)); 835 // The function and its two arguments have been dropped.
745 Condition is_smi = masm_->CheckSmi(rdi); 836 frame_->Drop(1); // Drop the receiver as well.
746 build_args.Branch(is_smi); 837 res.ToRegister();
747 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 838 frame_->EmitPush(res.reg());
748 build_args.Branch(not_equal); 839 // Stack now has 1 element:
749 840 // rsp[0]: result
750 // Copy the arguments to this function possibly from the 841 if (try_lazy) __ bind(&done);
751 // adaptor frame below it. 842 } // End of spilled scope.
752 Label invoke, adapted; 843 // Restore the context register after a call.
753 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
754 __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
755 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
756 __ j(equal, &adapted);
757
758 // No arguments adaptor frame. Copy fixed number of arguments.
759 __ movq(rax, Immediate(scope_->num_parameters()));
760 for (int i = 0; i < scope_->num_parameters(); i++) {
761 __ push(frame_->ParameterAt(i));
762 }
763 __ jmp(&invoke);
764
765 // Arguments adaptor frame present. Copy arguments from there, but
766 // avoid copying too many arguments to avoid stack overflows.
767 __ bind(&adapted);
768 static const uint32_t kArgumentsLimit = 1 * KB;
769 __ movq(rax, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
770 __ SmiToInteger32(rax, rax);
771 __ movq(rcx, rax);
772 __ cmpq(rax, Immediate(kArgumentsLimit));
773 build_args.Branch(above);
774
775 // Loop through the arguments pushing them onto the execution
776 // stack. We don't inform the virtual frame of the push, so we don't
777 // have to worry about getting rid of the elements from the virtual
778 // frame.
779 Label loop;
780 __ testl(rcx, rcx);
781 __ j(zero, &invoke);
782 __ bind(&loop);
783 __ push(Operand(rdx, rcx, times_pointer_size, 1 * kPointerSize));
784 __ decl(rcx);
785 __ j(not_zero, &loop);
786
787 // Invoke the function. The virtual frame knows about the receiver
788 // so make sure to forget that explicitly.
789 __ bind(&invoke);
790 ParameterCount actual(rax);
791 __ InvokeFunction(rdi, actual, CALL_FUNCTION);
792 frame_->Forget(1);
793 Result result = allocator()->Allocate(rax);
794 frame_->SetElementAt(0, &result);
795 done.Jump();
796
797 // Slow-case: Allocate the arguments object since we know it isn't
798 // there, and fall-through to the slow-case where we call
799 // Function.prototype.apply.
800 build_args.Bind();
801 Result arguments_object = StoreArgumentsObject(false);
802 frame_->Push(&arguments_object);
803 slow.Bind();
804 }
805
806 // Flip the apply function and the function to call on the stack, so
807 // the function looks like the receiver of the apply call. This way,
808 // the generic Function.prototype.apply implementation can deal with
809 // the call like it usually does.
810 Result a2 = frame_->Pop();
811 Result a1 = frame_->Pop();
812 Result ap = frame_->Pop();
813 Result fn = frame_->Pop();
814 frame_->Push(&ap);
815 frame_->Push(&fn);
816 frame_->Push(&a1);
817 frame_->Push(&a2);
818 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
819 Result res = frame_->CallStub(&call_function, 3);
820 frame_->Push(&res);
821
822 // All done. Restore context register after call.
823 if (try_lazy) done.Bind();
824 frame_->RestoreContextRegister(); 844 frame_->RestoreContextRegister();
825 } 845 }
826 846
827 847
828 class DeferredStackCheck: public DeferredCode { 848 class DeferredStackCheck: public DeferredCode {
829 public: 849 public:
830 DeferredStackCheck() { 850 DeferredStackCheck() {
831 set_comment("[ DeferredStackCheck"); 851 set_comment("[ DeferredStackCheck");
832 } 852 }
833 853
(...skipping 976 matching lines...)
1810 end_del_check.Bind(); 1830 end_del_check.Bind();
1811 // Store the entry in the 'each' expression and take another spin in the 1831 // Store the entry in the 'each' expression and take another spin in the
1812 // loop. rdx: i'th entry of the enum cache (or string thereof) 1832 // loop. rdx: i'th entry of the enum cache (or string thereof)
1813 frame_->EmitPush(rbx); 1833 frame_->EmitPush(rbx);
1814 { Reference each(this, node->each()); 1834 { Reference each(this, node->each());
1815 // Loading a reference may leave the frame in an unspilled state. 1835 // Loading a reference may leave the frame in an unspilled state.
1816 frame_->SpillAll(); 1836 frame_->SpillAll();
1817 if (!each.is_illegal()) { 1837 if (!each.is_illegal()) {
1818 if (each.size() > 0) { 1838 if (each.size() > 0) {
1819 frame_->EmitPush(frame_->ElementAt(each.size())); 1839 frame_->EmitPush(frame_->ElementAt(each.size()));
1820 } 1840 each.SetValue(NOT_CONST_INIT);
1821 // If the reference was to a slot we rely on the convenient property 1841 frame_->Drop(2); // Drop the original and the copy of the element.
1822 // that it doesn't matter whether a value (eg, ebx pushed above) is 1842 } else {
1823 // right on top of or right underneath a zero-sized reference. 1843 // If the reference has size zero then we can use the value below
1824 each.SetValue(NOT_CONST_INIT); 1844 // the reference as if it were above the reference, instead of pushing
1825 if (each.size() > 0) { 1845 // a new copy of it above the reference.
1826 // It's safe to pop the value lying on top of the reference before 1846 each.SetValue(NOT_CONST_INIT);
1827 // unloading the reference itself (which preserves the top of stack, 1847 frame_->Drop(); // Drop the original of the element.
1828 // ie, now the topmost value of the non-zero sized reference), since
1829 // we will discard the top of stack after unloading the reference
1830 // anyway.
1831 frame_->Drop();
1832 } 1848 }
1833 } 1849 }
1834 } 1850 }
1835 // Unloading a reference may leave the frame in an unspilled state. 1851 // Unloading a reference may leave the frame in an unspilled state.
1836 frame_->SpillAll(); 1852 frame_->SpillAll();
1837 1853
1838 // Discard the i'th entry pushed above or else the remainder of the
1839 // reference, whichever is currently on top of the stack.
1840 frame_->Drop();
1841
1842 // Body. 1854 // Body.
1843 CheckStack(); // TODO(1222600): ignore if body contains calls. 1855 CheckStack(); // TODO(1222600): ignore if body contains calls.
1844 VisitAndSpill(node->body()); 1856 VisitAndSpill(node->body());
1845 1857
1846 // Next. Reestablish a spilled frame in case we are coming here via 1858 // Next. Reestablish a spilled frame in case we are coming here via
1847 // a continue in the body. 1859 // a continue in the body.
1848 node->continue_target()->Bind(); 1860 node->continue_target()->Bind();
1849 frame_->SpillAll(); 1861 frame_->SpillAll();
1850 frame_->EmitPop(rax); 1862 frame_->EmitPop(rax);
1851 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 1863 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
(...skipping 690 matching lines...)
2542 Load(node->value()); 2554 Load(node->value());
2543 Result result = 2555 Result result =
2544 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); 2556 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
2545 frame_->Push(&result); 2557 frame_->Push(&result);
2546 } 2558 }
2547 2559
2548 2560
2549 void CodeGenerator::VisitAssignment(Assignment* node) { 2561 void CodeGenerator::VisitAssignment(Assignment* node) {
2550 Comment cmnt(masm_, "[ Assignment"); 2562 Comment cmnt(masm_, "[ Assignment");
2551 2563
2552 { Reference target(this, node->target()); 2564 { Reference target(this, node->target(), node->is_compound());
2553 if (target.is_illegal()) { 2565 if (target.is_illegal()) {
2554 // Fool the virtual frame into thinking that we left the assignment's 2566 // Fool the virtual frame into thinking that we left the assignment's
2555 // value on the frame. 2567 // value on the frame.
2556 frame_->Push(Smi::FromInt(0)); 2568 frame_->Push(Smi::FromInt(0));
2557 return; 2569 return;
2558 } 2570 }
2559 Variable* var = node->target()->AsVariableProxy()->AsVariable(); 2571 Variable* var = node->target()->AsVariableProxy()->AsVariable();
2560 2572
2561 if (node->starts_initialization_block()) { 2573 if (node->starts_initialization_block()) {
2562 ASSERT(target.type() == Reference::NAMED || 2574 ASSERT(target.type() == Reference::NAMED ||
2563 target.type() == Reference::KEYED); 2575 target.type() == Reference::KEYED);
2564 // Change to slow case in the beginning of an initialization 2576 // Change to slow case in the beginning of an initialization
2565 // block to avoid the quadratic behavior of repeatedly adding 2577 // block to avoid the quadratic behavior of repeatedly adding
2566 // fast properties. 2578 // fast properties.
2567 2579
2568 // The receiver is the argument to the runtime call. It is the 2580 // The receiver is the argument to the runtime call. It is the
2569 // first value pushed when the reference was loaded to the 2581 // first value pushed when the reference was loaded to the
2570 // frame. 2582 // frame.
2571 frame_->PushElementAt(target.size() - 1); 2583 frame_->PushElementAt(target.size() - 1);
2572 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1); 2584 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
2573 } 2585 }
2586 if (node->ends_initialization_block()) {
2587 // Add an extra copy of the receiver to the frame, so that it can be
2588 // converted back to fast case after the assignment.
2589 ASSERT(target.type() == Reference::NAMED ||
2590 target.type() == Reference::KEYED);
2591 if (target.type() == Reference::NAMED) {
2592 frame_->Dup();
2593 // Dup target receiver on stack.
2594 } else {
2595 ASSERT(target.type() == Reference::KEYED);
2596 Result temp = frame_->Pop();
2597 frame_->Dup();
2598 frame_->Push(&temp);
2599 }
2600 }
2574 if (node->op() == Token::ASSIGN || 2601 if (node->op() == Token::ASSIGN ||
2575 node->op() == Token::INIT_VAR || 2602 node->op() == Token::INIT_VAR ||
2576 node->op() == Token::INIT_CONST) { 2603 node->op() == Token::INIT_CONST) {
2577 Load(node->value()); 2604 Load(node->value());
2578 2605
2579 } else { 2606 } else { // Assignment is a compound assignment.
2580 Literal* literal = node->value()->AsLiteral(); 2607 Literal* literal = node->value()->AsLiteral();
2581 bool overwrite_value = 2608 bool overwrite_value =
2582 (node->value()->AsBinaryOperation() != NULL && 2609 (node->value()->AsBinaryOperation() != NULL &&
2583 node->value()->AsBinaryOperation()->ResultOverwriteAllowed()); 2610 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
2584 Variable* right_var = node->value()->AsVariableProxy()->AsVariable(); 2611 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
2585 // There are two cases where the target is not read in the right hand 2612 // There are two cases where the target is not read in the right hand
2586 // side, that are easy to test for: the right hand side is a literal, 2613 // side, that are easy to test for: the right hand side is a literal,
2587 // or the right hand side is a different variable. TakeValue invalidates 2614 // or the right hand side is a different variable. TakeValue invalidates
2588 // the target, with an implicit promise that it will be written to again 2615 // the target, with an implicit promise that it will be written to again
2589 // before it is read. 2616 // before it is read.
2590 if (literal != NULL || (right_var != NULL && right_var != var)) { 2617 if (literal != NULL || (right_var != NULL && right_var != var)) {
2591 target.TakeValue(); 2618 target.TakeValue();
2592 } else { 2619 } else {
2593 target.GetValue(); 2620 target.GetValue();
2594 } 2621 }
2595 Load(node->value()); 2622 Load(node->value());
2596 GenericBinaryOperation(node->binary_op(), 2623 GenericBinaryOperation(node->binary_op(),
2597 node->type(), 2624 node->type(),
2598 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); 2625 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
2599 } 2626 }
2600 2627
2601 if (var != NULL && 2628 if (var != NULL &&
2602 var->mode() == Variable::CONST && 2629 var->mode() == Variable::CONST &&
2603 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) { 2630 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
2604 // Assignment ignored - leave the value on the stack. 2631 // Assignment ignored - leave the value on the stack.
2632 UnloadReference(&target);
2605 } else { 2633 } else {
2606 CodeForSourcePosition(node->position()); 2634 CodeForSourcePosition(node->position());
2607 if (node->op() == Token::INIT_CONST) { 2635 if (node->op() == Token::INIT_CONST) {
2608 // Dynamic constant initializations must use the function context 2636 // Dynamic constant initializations must use the function context
2609 // and initialize the actual constant declared. Dynamic variable 2637 // and initialize the actual constant declared. Dynamic variable
2610 // initializations are simply assignments and use SetValue. 2638 // initializations are simply assignments and use SetValue.
2611 target.SetValue(CONST_INIT); 2639 target.SetValue(CONST_INIT);
2612 } else { 2640 } else {
2613 target.SetValue(NOT_CONST_INIT); 2641 target.SetValue(NOT_CONST_INIT);
2614 } 2642 }
2615 if (node->ends_initialization_block()) { 2643 if (node->ends_initialization_block()) {
2616 ASSERT(target.type() == Reference::NAMED || 2644 ASSERT(target.type() == Reference::UNLOADED);
2617 target.type() == Reference::KEYED);
2618 // End of initialization block. Revert to fast case. The 2645 // End of initialization block. Revert to fast case. The
2619 // argument to the runtime call is the receiver, which is the 2646 // argument to the runtime call is the extra copy of the receiver,
2620 // first value pushed as part of the reference, which is below 2647 // which is below the value of the assignment.
2621 // the lhs value. 2648 // Swap the receiver and the value of the assignment expression.
2622 frame_->PushElementAt(target.size()); 2649 Result lhs = frame_->Pop();
2650 Result receiver = frame_->Pop();
2651 frame_->Push(&lhs);
2652 frame_->Push(&receiver);
2623 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1); 2653 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
2624 } 2654 }
2625 } 2655 }
2626 } 2656 }
2627 } 2657 }
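A condensed sketch of the assignment paths above, for orientation only; it uses names from this diff, omits the initialization-block and const-variable handling, and is not code from the patch:

  Reference target(this, node->target(), node->is_compound());
  if (!node->is_compound()) {
    Load(node->value());                  // simple "x = y": load the value only
  } else {
    target.GetValue();                    // compound "x += y": reference stays loaded
    Load(node->value());
    GenericBinaryOperation(node->binary_op(), node->type(), NO_OVERWRITE);
  }
  target.SetValue(NOT_CONST_INIT);        // stores, then unloads the reference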
2628 2658
2629 2659
2630 void CodeGenerator::VisitThrow(Throw* node) { 2660 void CodeGenerator::VisitThrow(Throw* node) {
2631 Comment cmnt(masm_, "[ Throw"); 2661 Comment cmnt(masm_, "[ Throw");
2632 Load(node->exception()); 2662 Load(node->exception());
(...skipping 147 matching lines...)
2780 2810
2781 Handle<String> name = Handle<String>::cast(literal->handle()); 2811 Handle<String> name = Handle<String>::cast(literal->handle());
2782 2812
2783 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION && 2813 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
2784 name->IsEqualTo(CStrVector("apply")) && 2814 name->IsEqualTo(CStrVector("apply")) &&
2785 args->length() == 2 && 2815 args->length() == 2 &&
2786 args->at(1)->AsVariableProxy() != NULL && 2816 args->at(1)->AsVariableProxy() != NULL &&
2787 args->at(1)->AsVariableProxy()->IsArguments()) { 2817 args->at(1)->AsVariableProxy()->IsArguments()) {
2788 // Use the optimized Function.prototype.apply that avoids 2818 // Use the optimized Function.prototype.apply that avoids
2789 // allocating lazily allocated arguments objects. 2819 // allocating lazily allocated arguments objects.
2790 CallApplyLazy(property, 2820 CallApplyLazy(property->obj(),
2791 args->at(0), 2821 args->at(0),
2792 args->at(1)->AsVariableProxy(), 2822 args->at(1)->AsVariableProxy(),
2793 node->position()); 2823 node->position());
2794 2824
2795 } else { 2825 } else {
2796 // Push the name of the function and the receiver onto the stack. 2826 // Push the name of the function and the receiver onto the stack.
2797 frame_->Push(name); 2827 frame_->Push(name);
2798 Load(property->obj()); 2828 Load(property->obj());
2799 2829
2800 // Load the arguments. 2830 // Load the arguments.
(...skipping 11 matching lines...)
2812 // Replace the function on the stack with the result. 2842 // Replace the function on the stack with the result.
2813 frame_->SetElementAt(0, &result); 2843 frame_->SetElementAt(0, &result);
2814 } 2844 }
2815 2845
2816 } else { 2846 } else {
2817 // ------------------------------------------- 2847 // -------------------------------------------
2818 // JavaScript example: 'array[index](1, 2, 3)' 2848 // JavaScript example: 'array[index](1, 2, 3)'
2819 // ------------------------------------------- 2849 // -------------------------------------------
2820 2850
2821 // Load the function to call from the property through a reference. 2851 // Load the function to call from the property through a reference.
2822 Reference ref(this, property);
2823 ref.GetValue();
2824
2825 // Pass receiver to called function.
2826 if (property->is_synthetic()) { 2852 if (property->is_synthetic()) {
2853 Reference ref(this, property, false);
2854 ref.GetValue();
2827 // Use global object as receiver. 2855 // Use global object as receiver.
2828 LoadGlobalReceiver(); 2856 LoadGlobalReceiver();
2829 } else { 2857 } else {
2830 // The reference's size is non-negative. 2858 Reference ref(this, property, false);
2831 frame_->PushElementAt(ref.size()); 2859 ASSERT(ref.size() == 2);
2860 Result key = frame_->Pop();
2861 frame_->Dup(); // Duplicate the receiver.
2862 frame_->Push(&key);
2863 ref.GetValue();
2864 // Top of frame contains function to call, with duplicate copy of
2865 // receiver below it. Swap them.
2866 Result function = frame_->Pop();
2867 Result receiver = frame_->Pop();
2868 frame_->Push(&function);
2869 frame_->Push(&receiver);
2832 } 2870 }
2833 2871
2834 // Call the function. 2872 // Call the function.
2835 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); 2873 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
2836 } 2874 }
2837 2875
2838 } else { 2876 } else {
2839 // ---------------------------------- 2877 // ----------------------------------
2840 // JavaScript example: 'foo(1, 2, 3)' // foo is not global 2878 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
2841 // ---------------------------------- 2879 // ----------------------------------
(...skipping 318 matching lines...)
3160 3198
3161 Variable* var = node->expression()->AsVariableProxy()->AsVariable(); 3199 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
3162 bool is_const = (var != NULL && var->mode() == Variable::CONST); 3200 bool is_const = (var != NULL && var->mode() == Variable::CONST);
3163 3201
3164 // Postfix operations need a stack slot under the reference to hold 3202 // Postfix operations need a stack slot under the reference to hold
3165 // the old value while the new value is being stored. This is so that 3203 // the old value while the new value is being stored. This is so that
3166 // in the case that storing the new value requires a call, the old 3204 // in the case that storing the new value requires a call, the old
3167 // value will be in the frame to be spilled. 3205 // value will be in the frame to be spilled.
3168 if (is_postfix) frame_->Push(Smi::FromInt(0)); 3206 if (is_postfix) frame_->Push(Smi::FromInt(0));
3169 3207
3170 { Reference target(this, node->expression()); 3208 // A constant reference is not saved to, so the reference is not a
3209 // compound assignment reference.
3210 { Reference target(this, node->expression(), !is_const);
3171 if (target.is_illegal()) { 3211 if (target.is_illegal()) {
3172 // Spoof the virtual frame to have the expected height (one higher 3212 // Spoof the virtual frame to have the expected height (one higher
3173 // than on entry). 3213 // than on entry).
3174 if (!is_postfix) frame_->Push(Smi::FromInt(0)); 3214 if (!is_postfix) frame_->Push(Smi::FromInt(0));
3175 return; 3215 return;
3176 } 3216 }
3177 target.TakeValue(); 3217 target.TakeValue();
3178 3218
3179 Result new_value = frame_->Pop(); 3219 Result new_value = frame_->Pop();
3180 new_value.ToRegister(); 3220 new_value.ToRegister();
(...skipping 1068 matching lines...)
4249 } 4289 }
4250 4290
4251 4291
4252 bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) { 4292 bool CodeGenerator::IsUnsafeSmi(Handle<Object> value) {
4253 return false; 4293 return false;
4254 } 4294 }
4255 4295
4256 //------------------------------------------------------------------------------ 4296 //------------------------------------------------------------------------------
4257 // CodeGenerator implementation of variables, lookups, and stores. 4297 // CodeGenerator implementation of variables, lookups, and stores.
4258 4298
4259 Reference::Reference(CodeGenerator* cgen, Expression* expression) 4299 Reference::Reference(CodeGenerator* cgen,
4260 : cgen_(cgen), expression_(expression), type_(ILLEGAL) { 4300 Expression* expression,
4301 bool persist_after_get)
4302 : cgen_(cgen),
4303 expression_(expression),
4304 type_(ILLEGAL),
4305 persist_after_get_(persist_after_get) {
4261 cgen->LoadReference(this); 4306 cgen->LoadReference(this);
4262 } 4307 }
4263 4308
4264 4309
4265 Reference::~Reference() { 4310 Reference::~Reference() {
4266 cgen_->UnloadReference(this); 4311 ASSERT(is_unloaded() || is_illegal());
4267 } 4312 }
4268 4313
4269 4314
4270 void CodeGenerator::LoadReference(Reference* ref) { 4315 void CodeGenerator::LoadReference(Reference* ref) {
4271 // References are loaded from both spilled and unspilled code. Set the 4316 // References are loaded from both spilled and unspilled code. Set the
4272 // state to unspilled to allow that (and explicitly spill after 4317 // state to unspilled to allow that (and explicitly spill after
4273 // construction at the construction sites). 4318 // construction at the construction sites).
4274 bool was_in_spilled_code = in_spilled_code_; 4319 bool was_in_spilled_code = in_spilled_code_;
4275 in_spilled_code_ = false; 4320 in_spilled_code_ = false;
4276 4321
(...skipping 29 matching lines...)
4306 } 4351 }
4307 4352
4308 in_spilled_code_ = was_in_spilled_code; 4353 in_spilled_code_ = was_in_spilled_code;
4309 } 4354 }
4310 4355
4311 4356
4312 void CodeGenerator::UnloadReference(Reference* ref) { 4357 void CodeGenerator::UnloadReference(Reference* ref) {
4313 // Pop a reference from the stack while preserving TOS. 4358 // Pop a reference from the stack while preserving TOS.
4314 Comment cmnt(masm_, "[ UnloadReference"); 4359 Comment cmnt(masm_, "[ UnloadReference");
4315 frame_->Nip(ref->size()); 4360 frame_->Nip(ref->size());
4361 ref->set_unloaded();
4316 } 4362 }
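Taken together, the constructor, destructor, and UnloadReference changes above give Reference the following lifecycle; this is an editorial sketch built from names in this diff, not code from the patch:

  {
    // Non-persisting use: GetValue itself pops the receiver/key state.
    Reference ref(this, property, false);
    ref.GetValue();                   // pushes the value; reference is UNLOADED
  }                                   // ~Reference(): is_unloaded() holds

  {
    // Persisting use (e.g. compound assignment): the state must survive
    // GetValue so SetValue can still find the receiver/key on the frame.
    Reference target(this, node->target(), true);
    target.GetValue();                // reference stays loaded
    // ... compute the new value ...
    target.SetValue(NOT_CONST_INIT);  // stores, then calls UnloadReference
  }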
4317 4363
4318 4364
4319 Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { 4365 Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
4320 // Currently, this assertion will fail if we try to assign to 4366 // Currently, this assertion will fail if we try to assign to
4321 // a constant variable that is constant because it is read-only 4367 // a constant variable that is constant because it is read-only
4322 // (such as the variable referring to a named function expression). 4368 // (such as the variable referring to a named function expression).
4323 // We need to implement assignments to read-only variables. 4369 // We need to implement assignments to read-only variables.
4324 // Ideally, we should do this during AST generation (by converting 4370 // Ideally, we should do this during AST generation (by converting
4325 // such assignments into expression statements); however, in general 4371 // such assignments into expression statements); however, in general
(...skipping 1326 matching lines...)
5652 break; 5698 break;
5653 } 5699 }
5654 deferred->BindExit(); 5700 deferred->BindExit();
5655 left->Unuse(); 5701 left->Unuse();
5656 right->Unuse(); 5702 right->Unuse();
5657 ASSERT(answer.is_valid()); 5703 ASSERT(answer.is_valid());
5658 return answer; 5704 return answer;
5659 } 5705 }
5660 5706
5661 5707
5708 Result CodeGenerator::EmitKeyedLoad(bool is_global) {
5709 Comment cmnt(masm_, "[ Load from keyed Property");
5710 // Inline array load code if inside of a loop. We do not know
5711 // the receiver map yet, so we initially generate the code with
5712 // a check against an invalid map. In the inline cache code, we
5713 // patch the map check if appropriate.
5714 if (loop_nesting() > 0) {
5715 Comment cmnt(masm_, "[ Inlined load from keyed Property");
5716
5717 Result key = frame_->Pop();
5718 Result receiver = frame_->Pop();
5719 key.ToRegister();
5720 receiver.ToRegister();
5721
5722 // Use a fresh temporary to load the elements without destroying
5723 // the receiver which is needed for the deferred slow case.
5724 Result elements = allocator()->Allocate();
5725 ASSERT(elements.is_valid());
5726
5727 // Use a fresh temporary for the index and later the loaded
5728 // value.
5729 Result index = allocator()->Allocate();
5730 ASSERT(index.is_valid());
5731
5732 DeferredReferenceGetKeyedValue* deferred =
5733 new DeferredReferenceGetKeyedValue(index.reg(),
5734 receiver.reg(),
5735 key.reg(),
5736 is_global);
5737
5738 // Check that the receiver is not a smi (only needed if this
5739 // is not a load from the global context) and that it has the
5740 // expected map.
5741 if (!is_global) {
5742 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
5743 }
5744
5745 // Initially, use an invalid map. The map is patched in the IC
5746 // initialization code.
5747 __ bind(deferred->patch_site());
5748 // Use masm-> here instead of the double underscore macro since extra
5749 // coverage code can interfere with the patching. Do not use
5750 // root array to load null_value, since it must be patched with
5751 // the expected receiver map.
5752 masm_->movq(kScratchRegister, Factory::null_value(),
5753 RelocInfo::EMBEDDED_OBJECT);
5754 masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
5755 kScratchRegister);
5756 deferred->Branch(not_equal);
5757
5758 // Check that the key is a non-negative smi.
5759 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
5760
5761 // Get the elements array from the receiver and check that it
5762 // is not a dictionary.
5763 __ movq(elements.reg(),
5764 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
5765 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
5766 Factory::fixed_array_map());
5767 deferred->Branch(not_equal);
5768
5769 // Shift the key to get the actual index value and check that
5770 // it is within bounds.
5771 __ SmiToInteger32(index.reg(), key.reg());
5772 __ cmpl(index.reg(),
5773 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
5774 deferred->Branch(above_equal);
5775
5776 // The index register holds the un-smi-tagged key. It has been
5777 // zero-extended to 64-bits, so it can be used directly as index in the
5778 // operand below.
5779 // Load and check that the result is not the hole. We could
5780 // reuse the index or elements register for the value.
5781 //
5782 // TODO(206): Consider whether it makes sense to try some
5783 // heuristic about which register to reuse. For example, if
5784 // one is rax, then we can reuse that one because the value
5785 // coming from the deferred code will be in rax.
5786 Result value = index;
5787 __ movq(value.reg(),
5788 Operand(elements.reg(),
5789 index.reg(),
5790 times_pointer_size,
5791 FixedArray::kHeaderSize - kHeapObjectTag));
5792 elements.Unuse();
5793 index.Unuse();
5794 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
5795 deferred->Branch(equal);
5796 __ IncrementCounter(&Counters::keyed_load_inline, 1);
5797
5798 deferred->BindExit();
5799 // Restore the receiver and key to the frame and push the
5800 // result on top of it.
5801 frame_->Push(&receiver);
5802 frame_->Push(&key);
5803 return value;
5804
5805 } else {
5806 Comment cmnt(masm_, "[ Load from keyed Property");
5807 RelocInfo::Mode mode = is_global
5808 ? RelocInfo::CODE_TARGET_CONTEXT
5809 : RelocInfo::CODE_TARGET;
5810 Result answer = frame_->CallKeyedLoadIC(mode);
5811 // Make sure that we do not have a test instruction after the
5812 // call. A test instruction after the call is used to
5813 // indicate that we have generated an inline version of the
5814 // keyed load. The explicit nop instruction is here because
5815 // the push that follows might be peep-hole optimized away.
5816 __ nop();
5817 return answer;
5818 }
5819 }
5820
5821
5662 #undef __ 5822 #undef __
5663 #define __ ACCESS_MASM(masm) 5823 #define __ ACCESS_MASM(masm)
5664 5824
5665 5825
5666 Handle<String> Reference::GetName() { 5826 Handle<String> Reference::GetName() {
5667 ASSERT(type_ == NAMED); 5827 ASSERT(type_ == NAMED);
5668 Property* property = expression_->AsProperty(); 5828 Property* property = expression_->AsProperty();
5669 if (property == NULL) { 5829 if (property == NULL) {
5670 // Global variable reference treated as a named property reference. 5830 // Global variable reference treated as a named property reference.
5671 VariableProxy* proxy = expression_->AsVariableProxy(); 5831 VariableProxy* proxy = expression_->AsVariableProxy();
(...skipping 109 matching lines...)
5781 } 5941 }
5782 break; 5942 break;
5783 } 5943 }
5784 5944
5785 case KEYED: { 5945 case KEYED: {
5786 Comment cmnt(masm, "[ Load from keyed Property"); 5946 Comment cmnt(masm, "[ Load from keyed Property");
5787 Variable* var = expression_->AsVariableProxy()->AsVariable(); 5947 Variable* var = expression_->AsVariableProxy()->AsVariable();
5788 bool is_global = var != NULL; 5948 bool is_global = var != NULL;
5789 ASSERT(!is_global || var->is_global()); 5949 ASSERT(!is_global || var->is_global());
5790 5950
5791 // Inline array load code if inside of a loop. We do not know 5951 Result value = cgen_->EmitKeyedLoad(is_global);
5792 // the receiver map yet, so we initially generate the code with 5952 cgen_->frame()->Push(&value);
5793 // a check against an invalid map. In the inline cache code, we
5794 // patch the map check if appropriate.
5795 if (cgen_->loop_nesting() > 0) {
5796 Comment cmnt(masm, "[ Inlined load from keyed Property");
5797
5798 Result key = cgen_->frame()->Pop();
5799 Result receiver = cgen_->frame()->Pop();
5800 key.ToRegister();
5801 receiver.ToRegister();
5802
5803 // Use a fresh temporary to load the elements without destroying
5804 // the receiver which is needed for the deferred slow case.
5805 Result elements = cgen_->allocator()->Allocate();
5806 ASSERT(elements.is_valid());
5807
5808 // Use a fresh temporary for the index and later the loaded
5809 // value.
5810 Result index = cgen_->allocator()->Allocate();
5811 ASSERT(index.is_valid());
5812
5813 DeferredReferenceGetKeyedValue* deferred =
5814 new DeferredReferenceGetKeyedValue(index.reg(),
5815 receiver.reg(),
5816 key.reg(),
5817 is_global);
5818
5819 // Check that the receiver is not a smi (only needed if this
5820 // is not a load from the global context) and that it has the
5821 // expected map.
5822 if (!is_global) {
5823 __ JumpIfSmi(receiver.reg(), deferred->entry_label());
5824 }
5825
5826 // Initially, use an invalid map. The map is patched in the IC
5827 // initialization code.
5828 __ bind(deferred->patch_site());
5829 // Use masm-> here instead of the double underscore macro since extra
5830 // coverage code can interfere with the patching.
5831 masm->movq(kScratchRegister, Factory::null_value(),
5832 RelocInfo::EMBEDDED_OBJECT);
5833 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
5834 kScratchRegister);
5835 deferred->Branch(not_equal);
5836
5837 // Check that the key is a non-negative smi.
5838 __ JumpIfNotPositiveSmi(key.reg(), deferred->entry_label());
5839
5840 // Get the elements array from the receiver and check that it
5841 // is not a dictionary.
5842 __ movq(elements.reg(),
5843 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
5844 __ Cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
5845 Factory::fixed_array_map());
5846 deferred->Branch(not_equal);
5847
5848 // Shift the key to get the actual index value and check that
5849 // it is within bounds.
5850 __ SmiToInteger32(index.reg(), key.reg());
5851 __ cmpl(index.reg(),
5852 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
5853 deferred->Branch(above_equal);
5854
5855 // The index register holds the un-smi-tagged key. It has been
5856 // zero-extended to 64-bits, so it can be used directly as index in the
5857 // operand below.
5858 // Load and check that the result is not the hole. We could
5859 // reuse the index or elements register for the value.
5860 //
5861 // TODO(206): Consider whether it makes sense to try some
5862 // heuristic about which register to reuse. For example, if
5863 // one is rax, then we can reuse that one because the value
5864 // coming from the deferred code will be in rax.
5865 Result value = index;
5866 __ movq(value.reg(),
5867 Operand(elements.reg(),
5868 index.reg(),
5869 times_pointer_size,
5870 FixedArray::kHeaderSize - kHeapObjectTag));
5871 elements.Unuse();
5872 index.Unuse();
5873 __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
5874 deferred->Branch(equal);
5875 __ IncrementCounter(&Counters::keyed_load_inline, 1);
5876
5877 deferred->BindExit();
5878 // Restore the receiver and key to the frame and push the
5879 // result on top of it.
5880 cgen_->frame()->Push(&receiver);
5881 cgen_->frame()->Push(&key);
5882 cgen_->frame()->Push(&value);
5883
5884 } else {
5885 Comment cmnt(masm, "[ Load from keyed Property");
5886 RelocInfo::Mode mode = is_global
5887 ? RelocInfo::CODE_TARGET_CONTEXT
5888 : RelocInfo::CODE_TARGET;
5889 Result answer = cgen_->frame()->CallKeyedLoadIC(mode);
5890 // Make sure that we do not have a test instruction after the
5891 // call. A test instruction after the call is used to
5892 // indicate that we have generated an inline version of the
5893 // keyed load. The explicit nop instruction is here because
5894 // the push that follows might be peep-hole optimized away.
5895 __ nop();
5896 cgen_->frame()->Push(&answer);
5897 }
5898 break; 5953 break;
5899 } 5954 }
5900 5955
5901 default: 5956 default:
5902 UNREACHABLE(); 5957 UNREACHABLE();
5903 } 5958 }
5959
5960 if (!persist_after_get_) {
5961 cgen_->UnloadReference(this);
5962 }
5904 } 5963 }
5905 5964
5906 5965
5907 void Reference::TakeValue() { 5966 void Reference::TakeValue() {
5908 // TODO(X64): This function is completely architecture independent. Move 5967 // TODO(X64): This function is completely architecture independent. Move
5909 // it somewhere shared. 5968 // it somewhere shared.
5910 5969
5911 // For non-constant frame-allocated slots, we invalidate the value in the 5970 // For non-constant frame-allocated slots, we invalidate the value in the
5912 // slot. For all others, we fall back on GetValue. 5971 // slot. For all others, we fall back on GetValue.
5913 ASSERT(!cgen_->in_spilled_code()); 5972 ASSERT(!cgen_->in_spilled_code());
(...skipping 16 matching lines...)
5930 // Only non-constant, frame-allocated parameters and locals can reach 5989 // Only non-constant, frame-allocated parameters and locals can reach
5931 // here. Be careful not to use the optimizations for arguments 5990 // here. Be careful not to use the optimizations for arguments
5932 // object access since it may not have been initialized yet. 5991 // object access since it may not have been initialized yet.
5933 ASSERT(!slot->is_arguments()); 5992 ASSERT(!slot->is_arguments());
5934 if (slot->type() == Slot::PARAMETER) { 5993 if (slot->type() == Slot::PARAMETER) {
5935 cgen_->frame()->TakeParameterAt(slot->index()); 5994 cgen_->frame()->TakeParameterAt(slot->index());
5936 } else { 5995 } else {
5937 ASSERT(slot->type() == Slot::LOCAL); 5996 ASSERT(slot->type() == Slot::LOCAL);
5938 cgen_->frame()->TakeLocalAt(slot->index()); 5997 cgen_->frame()->TakeLocalAt(slot->index());
5939 } 5998 }
5999
6000 ASSERT(persist_after_get_);
6001 // Do not unload the reference, because it is used in SetValue.
5940 } 6002 }
5941 6003
5942 6004
5943 void Reference::SetValue(InitState init_state) { 6005 void Reference::SetValue(InitState init_state) {
5944 ASSERT(cgen_->HasValidEntryRegisters()); 6006 ASSERT(cgen_->HasValidEntryRegisters());
5945 ASSERT(!is_illegal()); 6007 ASSERT(!is_illegal());
5946 MacroAssembler* masm = cgen_->masm(); 6008 MacroAssembler* masm = cgen_->masm();
5947 switch (type_) { 6009 switch (type_) {
5948 case SLOT: { 6010 case SLOT: {
5949 Comment cmnt(masm, "[ Store to Slot"); 6011 Comment cmnt(masm, "[ Store to Slot");
(...skipping 108 matching lines...)
6058 // keyed store. 6120 // keyed store.
6059 masm->nop(); 6121 masm->nop();
6060 cgen_->frame()->Push(&answer); 6122 cgen_->frame()->Push(&answer);
6061 } 6123 }
6062 break; 6124 break;
6063 } 6125 }
6064 6126
6065 default: 6127 default:
6066 UNREACHABLE(); 6128 UNREACHABLE();
6067 } 6129 }
6130 cgen_->UnloadReference(this);
6068 } 6131 }
6069 6132
6070 6133
6071 void FastNewClosureStub::Generate(MacroAssembler* masm) { 6134 void FastNewClosureStub::Generate(MacroAssembler* masm) {
6072 // Clone the boilerplate in new space. Set the context to the 6135 // Clone the boilerplate in new space. Set the context to the
6073 // current context in rsi. 6136 // current context in rsi.
6074 Label gc; 6137 Label gc;
6075 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); 6138 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
6076 6139
6077 // Get the boilerplate function from the stack. 6140 // Get the boilerplate function from the stack.
(...skipping 2464 matching lines...)
8542 // Call the function from C++. 8605 // Call the function from C++.
8543 return FUNCTION_CAST<ModuloFunction>(buffer); 8606 return FUNCTION_CAST<ModuloFunction>(buffer);
8544 } 8607 }
8545 8608
8546 #endif 8609 #endif
8547 8610
8548 8611
8549 #undef __ 8612 #undef __
8550 8613
8551 } } // namespace v8::internal 8614 } } // namespace v8::internal