OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 20 matching lines...) |
31 | 31 |
32 #include "codegen.h" | 32 #include "codegen.h" |
33 #include "deoptimizer.h" | 33 #include "deoptimizer.h" |
34 #include "full-codegen.h" | 34 #include "full-codegen.h" |
35 | 35 |
36 namespace v8 { | 36 namespace v8 { |
37 namespace internal { | 37 namespace internal { |
38 | 38 |
39 | 39 |
40 #define __ ACCESS_MASM(masm) | 40 #define __ ACCESS_MASM(masm) |
41 | 41 #define __k __ |
| 42 #define __q __ |
| 43 #define __a __ |
| 44 #define __a __ |
42 | 45 |
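Note on the new markers: judging from their uses later in this file, __k appears to tag pushes/pops of the hardware return address, __q tags immediates that count full register-size stack slots, and __a tags operand displacements that must step over the return address. In the x64 build they all expand to plain __; how the x32 build expands them is not part of this hunk. The standalone C++ sketch below is only an illustration of the size mismatch they compensate for (the constants are assumptions, not the patch's own definitions): on x32 a hardware push/pop still moves 8 bytes while kPointerSize shrinks to 4, so return-address slots and argument slots no longer share one width.

    #include <cstdio>

    int main() {
      // Assumed slot sizes for the two builds this patch targets.
      struct Build { const char* name; int pointer_size; int return_address_size; };
      const Build builds[] = { {"x64", 8, 8}, {"x32", 4, 8} };

      const int argc = 2;  // arguments excluding the receiver, as in rax above
      for (const Build& b : builds) {
        // Right after a call: rsp[0] holds the return address, then the
        // arguments in pointer-size slots (last argument first, receiver last).
        for (int i = 1; i <= argc; ++i) {
          std::printf("%s: argument %d of %d at rsp[%d]\n", b.name, argc - i + 1,
                      argc, b.return_address_size + (i - 1) * b.pointer_size);
        }
        std::printf("%s: receiver at rsp[%d]\n", b.name,
                    b.return_address_size + argc * b.pointer_size);
      }
      return 0;
    }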
43 void Builtins::Generate_Adaptor(MacroAssembler* masm, | 46 void Builtins::Generate_Adaptor(MacroAssembler* masm, |
44 CFunctionId id, | 47 CFunctionId id, |
45 BuiltinExtraArguments extra_args) { | 48 BuiltinExtraArguments extra_args) { |
46 // ----------- S t a t e ------------- | 49 // ----------- S t a t e ------------- |
47 // -- rax : number of arguments excluding receiver | 50 // -- rax : number of arguments excluding receiver |
48 // -- rdi : called function (only guaranteed when | 51 // -- rdi : called function (only guaranteed when |
49 // extra_args requires it) | 52 // extra_args requires it) |
50 // -- rsi : context | 53 // -- rsi : context |
51 // -- rsp[0] : return address | 54 // -- rsp[0] : return address |
52 // -- rsp[8] : last argument | 55 // -- rsp[8] : last argument |
53 // -- ... | 56 // -- ... |
54 // -- rsp[8 * argc] : first argument (argc == rax) | 57 // -- rsp[8 * argc] : first argument (argc == rax) |
55 // -- rsp[8 * (argc + 1)] : receiver | 58 // -- rsp[8 * (argc + 1)] : receiver |
56 // ----------------------------------- | 59 // ----------------------------------- |
57 | 60 |
58 // Insert extra arguments. | 61 // Insert extra arguments. |
59 int num_extra_args = 0; | 62 int num_extra_args = 0; |
60 if (extra_args == NEEDS_CALLED_FUNCTION) { | 63 if (extra_args == NEEDS_CALLED_FUNCTION) { |
61 num_extra_args = 1; | 64 num_extra_args = 1; |
62 __ pop(kScratchRegister); // Save return address. | 65 __k pop(kScratchRegister); // Save return address. |
63 __ push(rdi); | 66 __ push(rdi); |
64 __ push(kScratchRegister); // Restore return address. | 67 __k push(kScratchRegister); // Restore return address. |
65 } else { | 68 } else { |
66 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); | 69 ASSERT(extra_args == NO_EXTRA_ARGUMENTS); |
67 } | 70 } |
68 | 71 |
69 // JumpToExternalReference expects rax to contain the number of arguments | 72 // JumpToExternalReference expects rax to contain the number of arguments |
70 // including the receiver and the extra arguments. | 73 // including the receiver and the extra arguments. |
71 __ addq(rax, Immediate(num_extra_args + 1)); | 74 __ addq(rax, Immediate(num_extra_args + 1)); |
72 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); | 75 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); |
73 } | 76 } |
74 | 77 |
(...skipping 347 matching lines...) |
422 __ movq(rax, Operand(rsp, 0)); | 425 __ movq(rax, Operand(rsp, 0)); |
423 | 426 |
424 // Restore the arguments count and leave the construct frame. | 427 // Restore the arguments count and leave the construct frame. |
425 __ bind(&exit); | 428 __ bind(&exit); |
426 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count. | 429 __ movq(rbx, Operand(rsp, kPointerSize)); // Get arguments count. |
427 | 430 |
428 // Leave construct frame. | 431 // Leave construct frame. |
429 } | 432 } |
430 | 433 |
431 // Remove caller arguments from the stack and return. | 434 // Remove caller arguments from the stack and return. |
432 __ pop(rcx); | 435 __k pop(rcx); |
433 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); | 436 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); |
434 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); | 437 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); |
435 __ push(rcx); | 438 __k push(rcx); |
436 Counters* counters = masm->isolate()->counters(); | 439 Counters* counters = masm->isolate()->counters(); |
437 __ IncrementCounter(counters->constructed_objects(), 1); | 440 __ IncrementCounter(counters->constructed_objects(), 1); |
438 __ ret(0); | 441 __ ret(0); |
439 } | 442 } |
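The sequence just above the final ret (pop the return address into rcx, advance rsp past the receiver and all arguments, push the return address back) is the recurring idiom for dropping a dynamically sized caller argument area; the patch marks only the return-address pop/push with __k, since those must keep full hardware width. A minimal sketch of the byte arithmetic, assuming the x64 slot size and a made-up argument count (an illustration, not the generated code):

    #include <cstdio>

    int main() {
      const int kPointerSize = 8;  // x64 build assumed
      const int argc = 3;          // the argument count (held as a Smi in rbx above)

      // SmiToIndex scales the count to pointer-size slots; the extra
      // 1 * kPointerSize skips the receiver slot that sits beyond the arguments.
      const int bytes_dropped = argc * kPointerSize + 1 * kPointerSize;
      std::printf("rsp advanced by %d bytes before the return address is pushed back\n",
                  bytes_dropped);
      return 0;
    }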
440 | 443 |
441 | 444 |
442 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { | 445 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) { |
443 Generate_JSConstructStubHelper(masm, false, true); | 446 Generate_JSConstructStubHelper(masm, false, true); |
444 } | 447 } |
445 | 448 |
(...skipping 236 matching lines...) |
682 | 685 |
683 // Preserve registers across notification; this is important for compiled | 686 // Preserve registers across notification; this is important for compiled |
684 // stubs that tail call the runtime on deopts passing their parameters in | 687 // stubs that tail call the runtime on deopts passing their parameters in |
685 // registers. | 688 // registers. |
686 __ Pushad(); | 689 __ Pushad(); |
687 __ CallRuntime(Runtime::kNotifyStubFailure, 0); | 690 __ CallRuntime(Runtime::kNotifyStubFailure, 0); |
688 __ Popad(); | 691 __ Popad(); |
689 // Tear down internal frame. | 692 // Tear down internal frame. |
690 } | 693 } |
691 | 694 |
692 __ pop(MemOperand(rsp, 0)); // Ignore state offset | 695 __k pop(MemOperand(rsp, 0)); // Ignore state offset |
693 __ ret(0); // Return to IC Miss stub, continuation still on stack. | 696 __ ret(0); // Return to IC Miss stub, continuation still on stack. |
694 } | 697 } |
695 | 698 |
696 | 699 |
697 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, | 700 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, |
698 Deoptimizer::BailoutType type) { | 701 Deoptimizer::BailoutType type) { |
699 // Enter an internal frame. | 702 // Enter an internal frame. |
700 { | 703 { |
701 FrameScope scope(masm, StackFrame::INTERNAL); | 704 FrameScope scope(masm, StackFrame::INTERNAL); |
702 | 705 |
703 // Pass the deoptimization type to the runtime system. | 706 // Pass the deoptimization type to the runtime system. |
704 __ Push(Smi::FromInt(static_cast<int>(type))); | 707 __ Push(Smi::FromInt(static_cast<int>(type))); |
705 | 708 |
706 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); | 709 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); |
707 // Tear down internal frame. | 710 // Tear down internal frame. |
708 } | 711 } |
709 | 712 |
710 // Get the full codegen state from the stack and untag it. | 713 // Get the full codegen state from the stack and untag it. |
711 __ SmiToInteger32(r10, Operand(rsp, 1 * kPointerSize)); | 714 __a SmiToInteger32(r10, Operand(rsp, 1 * kPointerSize)); |
712 | 715 |
713 // Switch on the state. | 716 // Switch on the state. |
714 Label not_no_registers, not_tos_rax; | 717 Label not_no_registers, not_tos_rax; |
715 __ cmpq(r10, Immediate(FullCodeGenerator::NO_REGISTERS)); | 718 __ cmpq(r10, Immediate(FullCodeGenerator::NO_REGISTERS)); |
716 __ j(not_equal, ¬_no_registers, Label::kNear); | 719 __ j(not_equal, ¬_no_registers, Label::kNear); |
717 __ ret(1 * kPointerSize); // Remove state. | 720 __q ret(1 * kPointerSize); // Remove state. |
718 | 721 |
719 __ bind(¬_no_registers); | 722 __ bind(¬_no_registers); |
720 __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 723 __q movq(rax, Operand(rsp, 2 * kPointerSize)); |
721 __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG)); | 724 __ cmpq(r10, Immediate(FullCodeGenerator::TOS_REG)); |
722 __ j(not_equal, ¬_tos_rax, Label::kNear); | 725 __ j(not_equal, ¬_tos_rax, Label::kNear); |
723 __ ret(2 * kPointerSize); // Remove state, rax. | 726 __a ret(2 * kPointerSize); // Remove state, rax. |
724 | 727 |
725 __ bind(¬_tos_rax); | 728 __ bind(¬_tos_rax); |
726 __ Abort("no cases left"); | 729 __ Abort("no cases left"); |
727 } | 730 } |
728 | 731 |
729 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { | 732 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { |
730 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); | 733 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); |
731 } | 734 } |
732 | 735 |
733 | 736 |
(...skipping 30 matching lines...) |
764 // ... | 767 // ... |
765 // rsp[8 * n] : Argument 1 | 768 // rsp[8 * n] : Argument 1 |
766 // rsp[8 * (n + 1)] : Receiver (function to call) | 769 // rsp[8 * (n + 1)] : Receiver (function to call) |
767 // | 770 // |
768 // rax contains the number of arguments, n, not counting the receiver. | 771 // rax contains the number of arguments, n, not counting the receiver. |
769 // | 772 // |
770 // 1. Make sure we have at least one argument. | 773 // 1. Make sure we have at least one argument. |
771 { Label done; | 774 { Label done; |
772 __ testq(rax, rax); | 775 __ testq(rax, rax); |
773 __ j(not_zero, &done); | 776 __ j(not_zero, &done); |
774 __ pop(rbx); | 777 __k pop(rbx); |
775 __ Push(masm->isolate()->factory()->undefined_value()); | 778 __ Push(masm->isolate()->factory()->undefined_value()); |
776 __ push(rbx); | 779 __k push(rbx); |
777 __ incq(rax); | 780 __ incq(rax); |
778 __ bind(&done); | 781 __ bind(&done); |
779 } | 782 } |
780 | 783 |
781 // 2. Get the function to call (passed as receiver) from the stack, check | 784 // 2. Get the function to call (passed as receiver) from the stack, check |
782 // if it is a function. | 785 // if it is a function. |
783 Label slow, non_function; | 786 Label slow, non_function; |
784 // The function to call is at position n+1 on the stack. | 787 // The function to call is at position n+1 on the stack. |
785 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); | 788 __a movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); |
786 __ JumpIfSmi(rdi, &non_function); | 789 __ JumpIfSmi(rdi, &non_function); |
787 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 790 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
788 __ j(not_equal, &slow); | 791 __ j(not_equal, &slow); |
789 | 792 |
790 // 3a. Patch the first argument if necessary when calling a function. | 793 // 3a. Patch the first argument if necessary when calling a function. |
791 Label shift_arguments; | 794 Label shift_arguments; |
792 __ Set(rdx, 0); // indicate regular JS_FUNCTION | 795 __ Set(rdx, 0); // indicate regular JS_FUNCTION |
793 { Label convert_to_object, use_global_receiver, patch_receiver; | 796 { Label convert_to_object, use_global_receiver, patch_receiver; |
794 // Change context eagerly in case we need the global receiver. | 797 // Change context eagerly in case we need the global receiver. |
795 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); | 798 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); |
796 | 799 |
797 // Do not transform the receiver for strict mode functions. | 800 // Do not transform the receiver for strict mode functions. |
798 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 801 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
799 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset), | 802 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset), |
800 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); | 803 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
801 __ j(not_equal, &shift_arguments); | 804 __ j(not_equal, &shift_arguments); |
802 | 805 |
803 // Do not transform the receiver for natives. | 806 // Do not transform the receiver for natives. |
804 // SharedFunctionInfo is already loaded into rbx. | 807 // SharedFunctionInfo is already loaded into rbx. |
805 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset), | 808 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset), |
806 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | 809 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
807 __ j(not_zero, &shift_arguments); | 810 __ j(not_zero, &shift_arguments); |
808 | 811 |
809 // Compute the receiver in non-strict mode. | 812 // Compute the receiver in non-strict mode. |
810 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); | 813 __a movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); |
811 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear); | 814 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear); |
812 | 815 |
813 __ CompareRoot(rbx, Heap::kNullValueRootIndex); | 816 __ CompareRoot(rbx, Heap::kNullValueRootIndex); |
814 __ j(equal, &use_global_receiver); | 817 __ j(equal, &use_global_receiver); |
815 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); | 818 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); |
816 __ j(equal, &use_global_receiver); | 819 __ j(equal, &use_global_receiver); |
817 | 820 |
818 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); | 821 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); |
819 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); | 822 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); |
820 __ j(above_equal, &shift_arguments); | 823 __ j(above_equal, &shift_arguments); |
821 | 824 |
822 __ bind(&convert_to_object); | 825 __ bind(&convert_to_object); |
823 { | 826 { |
824 // Enter an internal frame in order to preserve argument count. | 827 // Enter an internal frame in order to preserve argument count. |
825 FrameScope scope(masm, StackFrame::INTERNAL); | 828 FrameScope scope(masm, StackFrame::INTERNAL); |
826 __ Integer32ToSmi(rax, rax); | 829 __ Integer32ToSmi(rax, rax); |
827 __ push(rax); | 830 __ push(rax); |
828 | 831 |
829 __ push(rbx); | 832 __ push(rbx); |
830 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 833 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
831 __ movq(rbx, rax); | 834 __ movq(rbx, rax); |
832 __ Set(rdx, 0); // indicate regular JS_FUNCTION | 835 __ Set(rdx, 0); // indicate regular JS_FUNCTION |
833 | 836 |
834 __ pop(rax); | 837 __ pop(rax); |
835 __ SmiToInteger32(rax, rax); | 838 __ SmiToInteger32(rax, rax); |
836 } | 839 } |
837 | 840 |
838 // Restore the function to rdi. | 841 // Restore the function to rdi. |
839 __ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); | 842 __a movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); |
840 __ jmp(&patch_receiver, Label::kNear); | 843 __ jmp(&patch_receiver, Label::kNear); |
841 | 844 |
842 // Use the global receiver object from the called function as the | 845 // Use the global receiver object from the called function as the |
843 // receiver. | 846 // receiver. |
844 __ bind(&use_global_receiver); | 847 __ bind(&use_global_receiver); |
845 const int kGlobalIndex = | 848 const int kGlobalIndex = |
846 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; | 849 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize; |
847 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); | 850 __ movq(rbx, FieldOperand(rsi, kGlobalIndex)); |
848 __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset)); | 851 __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset)); |
849 __ movq(rbx, FieldOperand(rbx, kGlobalIndex)); | 852 __ movq(rbx, FieldOperand(rbx, kGlobalIndex)); |
850 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); | 853 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); |
851 | 854 |
852 __ bind(&patch_receiver); | 855 __ bind(&patch_receiver); |
853 __ movq(Operand(rsp, rax, times_pointer_size, 0), rbx); | 856 __a movq(Operand(rsp, rax, times_pointer_size, 0), rbx); |
854 | |
855 __ jmp(&shift_arguments); | 857 __ jmp(&shift_arguments); |
856 } | 858 } |
857 | 859 |
858 // 3b. Check for function proxy. | 860 // 3b. Check for function proxy. |
859 __ bind(&slow); | 861 __ bind(&slow); |
860 __ Set(rdx, 1); // indicate function proxy | 862 __ Set(rdx, 1); // indicate function proxy |
861 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 863 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
862 __ j(equal, &shift_arguments); | 864 __ j(equal, &shift_arguments); |
863 __ bind(&non_function); | 865 __ bind(&non_function); |
864 __ Set(rdx, 2); // indicate non-function | 866 __ Set(rdx, 2); // indicate non-function |
865 | 867 |
866 // 3c. Patch the first argument when calling a non-function. The | 868 // 3c. Patch the first argument when calling a non-function. The |
867 // CALL_NON_FUNCTION builtin expects the non-function callee as | 869 // CALL_NON_FUNCTION builtin expects the non-function callee as |
868 // receiver, so overwrite the first argument which will ultimately | 870 // receiver, so overwrite the first argument which will ultimately |
869 // become the receiver. | 871 // become the receiver. |
870 __ movq(Operand(rsp, rax, times_pointer_size, 0), rdi); | 872 __a movq(Operand(rsp, rax, times_pointer_size, 0), rdi); |
871 | 873 |
872 // 4. Shift arguments and return address one slot down on the stack | 874 // 4. Shift arguments and return address one slot down on the stack |
873 // (overwriting the original receiver). Adjust argument count to make | 875 // (overwriting the original receiver). Adjust argument count to make |
874 // the original first argument the new receiver. | 876 // the original first argument the new receiver. |
875 __ bind(&shift_arguments); | 877 __ bind(&shift_arguments); |
876 { Label loop; | 878 { Label loop; |
877 __ movq(rcx, rax); | 879 __ movq(rcx, rax); |
| 880 #ifdef V8_TARGET_ARCH_X32 |
| 881 __ incl(rcx); // kHWRegSize == 2 * kPointerSize: return address spans two slots. |
| 882 #endif |
878 __ bind(&loop); | 883 __ bind(&loop); |
879 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0)); | 884 __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0)); |
880 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx); | 885 __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx); |
881 __ decq(rcx); | 886 __ decq(rcx); |
882 __ j(not_sign, &loop); // While non-negative (to copy return address). | 887 __ j(not_sign, &loop); // While non-negative (to copy return address). |
| 888 #ifndef V8_TARGET_ARCH_X32 |
883 __ pop(rbx); // Discard copy of return address. | 889 __ pop(rbx); // Discard copy of return address. |
| 890 #else |
| 891 __ leal(rsp, Operand(rsp, 4)); // Discard the low half of the return address. |
| 892 #endif |
884 __ decq(rax); // One fewer argument (first argument is new receiver). | 893 __ decq(rax); // One fewer argument (first argument is new receiver). |
885 } | 894 } |
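The x32-only lines above grow the copy loop by one iteration and replace the final pop with a 4-byte rsp adjustment because the 8-byte return address spans two pointer-size slots in that build. A hedged model of the bookkeeping (plain C++, sizes assumed for illustration; the real loop moves actual stack slots):

    #include <cstdio>

    int main() {
      struct Build { const char* name; int pointer_size; int return_address_size; };
      const Build builds[] = { {"x64", 8, 8}, {"x32", 4, 8} };

      const int argc = 3;  // arguments excluding the receiver (rax)
      for (const Build& b : builds) {
        // The loop copies the return address and every argument one pointer-size
        // slot towards the receiver (overwriting it), so it needs one extra
        // iteration for each additional slot the return address occupies.
        const int extra_slots = b.return_address_size / b.pointer_size - 1;
        const int initial_rcx = argc + extra_slots;
        // Afterwards exactly one duplicated pointer-size slot remains at the
        // stack top and is discarded (pop rbx on x64, leal rsp, [rsp+4] on x32).
        std::printf("%s: rcx starts at %d, %d byte(s) discarded after the loop\n",
                    b.name, initial_rcx, b.pointer_size);
      }
      return 0;
    }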
886 | 895 |
887 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, | 896 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, |
888 // or a function proxy via CALL_FUNCTION_PROXY. | 897 // or a function proxy via CALL_FUNCTION_PROXY. |
889 { Label function, non_proxy; | 898 { Label function, non_proxy; |
890 __ testq(rdx, rdx); | 899 __ testq(rdx, rdx); |
891 __ j(zero, &function); | 900 __ j(zero, &function); |
892 __ Set(rbx, 0); | 901 __ Set(rbx, 0); |
893 __ SetCallKind(rcx, CALL_AS_METHOD); | 902 __ SetCallKind(rcx, CALL_AS_METHOD); |
894 __ cmpq(rdx, Immediate(1)); | 903 __ cmpq(rdx, Immediate(1)); |
895 __ j(not_equal, &non_proxy); | 904 __ j(not_equal, &non_proxy); |
896 | 905 |
897 __ pop(rdx); // return address | 906 __k pop(rdx); // return address |
898 __ push(rdi); // re-add proxy object as additional argument | 907 __ push(rdi); // re-add proxy object as additional argument |
899 __ push(rdx); | 908 __k push(rdx); |
900 __ incq(rax); | 909 __ incq(rax); |
901 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 910 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
902 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 911 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
903 RelocInfo::CODE_TARGET); | 912 RelocInfo::CODE_TARGET); |
904 | 913 |
905 __ bind(&non_proxy); | 914 __ bind(&non_proxy); |
906 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 915 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
907 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 916 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
908 RelocInfo::CODE_TARGET); | 917 RelocInfo::CODE_TARGET); |
909 __ bind(&function); | 918 __ bind(&function); |
910 } | 919 } |
911 | 920 |
912 // 5b. Get the code to call from the function and check that the number of | 921 // 5b. Get the code to call from the function and check that the number of |
913 // expected arguments matches what we're providing. If so, jump | 922 // expected arguments matches what we're providing. If so, jump |
914 // (tail-call) to the code in register rdx without checking arguments. | 923 // (tail-call) to the code in register rdx without checking arguments. |
915 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 924 __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 925 #ifndef V8_TARGET_ARCH_X32 |
916 __ movsxlq(rbx, | 926 __ movsxlq(rbx, |
917 FieldOperand(rdx, | 927 FieldOperand(rdx, |
918 SharedFunctionInfo::kFormalParameterCountOffset)); | 928 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 929 #else |
| 930 // kFormalParameterCountOffset is not tagged in X64, while tagged in X32. |
| 931 __ movl(rbx, |
| 932 FieldOperand(rdx, |
| 933 SharedFunctionInfo::kFormalParameterCountOffset)); |
| 934 __ SmiToInteger32(rbx, rbx); |
| 935 #endif |
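As the comment notes, the formal parameter count is stored as a raw integer on x64 but as a Smi on x32, hence the extra SmiToInteger32 in the new branch. For background, a hedged sketch of the untagging involved (the shift amounts follow the usual V8 conventions for 32-bit-pointer and 64-bit-pointer builds; the helpers are illustrative stand-ins, not the engine's own):

    #include <cstdint>
    #include <cstdio>

    // 32-bit-pointer builds (ia32/x32): 31-bit payload above a zero tag bit.
    int32_t SmiToInteger32_32(int32_t smi) { return smi >> 1; }

    // x64 build: 32-bit payload stored in the upper half of the 64-bit word.
    int32_t SmiToInteger32_64(int64_t smi) { return static_cast<int32_t>(smi >> 32); }

    int main() {
      const int32_t count = 5;                        // formal parameter count
      const int32_t tagged32 = count << 1;            // Smi on a 32-bit build
      const int64_t tagged64 = int64_t{count} << 32;  // Smi on x64
      std::printf("%d %d\n", SmiToInteger32_32(tagged32), SmiToInteger32_64(tagged64));
      return 0;
    }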
919 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); | 936 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); |
920 __ SetCallKind(rcx, CALL_AS_METHOD); | 937 __ SetCallKind(rcx, CALL_AS_METHOD); |
921 __ cmpq(rax, rbx); | 938 __ cmpq(rax, rbx); |
922 __ j(not_equal, | 939 __ j(not_equal, |
923 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 940 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
924 RelocInfo::CODE_TARGET); | 941 RelocInfo::CODE_TARGET); |
925 | 942 |
926 ParameterCount expected(0); | 943 ParameterCount expected(0); |
927 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, | 944 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, |
928 NullCallWrapper(), CALL_AS_METHOD); | 945 NullCallWrapper(), CALL_AS_METHOD); |
929 } | 946 } |
930 | 947 |
931 | 948 |
932 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { | 949 void Builtins::Generate_FunctionApply(MacroAssembler* masm) { |
933 // Stack at entry: | 950 // Stack at entry: |
934 // rsp : return address | 951 // rsp : return address |
935 // rsp[8] : arguments | 952 // rsp[8] : arguments |
936 // rsp[16] : receiver ("this") | 953 // rsp[16] : receiver ("this") |
937 // rsp[24] : function | 954 // rsp[24] : function |
938 { | 955 { |
939 FrameScope frame_scope(masm, StackFrame::INTERNAL); | 956 FrameScope frame_scope(masm, StackFrame::INTERNAL); |
940 // Stack frame: | 957 // Stack frame: |
941 // rbp : Old base pointer | 958 // rbp : Old base pointer |
942 // rbp[8] : return address | 959 // rbp[8] : return address |
943 // rbp[16] : function arguments | 960 // rbp[16] : function arguments |
944 // rbp[24] : receiver | 961 // rbp[24] : receiver |
945 // rbp[32] : function | 962 // rbp[32] : function |
| 963 #ifndef V8_TARGET_ARCH_X32 |
946 static const int kArgumentsOffset = 2 * kPointerSize; | 964 static const int kArgumentsOffset = 2 * kPointerSize; |
947 static const int kReceiverOffset = 3 * kPointerSize; | 965 static const int kReceiverOffset = 3 * kPointerSize; |
948 static const int kFunctionOffset = 4 * kPointerSize; | 966 static const int kFunctionOffset = 4 * kPointerSize; |
| 967 #else |
| 968 static const int kArgumentsOffset = 2 * kHWRegSize; |
| 969 static const int kReceiverOffset = 2 * kHWRegSize + 1 * kPointerSize; |
| 970 static const int kFunctionOffset = 2 * kHWRegSize + 2 * kPointerSize; |
| 971 #endif |
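These constants mix two slot sizes on x32: the saved frame pointer and the return address below the JavaScript values are pushed by the hardware (kHWRegSize, assumed to be 8 bytes), while the values themselves occupy 4-byte kPointerSize slots. A quick hedged check of the resulting offsets, with the sizes assumed rather than taken from the headers:

    #include <cstdio>

    int main() {
      // Assumed sizes; only the x64 layout (rbp[16]/rbp[24]/rbp[32]) is spelled
      // out in the frame comment above.
      struct Build { const char* name; int pointer_size; int hw_reg_size; };
      const Build builds[] = { {"x64", 8, 8}, {"x32", 4, 8} };

      for (const Build& b : builds) {
        const int arguments_offset = 2 * b.hw_reg_size;  // skip saved rbp + return address
        const int receiver_offset  = arguments_offset + 1 * b.pointer_size;
        const int function_offset  = arguments_offset + 2 * b.pointer_size;
        std::printf("%s: arguments rbp[%d], receiver rbp[%d], function rbp[%d]\n",
                    b.name, arguments_offset, receiver_offset, function_offset);
      }
      return 0;
    }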
949 | 972 |
950 __ push(Operand(rbp, kFunctionOffset)); | 973 __ push(Operand(rbp, kFunctionOffset)); |
951 __ push(Operand(rbp, kArgumentsOffset)); | 974 __ push(Operand(rbp, kArgumentsOffset)); |
952 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); | 975 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); |
953 | 976 |
954 // Check the stack for overflow. We are not trying to catch | 977 // Check the stack for overflow. We are not trying to catch |
955 // interruptions (e.g. debug break and preemption) here, so the "real stack | 978 // interruptions (e.g. debug break and preemption) here, so the "real stack |
956 // limit" is checked. | 979 // limit" is checked. |
957 Label okay; | 980 Label okay; |
958 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); | 981 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); |
(...skipping 214 matching lines...) |
1173 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx); | 1196 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx); |
1174 __ cmpq(rdi, rcx); | 1197 __ cmpq(rdi, rcx); |
1175 __ Assert(equal, "Unexpected String function"); | 1198 __ Assert(equal, "Unexpected String function"); |
1176 } | 1199 } |
1177 | 1200 |
1178 // Load the first argument into rax and get rid of the rest | 1201 // Load the first argument into rax and get rid of the rest |
1179 // (including the receiver). | 1202 // (including the receiver). |
1180 Label no_arguments; | 1203 Label no_arguments; |
1181 __ testq(rax, rax); | 1204 __ testq(rax, rax); |
1182 __ j(zero, &no_arguments); | 1205 __ j(zero, &no_arguments); |
1183 __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); | 1206 __a movq(rbx, Operand(rsp, rax, times_pointer_size, 0)); |
1184 __ pop(rcx); | 1207 __k pop(rcx); |
1185 __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); | 1208 __ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); |
1186 __ push(rcx); | 1209 __k push(rcx); |
1187 __ movq(rax, rbx); | 1210 __ movq(rax, rbx); |
1188 | 1211 |
1189 // Lookup the argument in the number to string cache. | 1212 // Lookup the argument in the number to string cache. |
1190 Label not_cached, argument_is_string; | 1213 Label not_cached, argument_is_string; |
1191 NumberToStringStub::GenerateLookupNumberStringCache( | 1214 NumberToStringStub::GenerateLookupNumberStringCache( |
1192 masm, | 1215 masm, |
1193 rax, // Input. | 1216 rax, // Input. |
1194 rbx, // Result. | 1217 rbx, // Result. |
1195 rcx, // Scratch 1. | 1218 rcx, // Scratch 1. |
1196 rdx, // Scratch 2. | 1219 rdx, // Scratch 2. |
(...skipping 64 matching lines...) |
1261 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); | 1284 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); |
1262 __ pop(rdi); | 1285 __ pop(rdi); |
1263 } | 1286 } |
1264 __ movq(rbx, rax); | 1287 __ movq(rbx, rax); |
1265 __ jmp(&argument_is_string); | 1288 __ jmp(&argument_is_string); |
1266 | 1289 |
1267 // Load the empty string into rbx, remove the receiver from the | 1290 // Load the empty string into rbx, remove the receiver from the |
1268 // stack, and jump back to the case where the argument is a string. | 1291 // stack, and jump back to the case where the argument is a string. |
1269 __ bind(&no_arguments); | 1292 __ bind(&no_arguments); |
1270 __ LoadRoot(rbx, Heap::kempty_stringRootIndex); | 1293 __ LoadRoot(rbx, Heap::kempty_stringRootIndex); |
1271 __ pop(rcx); | 1294 __k pop(rcx); |
1272 __ lea(rsp, Operand(rsp, kPointerSize)); | 1295 __ lea(rsp, Operand(rsp, 1 * kPointerSize)); |
1273 __ push(rcx); | 1296 __k push(rcx); |
1274 __ jmp(&argument_is_string); | 1297 __ jmp(&argument_is_string); |
1275 | 1298 |
1276 // At this point the argument is already a string. Call runtime to | 1299 // At this point the argument is already a string. Call runtime to |
1277 // create a string wrapper. | 1300 // create a string wrapper. |
1278 __ bind(&gc_required); | 1301 __ bind(&gc_required); |
1279 __ IncrementCounter(counters->string_ctor_gc_required(), 1); | 1302 __ IncrementCounter(counters->string_ctor_gc_required(), 1); |
1280 { | 1303 { |
1281 FrameScope scope(masm, StackFrame::INTERNAL); | 1304 FrameScope scope(masm, StackFrame::INTERNAL); |
1282 __ push(rbx); | 1305 __ push(rbx); |
1283 __ CallRuntime(Runtime::kNewStringWrapper, 1); | 1306 __ CallRuntime(Runtime::kNewStringWrapper, 1); |
(...skipping 22 matching lines...) |
1306 | 1329 |
1307 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { | 1330 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { |
1308 // Retrieve the number of arguments from the stack. Number is a Smi. | 1331 // Retrieve the number of arguments from the stack. Number is a Smi. |
1309 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1332 __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
1310 | 1333 |
1311 // Leave the frame. | 1334 // Leave the frame. |
1312 __ movq(rsp, rbp); | 1335 __ movq(rsp, rbp); |
1313 __ pop(rbp); | 1336 __ pop(rbp); |
1314 | 1337 |
1315 // Remove caller arguments from the stack. | 1338 // Remove caller arguments from the stack. |
1316 __ pop(rcx); | 1339 __k pop(rcx); |
1317 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); | 1340 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); |
1318 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); | 1341 __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); |
1319 __ push(rcx); | 1342 __k push(rcx); |
1320 } | 1343 } |
1321 | 1344 |
1322 | 1345 |
1323 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { | 1346 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { |
1324 // ----------- S t a t e ------------- | 1347 // ----------- S t a t e ------------- |
1325 // -- rax : actual number of arguments | 1348 // -- rax : actual number of arguments |
1326 // -- rbx : expected number of arguments | 1349 // -- rbx : expected number of arguments |
1327 // -- rcx : call kind information | 1350 // -- rcx : call kind information |
1328 // -- rdx : code entry to call | 1351 // -- rdx : code entry to call |
1329 // ----------------------------------- | 1352 // ----------------------------------- |
(...skipping 90 matching lines...) |
1420 // If the result was -1 it means that we couldn't optimize the | 1443 // If the result was -1 it means that we couldn't optimize the |
1421 // function. Just return and continue in the unoptimized version. | 1444 // function. Just return and continue in the unoptimized version. |
1422 Label skip; | 1445 Label skip; |
1423 __ SmiCompare(rax, Smi::FromInt(-1)); | 1446 __ SmiCompare(rax, Smi::FromInt(-1)); |
1424 __ j(not_equal, &skip, Label::kNear); | 1447 __ j(not_equal, &skip, Label::kNear); |
1425 __ ret(0); | 1448 __ ret(0); |
1426 | 1449 |
1427 __ bind(&skip); | 1450 __ bind(&skip); |
1428 // Untag the AST id and push it on the stack. | 1451 // Untag the AST id and push it on the stack. |
1429 __ SmiToInteger32(rax, rax); | 1452 __ SmiToInteger32(rax, rax); |
1430 __ push(rax); | 1453 __k push(rax); |
1431 | 1454 |
1432 // Generate the code for doing the frame-to-frame translation using | 1455 // Generate the code for doing the frame-to-frame translation using |
1433 // the deoptimizer infrastructure. | 1456 // the deoptimizer infrastructure. |
1434 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); | 1457 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR); |
1435 generator.Generate(); | 1458 generator.Generate(); |
1436 } | 1459 } |
1437 | 1460 |
1438 | 1461 #undef __a |
| 1462 #undef __a |
| 1463 #undef __q |
| 1464 #undef __k |
1439 #undef __ | 1465 #undef __ |
1440 | 1466 |
1441 } } // namespace v8::internal | 1467 } } // namespace v8::internal |
1442 | 1468 |
1443 #endif // V8_TARGET_ARCH_X64 | 1469 #endif // V8_TARGET_ARCH_X64 |