OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3699 matching lines...)
3710 void StackCheckStub::Generate(MacroAssembler* masm) { | 3710 void StackCheckStub::Generate(MacroAssembler* masm) { |
3711 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); | 3711 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); |
3712 } | 3712 } |
3713 | 3713 |
3714 | 3714 |
3715 void InterruptStub::Generate(MacroAssembler* masm) { | 3715 void InterruptStub::Generate(MacroAssembler* masm) { |
3716 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); | 3716 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); |
3717 } | 3717 } |
3718 | 3718 |
3719 | 3719 |
3720 static void GenerateRecordCallTargetNoArray(MacroAssembler* masm) { | |
3721 // Cache the called function in a global property cell. Cache states | |
3722 // are uninitialized, monomorphic (indicated by a JSFunction), and | |
3723 // megamorphic. | |
3724 // rbx : cache cell for call target | |
3725 // rdi : the function to call | |
3726 Isolate* isolate = masm->isolate(); | |
3727 Label initialize, done; | |
3728 | |
3729 // Load the cache state into rcx. | |
3730 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | |
3731 | |
3732 // A monomorphic cache hit or an already megamorphic state: invoke the | |
3733 // function without changing the state. | |
3734 __ cmpq(rcx, rdi); | |
3735 __ j(equal, &done, Label::kNear); | |
3736 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); | |
3737 __ j(equal, &done, Label::kNear); | |
3738 | |
3739 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
3740 // megamorphic. | |
3741 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); | |
3742 __ j(equal, &initialize, Label::kNear); | |
3743 // MegamorphicSentinel is an immortal immovable object (undefined) so no | |
3744 // write-barrier is needed. | |
3745 __ Move(FieldOperand(rbx, Cell::kValueOffset), | |
3746 TypeFeedbackCells::MegamorphicSentinel(isolate)); | |
3747 __ jmp(&done, Label::kNear); | |
3748 | |
3749 // An uninitialized cache is patched with the function. | |
3750 __ bind(&initialize); | |
3751 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi); | |
3752 // No need for a write barrier here - cells are rescanned. | |
3753 | |
3754 __ bind(&done); | |
3755 } | |
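
Note for readers of the patch: the deleted stub and its surviving sibling GenerateRecordCallTarget implement the same three-state feedback machine per call site. A minimal C++ sketch of that logic, with MegamorphicSentinel() and UninitializedSentinel() standing in for the TypeFeedbackCells accessors used above:

    // Hedged sketch only, not the stub's actual code. The cell starts
    // uninitialized; the first callee makes it monomorphic; any different
    // callee afterwards makes it megamorphic for good.
    void RecordCallTarget(Cell* cell, Object* callee) {
      Object* state = cell->value();
      if (state == callee || state == MegamorphicSentinel()) {
        return;  // Monomorphic hit, or already megamorphic: leave unchanged.
      }
      if (state == UninitializedSentinel()) {
        cell->set_value(callee);                 // First call: go monomorphic.
      } else {
        cell->set_value(MegamorphicSentinel());  // Miss: go megamorphic.
      }
    }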
3756 | |
3757 | |
3758 static void GenerateRecordCallTarget(MacroAssembler* masm) { | 3720 static void GenerateRecordCallTarget(MacroAssembler* masm) { |
3759 // Cache the called function in a global property cell. Cache states | 3721 // Cache the called function in a global property cell. Cache states |
3760 // are uninitialized, monomorphic (indicated by a JSFunction), and | 3722 // are uninitialized, monomorphic (indicated by a JSFunction), and |
3761 // megamorphic. | 3723 // megamorphic. |
3762 // rbx : cache cell for call target | 3724 // rbx : cache cell for call target |
3763 // rdi : the function to call | 3725 // rdi : the function to call |
3764 ASSERT(FLAG_optimize_constructed_arrays); | |
3765 Isolate* isolate = masm->isolate(); | 3726 Isolate* isolate = masm->isolate(); |
3766 Label initialize, done, miss, megamorphic, not_array_function; | 3727 Label initialize, done, miss, megamorphic, not_array_function; |
3767 | 3728 |
3768 // Load the cache state into rcx. | 3729 // Load the cache state into rcx. |
3769 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | 3730 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
3770 | 3731 |
3771 // A monomorphic cache hit or an already megamorphic state: invoke the | 3732 // A monomorphic cache hit or an already megamorphic state: invoke the |
3772 // function without changing the state. | 3733 // function without changing the state. |
3773 __ cmpq(rcx, rdi); | 3734 __ cmpq(rcx, rdi); |
3774 __ j(equal, &done); | 3735 __ j(equal, &done); |
(...skipping 78 matching lines...)
3853 __ bind(&call); | 3814 __ bind(&call); |
3854 } | 3815 } |
3855 | 3816 |
3856 // Check that the function really is a JavaScript function. | 3817 // Check that the function really is a JavaScript function. |
3857 __ JumpIfSmi(rdi, &non_function); | 3818 __ JumpIfSmi(rdi, &non_function); |
3858 // Go to the slow case if we do not have a function. | 3819 // Go to the slow case if we do not have a function.
3859 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 3820 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
3860 __ j(not_equal, &slow); | 3821 __ j(not_equal, &slow); |
3861 | 3822 |
3862 if (RecordCallTarget()) { | 3823 if (RecordCallTarget()) { |
3863 if (FLAG_optimize_constructed_arrays) { | 3824 GenerateRecordCallTarget(masm); |
3864 GenerateRecordCallTarget(masm); | |
3865 } else { | |
3866 GenerateRecordCallTargetNoArray(masm); | |
3867 } | |
3868 } | 3825 } |
3869 | 3826 |
3870 // Fast-case: Just invoke the function. | 3827 // Fast-case: Just invoke the function. |
3871 ParameterCount actual(argc_); | 3828 ParameterCount actual(argc_); |
3872 | 3829 |
3873 if (ReceiverMightBeImplicit()) { | 3830 if (ReceiverMightBeImplicit()) { |
3874 Label call_as_function; | 3831 Label call_as_function; |
3875 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); | 3832 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); |
3876 __ j(equal, &call_as_function); | 3833 __ j(equal, &call_as_function); |
3877 __ InvokeFunction(rdi, | 3834 __ InvokeFunction(rdi, |
(...skipping 54 matching lines...)
3932 // rdi : constructor function | 3889 // rdi : constructor function |
3933 Label slow, non_function_call; | 3890 Label slow, non_function_call; |
3934 | 3891 |
3935 // Check that function is not a smi. | 3892 // Check that function is not a smi. |
3936 __ JumpIfSmi(rdi, &non_function_call); | 3893 __ JumpIfSmi(rdi, &non_function_call); |
3937 // Check that function is a JSFunction. | 3894 // Check that function is a JSFunction. |
3938 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 3895 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
3939 __ j(not_equal, &slow); | 3896 __ j(not_equal, &slow); |
3940 | 3897 |
3941 if (RecordCallTarget()) { | 3898 if (RecordCallTarget()) { |
3942 if (FLAG_optimize_constructed_arrays) { | 3899 GenerateRecordCallTarget(masm); |
3943 GenerateRecordCallTarget(masm); | |
3944 } else { | |
3945 GenerateRecordCallTargetNoArray(masm); | |
3946 } | |
3947 } | 3900 } |
3948 | 3901 |
3949 // Jump to the function-specific construct stub. | 3902 // Jump to the function-specific construct stub. |
3950 Register jmp_reg = FLAG_optimize_constructed_arrays ? rcx : rbx; | 3903 Register jmp_reg = rcx; |
3951 __ movq(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | 3904 __ movq(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
3952 __ movq(jmp_reg, FieldOperand(jmp_reg, | 3905 __ movq(jmp_reg, FieldOperand(jmp_reg, |
3953 SharedFunctionInfo::kConstructStubOffset)); | 3906 SharedFunctionInfo::kConstructStubOffset)); |
3954 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); | 3907 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); |
3955 __ jmp(jmp_reg); | 3908 __ jmp(jmp_reg); |
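
The four instructions above are a straightforward pointer chase. Roughly, in C++ terms (a sketch that assumes the usual V8 tagging scheme, where FieldOperand compensates for kHeapObjectTag):

    // What the emitted code computes before the indirect jump:
    Code* stub = function->shared()->construct_stub();
    Address entry =
        reinterpret_cast<Address>(stub) + Code::kHeaderSize - kHeapObjectTag;
    // ...jmp entry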
3956 | 3909 |
3957 // rdi: called object | 3910 // rdi: called object |
3958 // rax: number of arguments | 3911 // rax: number of arguments |
3959 // rcx: object map | 3912 // rcx: object map |
3960 Label do_call; | 3913 Label do_call; |
(...skipping 27 matching lines...)
3988 #endif | 3941 #endif |
3989 } | 3942 } |
3990 | 3943 |
3991 | 3944 |
3992 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 3945 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
3993 CEntryStub::GenerateAheadOfTime(isolate); | 3946 CEntryStub::GenerateAheadOfTime(isolate); |
3994 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3947 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3995 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 3948 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
3996 // It is important that the store buffer overflow stubs are generated first. | 3949 // It is important that the store buffer overflow stubs are generated first. |
3997 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3950 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3998 if (FLAG_optimize_constructed_arrays) { | 3951 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
3999 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | |
4000 } | |
4001 } | 3952 } |
4002 | 3953 |
4003 | 3954 |
4004 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 3955 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
4005 } | 3956 } |
4006 | 3957 |
4007 | 3958 |
4008 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 3959 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
4009 CEntryStub stub(1, kDontSaveFPRegs); | 3960 CEntryStub stub(1, kDontSaveFPRegs); |
4010 stub.GetCode(isolate)->set_is_pregenerated(true); | 3961 stub.GetCode(isolate)->set_is_pregenerated(true); |
(...skipping 2931 matching lines...)
6942 // We should either have undefined in rbx or a valid cell. | 6893 // We should either have undefined in rbx or a valid cell. |
6943 Label okay_here; | 6894 Label okay_here; |
6944 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 6895 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
6945 __ Cmp(rbx, undefined_sentinel); | 6896 __ Cmp(rbx, undefined_sentinel); |
6946 __ j(equal, &okay_here); | 6897 __ j(equal, &okay_here); |
6947 __ Cmp(FieldOperand(rbx, 0), cell_map); | 6898 __ Cmp(FieldOperand(rbx, 0), cell_map); |
6948 __ Assert(equal, "Expected property cell in register rbx"); | 6899 __ Assert(equal, "Expected property cell in register rbx"); |
6949 __ bind(&okay_here); | 6900 __ bind(&okay_here); |
6950 } | 6901 } |
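
The debug-only block above asserts the calling convention; roughly (a sketch, where rbx_value is a stand-in for the register's contents and the map slot is the first field of any heap object):

    // Invariant checked when FLAG_debug_code is on:
    // rbx holds either the undefined sentinel or a Cell.
    ASSERT(rbx_value == undefined_sentinel ||
           HeapObject::cast(rbx_value)->map() == *cell_map);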
6951 | 6902 |
6952 if (FLAG_optimize_constructed_arrays) { | 6903 Label no_info, switch_ready; |
6953 Label no_info, switch_ready; | 6904 // Get the elements kind and case on that. |
6954 // Get the elements kind and case on that. | 6905 __ Cmp(rbx, undefined_sentinel); |
6955 __ Cmp(rbx, undefined_sentinel); | 6906 __ j(equal, &no_info); |
6956 __ j(equal, &no_info); | 6907 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); |
6957 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); | 6908 __ JumpIfNotSmi(rdx, &no_info); |
6958 __ JumpIfNotSmi(rdx, &no_info); | 6909 __ SmiToInteger32(rdx, rdx); |
6959 __ SmiToInteger32(rdx, rdx); | 6910 __ jmp(&switch_ready); |
6960 __ jmp(&switch_ready); | 6911 __ bind(&no_info); |
6961 __ bind(&no_info); | 6912 __ movq(rdx, Immediate(GetInitialFastElementsKind())); |
6962 __ movq(rdx, Immediate(GetInitialFastElementsKind())); | 6913 __ bind(&switch_ready); |
6963 __ bind(&switch_ready); | |
6964 | 6914 |
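
With the flag removed, this elements-kind selection always runs. As a hedged C++ sketch (when the cell's value is a Smi, its payload is the ElementsKind recorded by type feedback; cell_or_undefined is a stand-in name for what arrives in rbx):

    ElementsKind kind;
    Object* value = cell_or_undefined;
    if (value != undefined_sentinel && Cell::cast(value)->value()->IsSmi()) {
      // Feedback present: decode the recorded elements kind.
      kind = static_cast<ElementsKind>(
          Smi::cast(Cell::cast(value)->value())->value());
    } else {
      kind = GetInitialFastElementsKind();  // No feedback: use the default.
    }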
6965 if (argument_count_ == ANY) { | 6915 if (argument_count_ == ANY) { |
6966 Label not_zero_case, not_one_case; | 6916 Label not_zero_case, not_one_case; |
6967 __ testq(rax, rax); | 6917 __ testq(rax, rax); |
6968 __ j(not_zero, ¬_zero_case); | 6918 __ j(not_zero, ¬_zero_case); |
6969 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); | 6919 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); |
6970 | 6920 |
6971 __ bind(¬_zero_case); | 6921 __ bind(¬_zero_case); |
6972 __ cmpl(rax, Immediate(1)); | 6922 __ cmpl(rax, Immediate(1)); |
6973 __ j(greater, ¬_one_case); | 6923 __ j(greater, ¬_one_case); |
6974 CreateArrayDispatchOneArgument(masm); | 6924 CreateArrayDispatchOneArgument(masm); |
6975 | 6925 |
6976 __ bind(¬_one_case); | 6926 __ bind(¬_one_case); |
6977 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); | 6927 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); |
6978 } else if (argument_count_ == NONE) { | 6928 } else if (argument_count_ == NONE) { |
6979 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); | 6929 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm); |
6980 } else if (argument_count_ == ONE) { | 6930 } else if (argument_count_ == ONE) { |
6981 CreateArrayDispatchOneArgument(masm); | 6931 CreateArrayDispatchOneArgument(masm); |
6982 } else if (argument_count_ == MORE_THAN_ONE) { | 6932 } else if (argument_count_ == MORE_THAN_ONE) { |
6983 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); | 6933 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm); |
6984 } else { | |
6985 UNREACHABLE(); | |
6986 } | |
6987 } else { | 6934 } else { |
6988 Label generic_constructor; | 6935 UNREACHABLE(); |
6989 // Run the native code for the Array function called as constructor. | |
6990 ArrayNativeCode(masm, &generic_constructor); | |
6991 | |
6992 // Jump to the generic construct code in case the specialized code cannot | |
6993 // handle the construction. | |
6994 __ bind(&generic_constructor); | |
6995 Handle<Code> generic_construct_stub = | |
6996 masm->isolate()->builtins()->JSConstructStubGeneric(); | |
6997 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET); | |
6998 } | 6936 } |
6999 } | 6937 } |
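
For the ANY case the stub dispatches on rax (the actual argument count) at run time; the other three modes bake the choice in when the stub is generated. Schematically:

    // Runtime dispatch in the ANY case (sketch):
    if (argc == 0)      { /* ArrayNoArgumentConstructorStub */ }
    else if (argc == 1) { /* CreateArrayDispatchOneArgument */ }
    else                { /* ArrayNArgumentsConstructorStub */ }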
7000 | 6938 |
7001 | 6939 |
7002 void InternalArrayConstructorStub::GenerateCase( | 6940 void InternalArrayConstructorStub::GenerateCase( |
7003 MacroAssembler* masm, ElementsKind kind) { | 6941 MacroAssembler* masm, ElementsKind kind) { |
7004 Label not_zero_case, not_one_case; | 6942 Label not_zero_case, not_one_case; |
7005 Label normal_sequence; | 6943 Label normal_sequence; |
7006 | 6944 |
7007 __ testq(rax, rax); | 6945 __ testq(rax, rax); |
(...skipping 43 matching lines...)
7051 // Initial map for the builtin Array function should be a map. | 6989 // Initial map for the builtin Array function should be a map. |
7052 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6990 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
7053 // This will catch both a NULL and a Smi. | 6991 // This will catch both a NULL and a Smi. |
7054 STATIC_ASSERT(kSmiTag == 0); | 6992 STATIC_ASSERT(kSmiTag == 0); |
7055 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 6993 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
7056 __ Check(not_smi, "Unexpected initial map for Array function"); | 6994 __ Check(not_smi, "Unexpected initial map for Array function"); |
7057 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 6995 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
7058 __ Check(equal, "Unexpected initial map for Array function"); | 6996 __ Check(equal, "Unexpected initial map for Array function"); |
7059 } | 6997 } |
7060 | 6998 |
7061 if (FLAG_optimize_constructed_arrays) { | 6999 // Figure out the right elements kind |
7062 // Figure out the right elements kind | 7000 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
7063 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | |
7064 | 7001 |
7065 // Load the map's "bit field 2" into rcx. We only need the first byte, | 7002 // Load the map's "bit field 2" into rcx. We only need the first byte, |
7066 // but the following masking takes care of that anyway. | 7003 // but the following masking takes care of that anyway. |
7067 __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset)); | 7004 __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset)); |
7068 // Retrieve elements_kind from bit field 2. | 7005 // Retrieve elements_kind from bit field 2. |
7069 __ and_(rcx, Immediate(Map::kElementsKindMask)); | 7006 __ and_(rcx, Immediate(Map::kElementsKindMask)); |
7070 __ shr(rcx, Immediate(Map::kElementsKindShift)); | 7007 __ shr(rcx, Immediate(Map::kElementsKindShift)); |
7071 | 7008 |
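
The movzxbq/and/shr triple is a standard bit-field decode; the equivalent C++ is one expression:

    // Elements kind extracted from the map's bit field 2:
    int kind = (map->bit_field2() & Map::kElementsKindMask) >>
               Map::kElementsKindShift;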
7072 if (FLAG_debug_code) { | 7009 if (FLAG_debug_code) { |
7073 Label done; | 7010 Label done; |
7074 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); | 7011 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); |
7075 __ j(equal, &done); | 7012 __ j(equal, &done); |
7076 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); | 7013 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); |
7077 __ Assert(equal, | 7014 __ Assert(equal, |
7078 "Invalid ElementsKind for InternalArray or InternalPackedArray"); | 7015 "Invalid ElementsKind for InternalArray or InternalPackedArray"); |
7079 __ bind(&done); | 7016 __ bind(&done); |
7080 } | 7017 } |
7081 | 7018 |
7082 Label fast_elements_case; | 7019 Label fast_elements_case; |
7083 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); | 7020 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); |
7084 __ j(equal, &fast_elements_case); | 7021 __ j(equal, &fast_elements_case); |
7085 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 7022 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
7086 | 7023 |
7087 __ bind(&fast_elements_case); | 7024 __ bind(&fast_elements_case); |
7088 GenerateCase(masm, FAST_ELEMENTS); | 7025 GenerateCase(masm, FAST_ELEMENTS); |
7089 } else { | |
7090 Label generic_constructor; | |
7091 // Run the native code for the Array function called as constructor. | |
7092 ArrayNativeCode(masm, &generic_constructor); | |
7093 | |
7094 // Jump to the generic construct code in case the specialized code cannot | |
7095 // handle the construction. | |
7096 __ bind(&generic_constructor); | |
7097 Handle<Code> generic_construct_stub = | |
7098 masm->isolate()->builtins()->JSConstructStubGeneric(); | |
7099 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET); | |
7100 } | |
7101 } | 7026 } |
7102 | 7027 |
7103 | 7028 |
7104 #undef __ | 7029 #undef __ |
7105 | 7030 |
7106 } } // namespace v8::internal | 7031 } } // namespace v8::internal |
7107 | 7032 |
7108 #endif // V8_TARGET_ARCH_X64 | 7033 #endif // V8_TARGET_ARCH_X64 |