OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 43 matching lines...) |
54 Isolate* isolate, | 54 Isolate* isolate, |
55 CodeStubInterfaceDescriptor* descriptor) { | 55 CodeStubInterfaceDescriptor* descriptor) { |
56 static Register registers[] = { rax, rbx, rcx, rdx }; | 56 static Register registers[] = { rax, rbx, rcx, rdx }; |
57 descriptor->register_param_count_ = 4; | 57 descriptor->register_param_count_ = 4; |
58 descriptor->register_params_ = registers; | 58 descriptor->register_params_ = registers; |
59 descriptor->deoptimization_handler_ = | 59 descriptor->deoptimization_handler_ = |
60 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; | 60 Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry; |
61 } | 61 } |
62 | 62 |
63 | 63 |
| 64 void CreateAllocationSiteStub::InitializeInterfaceDescriptor( |
| 65 Isolate* isolate, |
| 66 CodeStubInterfaceDescriptor* descriptor) { |
| 67 static Register registers[] = { rbx }; |
| 68 descriptor->register_param_count_ = 1; |
| 69 descriptor->register_params_ = registers; |
| 70 descriptor->deoptimization_handler_ = NULL; |
| 71 } |
| 72 |
| 73 |
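Note: the body of CreateAllocationSiteStub is not part of this hunk, but the descriptor above already pins down its shape: a single register parameter (rbx, which at the call site in GenerateRecordCallTarget below holds the type feedback cell) and no deoptimization handler. A rough, hypothetical pseudo-C++ sketch of what the stub is used for, based on the comments later in this file; the factory call and accessor names are assumptions, not taken from this CL:

    // Sketch only; the real stub is generated machine code, not C++.
    void CreateAllocationSiteForCell(Isolate* isolate,
                                     Handle<Cell> feedback_cell) {
      // Allocate a fresh AllocationSite and make it the cell's value, so
      // later Array() calls through this site can record ElementsKind
      // feedback in the site's payload instead of the old Smi sentinels.
      Handle<AllocationSite> site = isolate->factory()->NewAllocationSite();
      feedback_cell->set_value(*site);
    }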
64 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( | 74 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor( |
65 Isolate* isolate, | 75 Isolate* isolate, |
66 CodeStubInterfaceDescriptor* descriptor) { | 76 CodeStubInterfaceDescriptor* descriptor) { |
67 static Register registers[] = { rdx, rax }; | 77 static Register registers[] = { rdx, rax }; |
68 descriptor->register_param_count_ = 2; | 78 descriptor->register_param_count_ = 2; |
69 descriptor->register_params_ = registers; | 79 descriptor->register_params_ = registers; |
70 descriptor->deoptimization_handler_ = | 80 descriptor->deoptimization_handler_ = |
71 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); | 81 FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure); |
72 } | 82 } |
73 | 83 |
(...skipping 3655 matching lines...) |
3729 // Load the cache state into rcx. | 3739 // Load the cache state into rcx. |
3730 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); | 3740 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
3731 | 3741 |
3732 // A monomorphic cache hit or an already megamorphic state: invoke the | 3742 // A monomorphic cache hit or an already megamorphic state: invoke the |
3733 // function without changing the state. | 3743 // function without changing the state. |
3734 __ cmpq(rcx, rdi); | 3744 __ cmpq(rcx, rdi); |
3735 __ j(equal, &done); | 3745 __ j(equal, &done); |
3736 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); | 3746 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); |
3737 __ j(equal, &done); | 3747 __ j(equal, &done); |
3738 | 3748 |
 | 3749 // If we came here, we need to see if the function is the Array |
 | 3750 // function. If we didn't have a matching function, and we didn't find |
 | 3751 // the megamorphic sentinel, then the cell holds either some other |
 | 3752 // function or an AllocationSite. Do a map check on the object in rcx |
 | 3753 // to tell the two apart; anything that is not an AllocationSite is |
 | 3754 // treated as a miss. |
| 3755 Handle<Map> allocation_site_map( |
| 3756 masm->isolate()->heap()->allocation_site_map(), |
| 3757 masm->isolate()); |
| 3758 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); |
| 3759 __ j(not_equal, &miss); |
| 3760 |
3739 // Special handling of the Array() function, which caches not only the | 3761 // Special handling of the Array() function: its cache state is an |
3740 // monomorphic Array function but the initial ElementsKind with special | 3762 // AllocationSite (map-checked above) that tracks ElementsKind feedback, |
3741 // sentinels | 3763 // rather than the function itself. |
3742 __ JumpIfNotSmi(rcx, &miss); | |
3743 if (FLAG_debug_code) { | |
3744 Handle<Object> terminal_kind_sentinel = | |
3745 TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(), | |
3746 LAST_FAST_ELEMENTS_KIND); | |
3747 __ Cmp(rcx, terminal_kind_sentinel); | |
3748 __ Assert(less_equal, "Array function sentinel is not an ElementsKind"); | |
3749 } | |
3750 | |
3751 // Make sure the function is the Array() function | 3764 // Make sure the function is the Array() function |
3752 __ LoadArrayFunction(rcx); | 3765 __ LoadArrayFunction(rcx); |
3753 __ cmpq(rdi, rcx); | 3766 __ cmpq(rdi, rcx); |
3754 __ j(not_equal, &megamorphic); | 3767 __ j(not_equal, &megamorphic); |
3755 __ jmp(&done); | 3768 __ jmp(&done); |
3756 | 3769 |
3757 __ bind(&miss); | 3770 __ bind(&miss); |
3758 | 3771 |
3759 // A monomorphic miss (i.e, here the cache is not uninitialized) goes | 3772 // A monomorphic miss (i.e., the cache is not uninitialized) goes |
3760 // megamorphic. | 3773 // megamorphic. |
3761 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); | 3774 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); |
3762 __ j(equal, &initialize); | 3775 __ j(equal, &initialize); |
3763 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 3776 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
3764 // write-barrier is needed. | 3777 // write-barrier is needed. |
3765 __ bind(&megamorphic); | 3778 __ bind(&megamorphic); |
3766 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 3779 __ Move(FieldOperand(rbx, Cell::kValueOffset), |
3767 TypeFeedbackCells::MegamorphicSentinel(isolate)); | 3780 TypeFeedbackCells::MegamorphicSentinel(isolate)); |
3768 __ jmp(&done, Label::kNear); | 3781 __ jmp(&done); |
3769 | 3782 |
3770 // An uninitialized cache is patched with the function or sentinel to | 3783 // An uninitialized cache is patched with the function, or with an |
3771 // indicate the ElementsKind if function is the Array constructor. | 3784 // AllocationSite if the function is the Array constructor. |
3772 __ bind(&initialize); | 3785 __ bind(&initialize); |
3773 // Make sure the function is the Array() function | 3786 // Make sure the function is the Array() function |
3774 __ LoadArrayFunction(rcx); | 3787 __ LoadArrayFunction(rcx); |
3775 __ cmpq(rdi, rcx); | 3788 __ cmpq(rdi, rcx); |
3776 __ j(not_equal, ¬_array_function); | 3789 __ j(not_equal, ¬_array_function); |
3777 | 3790 |
3778 // The target function is the Array constructor, install a sentinel value in | 3791 // The target function is the Array constructor; create an AllocationSite |
3779 // the constructor's type info cell that will track the initial ElementsKind | 3792 // if we don't already have one, and store it in the cell. |
3780 // that should be used for the array when its constructed. | 3793 { |
3781 Handle<Object> initial_kind_sentinel = | 3794 FrameScope scope(masm, StackFrame::INTERNAL); |
3782 TypeFeedbackCells::MonomorphicArraySentinel(isolate, | 3795 |
3783 GetInitialFastElementsKind()); | 3796 __ push(rax); |
3784 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 3797 __ push(rdi); |
3785 initial_kind_sentinel); | 3798 __ push(rbx); |
| 3799 |
| 3800 CreateAllocationSiteStub create_stub; |
| 3801 __ CallStub(&create_stub); |
| 3802 |
| 3803 __ pop(rbx); |
| 3804 __ pop(rdi); |
| 3805 __ pop(rax); |
| 3806 } |
3786 __ jmp(&done); | 3807 __ jmp(&done); |
3787 | 3808 |
3788 __ bind(¬_array_function); | 3809 __ bind(¬_array_function); |
3789 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi); | 3810 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi); |
3790 // No need for a write barrier here - cells are rescanned. | 3811 // No need for a write barrier here - cells are rescanned. |
3791 | 3812 |
3792 __ bind(&done); | 3813 __ bind(&done); |
3793 } | 3814 } |
3794 | 3815 |
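Note: as a reading aid, this is a sketch of the state machine GenerateRecordCallTarget drives on the type feedback cell, written as plain C++ for illustration only. The enum and function names are hypothetical; the sentinel meanings come straight from the assembly above.

    // Hypothetical illustration, not V8 code.
    enum class FeedbackState {
      kUninitialized,         // TypeFeedbackCells::UninitializedSentinel
      kMonomorphicFunction,   // cell holds the last JSFunction called here
      kMonomorphicArraySite,  // cell holds an AllocationSite (Array() sites)
      kMegamorphic            // TypeFeedbackCells::MegamorphicSentinel
    };

    FeedbackState RecordCallTarget(FeedbackState state,
                                   bool callee_is_array_function,
                                   bool callee_matches_cached_function) {
      switch (state) {
        case FeedbackState::kUninitialized:
          // The &initialize path: Array() gets an AllocationSite, every
          // other callee is cached directly.
          return callee_is_array_function
                     ? FeedbackState::kMonomorphicArraySite
                     : FeedbackState::kMonomorphicFunction;
        case FeedbackState::kMonomorphicFunction:
          // Hit -> &done, miss -> &megamorphic.
          return callee_matches_cached_function
                     ? state
                     : FeedbackState::kMegamorphic;
        case FeedbackState::kMonomorphicArraySite:
          // A hit means the callee is still the Array() function.
          return callee_is_array_function ? state
                                          : FeedbackState::kMegamorphic;
        case FeedbackState::kMegamorphic:
          return state;  // terminal; the cell is never written again
      }
      return state;  // unreachable
    }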
3795 | 3816 |
(...skipping 149 matching lines...) |
3945 } | 3966 } |
3946 | 3967 |
3947 | 3968 |
3948 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 3969 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
3949 CEntryStub::GenerateAheadOfTime(isolate); | 3970 CEntryStub::GenerateAheadOfTime(isolate); |
3950 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3971 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3951 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 3972 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
3952 // It is important that the store buffer overflow stubs are generated first. | 3973 // It is important that the store buffer overflow stubs are generated first. |
3953 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); | 3974 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate); |
3954 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); | 3975 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); |
| 3976 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); |
3955 } | 3977 } |
3956 | 3978 |
3957 | 3979 |
3958 void CodeStub::GenerateFPStubs(Isolate* isolate) { | 3980 void CodeStub::GenerateFPStubs(Isolate* isolate) { |
3959 } | 3981 } |
3960 | 3982 |
3961 | 3983 |
3962 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { | 3984 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { |
3963 CEntryStub stub(1, kDontSaveFPRegs); | 3985 CEntryStub stub(1, kDontSaveFPRegs); |
3964 stub.GetCode(isolate)->set_is_pregenerated(true); | 3986 stub.GetCode(isolate)->set_is_pregenerated(true); |
(...skipping 2799 matching lines...) |
6764 __ movq(rcx, Operand(rsp, kPointerSize)); | 6786 __ movq(rcx, Operand(rsp, kPointerSize)); |
6765 __ testq(rcx, rcx); | 6787 __ testq(rcx, rcx); |
6766 __ j(zero, &normal_sequence); | 6788 __ j(zero, &normal_sequence); |
6767 | 6789 |
6768 // We are going to create a holey array, but our kind is non-holey. | 6790 // We are going to create a holey array, but our kind is non-holey. |
6769 // Fix kind and retry | 6791 // Fix kind and retry |
6770 __ incl(rdx); | 6792 __ incl(rdx); |
6771 __ Cmp(rbx, undefined_sentinel); | 6793 __ Cmp(rbx, undefined_sentinel); |
6772 __ j(equal, &normal_sequence); | 6794 __ j(equal, &normal_sequence); |
6773 | 6795 |
6774 // The type cell may have gone megamorphic, don't overwrite if so | 6796 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset)); |
6775 __ movq(rcx, FieldOperand(rbx, kPointerSize)); | 6797 Handle<Map> allocation_site_map( |
6776 __ JumpIfNotSmi(rcx, &normal_sequence); | 6798 masm->isolate()->heap()->allocation_site_map(), |
| 6799 masm->isolate()); |
| 6800 __ Cmp(FieldOperand(rcx, 0), allocation_site_map); |
| 6801 __ j(not_equal, &normal_sequence); |
6777 | 6802 |
6778 // Save the resulting elements kind in type info | 6803 // Save the resulting elements kind in type info |
6779 __ Integer32ToSmi(rdx, rdx); | 6804 __ Integer32ToSmi(rdx, rdx); |
6780 __ movq(FieldOperand(rbx, kPointerSize), rdx); | 6805 __ movq(FieldOperand(rcx, AllocationSite::kPayloadOffset), rdx); |
6781 __ SmiToInteger32(rdx, rdx); | 6806 __ SmiToInteger32(rdx, rdx); |
6782 | 6807 |
6783 __ bind(&normal_sequence); | 6808 __ bind(&normal_sequence); |
6784 int last_index = GetSequenceIndexFromFastElementsKind( | 6809 int last_index = GetSequenceIndexFromFastElementsKind( |
6785 TERMINAL_FAST_ELEMENTS_KIND); | 6810 TERMINAL_FAST_ELEMENTS_KIND); |
6786 for (int i = 0; i <= last_index; ++i) { | 6811 for (int i = 0; i <= last_index; ++i) { |
6787 Label next; | 6812 Label next; |
6788 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6813 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6789 __ cmpl(rdx, Immediate(kind)); | 6814 __ cmpl(rdx, Immediate(kind)); |
6790 __ j(not_equal, &next); | 6815 __ j(not_equal, &next); |
6791 ArraySingleArgumentConstructorStub stub(kind); | 6816 ArraySingleArgumentConstructorStub stub(kind); |
6792 __ TailCallStub(&stub); | 6817 __ TailCallStub(&stub); |
6793 __ bind(&next); | 6818 __ bind(&next); |
6794 } | 6819 } |
6795 | 6820 |
6796 // If we reached this point there is a problem. | 6821 // If we reached this point there is a problem. |
6797 __ Abort("Unexpected ElementsKind in array constructor"); | 6822 __ Abort("Unexpected ElementsKind in array constructor"); |
6798 } | 6823 } |
6799 | 6824 |
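Note: the kind fix-up at new lines 6790-6805 leans on the fact that, in the fast ElementsKind ordering, each holey kind directly follows its packed counterpart, which is why a bare incl(rdx) is enough. A minimal sketch of that assumption, in C++ for illustration (the helper is hypothetical):

    // e.g. FAST_SMI_ELEMENTS    -> FAST_HOLEY_SMI_ELEMENTS
    //      FAST_ELEMENTS        -> FAST_HOLEY_ELEMENTS
    //      FAST_DOUBLE_ELEMENTS -> FAST_HOLEY_DOUBLE_ELEMENTS
    ElementsKind PromoteToHoley(ElementsKind packed_kind) {
      return static_cast<ElementsKind>(static_cast<int>(packed_kind) + 1);
    }

The promoted kind is then written back into the AllocationSite's payload as a Smi, but only after the map check confirms the feedback slot really holds an AllocationSite; an undefined or non-AllocationSite value is left untouched and the stub simply dispatches on the promoted kind.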
6800 | 6825 |
6801 template<class T> | 6826 template<class T> |
6802 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 6827 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
6803 int to_index = GetSequenceIndexFromFastElementsKind( | 6828 int to_index = GetSequenceIndexFromFastElementsKind( |
6804 TERMINAL_FAST_ELEMENTS_KIND); | 6829 TERMINAL_FAST_ELEMENTS_KIND); |
6805 for (int i = 0; i <= to_index; ++i) { | 6830 for (int i = 0; i <= to_index; ++i) { |
6806 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6831 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6807 T stub(kind); | 6832 T stub(kind); |
6808 stub.GetCode(isolate)->set_is_pregenerated(true); | 6833 stub.GetCode(isolate)->set_is_pregenerated(true); |
6809 if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { | 6834 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { |
6810 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); | 6835 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES); |
6811 stub1.GetCode(isolate)->set_is_pregenerated(true); | 6836 stub1.GetCode(isolate)->set_is_pregenerated(true); |
6812 } | 6837 } |
6813 } | 6838 } |
6814 } | 6839 } |
6815 | 6840 |
6816 | 6841 |
6817 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { | 6842 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { |
6818 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( | 6843 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( |
6819 isolate); | 6844 isolate); |
(...skipping 52 matching lines...) |
6872 __ Cmp(FieldOperand(rbx, 0), cell_map); | 6897 __ Cmp(FieldOperand(rbx, 0), cell_map); |
6873 __ Assert(equal, "Expected property cell in register rbx"); | 6898 __ Assert(equal, "Expected property cell in register rbx"); |
6874 __ bind(&okay_here); | 6899 __ bind(&okay_here); |
6875 } | 6900 } |
6876 | 6901 |
6877 Label no_info, switch_ready; | 6902 Label no_info, switch_ready; |
6878 // Get the elements kind and case on that. | 6903 // Get the elements kind and case on that. |
6879 __ Cmp(rbx, undefined_sentinel); | 6904 __ Cmp(rbx, undefined_sentinel); |
6880 __ j(equal, &no_info); | 6905 __ j(equal, &no_info); |
6881 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); | 6906 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); |
6882 __ JumpIfNotSmi(rdx, &no_info); | 6907 |
 | 6908 // The type cell's value may still be undefined. |
| 6909 __ Cmp(rdx, undefined_sentinel); |
| 6910 __ j(equal, &no_info); |
| 6911 |
 | 6912 // We should have an AllocationSite object. |
| 6913 if (FLAG_debug_code) { |
| 6914 __ Cmp(FieldOperand(rdx, 0), |
| 6915 Handle<Map>(masm->isolate()->heap()->allocation_site_map())); |
| 6916 __ Assert(equal, "Expected AllocationSite object in register rdx"); |
| 6917 } |
| 6918 |
| 6919 __ movq(rdx, FieldOperand(rdx, AllocationSite::kPayloadOffset)); |
6883 __ SmiToInteger32(rdx, rdx); | 6920 __ SmiToInteger32(rdx, rdx); |
6884 __ jmp(&switch_ready); | 6921 __ jmp(&switch_ready); |
6885 __ bind(&no_info); | 6922 __ bind(&no_info); |
6886 __ movq(rdx, Immediate(GetInitialFastElementsKind())); | 6923 __ movq(rdx, Immediate(GetInitialFastElementsKind())); |
6887 __ bind(&switch_ready); | 6924 __ bind(&switch_ready); |
6888 | 6925 |
6889 if (argument_count_ == ANY) { | 6926 if (argument_count_ == ANY) { |
6890 Label not_zero_case, not_one_case; | 6927 Label not_zero_case, not_one_case; |
6891 __ testq(rax, rax); | 6928 __ testq(rax, rax); |
6892 __ j(not_zero, ¬_zero_case); | 6929 __ j(not_zero, ¬_zero_case); |
(...skipping 105 matching lines...) |
6998 __ bind(&fast_elements_case); | 7035 __ bind(&fast_elements_case); |
6999 GenerateCase(masm, FAST_ELEMENTS); | 7036 GenerateCase(masm, FAST_ELEMENTS); |
7000 } | 7037 } |
7001 | 7038 |
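Note: to summarize the no_info / switch_ready block in ArrayConstructorStub above: the constructor reads the type cell, treats undefined as "no information" (the megamorphic sentinel is undefined as well), and otherwise expects an AllocationSite whose payload Smi encodes the ElementsKind to build with. A pseudo-C++ rendering for illustration; the value() and payload() accessors are assumptions, not part of this diff:

    // Illustration only, mirroring the assembly's decision, not a real API.
    ElementsKind ElementsKindFromFeedback(Cell* type_cell, Heap* heap) {
      Object* value = type_cell->value();
      if (value == heap->undefined_value()) {
        // No feedback recorded (or the site went megamorphic).
        return GetInitialFastElementsKind();
      }
      // Map-checked under FLAG_debug_code in the stub.
      AllocationSite* site = AllocationSite::cast(value);
      return static_cast<ElementsKind>(Smi::cast(site->payload())->value());
    }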
7002 | 7039 |
7003 #undef __ | 7040 #undef __ |
7004 | 7041 |
7005 } } // namespace v8::internal | 7042 } } // namespace v8::internal |
7006 | 7043 |
7007 #endif // V8_TARGET_ARCH_X64 | 7044 #endif // V8_TARGET_ARCH_X64 |