OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 2712 matching lines...) |
2723 | 2723 |
2724 // (9) Sliced string. Replace subject with parent. | 2724 // (9) Sliced string. Replace subject with parent. |
2725 __ Ldr(sliced_string_offset, | 2725 __ Ldr(sliced_string_offset, |
2726 UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset)); | 2726 UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset)); |
2727 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 2727 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
2728 __ B(&check_underlying); // Go to (4). | 2728 __ B(&check_underlying); // Go to (4). |
2729 #endif | 2729 #endif |
2730 } | 2730 } |
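For context on step (9): a SlicedString is only a window onto another string, a (parent, offset) pair, so the stub records the offset and loops back to re-run the representation checks of step (4) on the parent. The C++ below is purely illustrative; the types and the UnwrapSlice helper are hypothetical stand-ins, not V8's heap objects (V8 inspects instance-type bits, and it guarantees a slice's parent is never itself a slice, so no loop is needed):

    // Hypothetical model of step (9), not V8 code.
    struct String { virtual ~String() = default; };
    struct SeqString : String { /* flat character data */ };
    struct SlicedString : String {
      String* parent;  // the underlying string
      int offset;      // where this slice starts inside the parent
    };

    // Mirrors the stub: capture the slice offset, then retry the
    // checks of step (4) on the parent string.
    static String* UnwrapSlice(String* subject, int* sliced_string_offset) {
      if (auto* sliced = dynamic_cast<SlicedString*>(subject)) {
        *sliced_string_offset = sliced->offset;
        subject = sliced->parent;  // "Replace subject with parent."
      }
      return subject;
    }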
2731 | 2731 |
2732 | 2732 |
2733 static void GenerateRecordCallTarget(MacroAssembler* masm, | 2733 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub, |
2734 Register argc, | 2734 Register argc, Register function, |
| 2735 Register feedback_vector, |
| 2736 Register index) { |
| 2737 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2738 |
| 2739 // Arguments register must be smi-tagged to call out. |
| 2740 __ SmiTag(argc); |
| 2741 __ Push(argc, function, feedback_vector, index); |
 | 2742 // Called stubs expect the feedback vector in x2 and the slot index in x3. |
| 2743 DCHECK(feedback_vector.Is(x2) && index.Is(x3)); |
| 2744 __ CallStub(stub); |
| 2745 |
| 2746 __ Pop(index, feedback_vector, function, argc); |
| 2747 __ SmiUntag(argc); |
| 2748 } |
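Why the SmiTag/SmiUntag pair around the pushes: the GC may run during the stub call and will visit the stack slots pushed inside the INTERNAL frame, so each of them must be a valid tagged value. A minimal sketch of the arm64 smi encoding this relies on (constants written out by hand here, not taken from V8 headers):

    // Illustrative only: on arm64, V8 stores a smi's 32-bit payload in the
    // upper word, so a tagged count always has its low bit clear and can
    // never be mistaken for a heap pointer (whose low tag bit is 1).
    #include <cassert>
    #include <cstdint>

    constexpr int kSmiShift = 32;
    constexpr uint64_t SmiTag(uint64_t n) { return n << kSmiShift; }
    constexpr uint64_t SmiUntag(uint64_t v) { return v >> kSmiShift; }

    int main() {
      uint64_t argc = 3;
      uint64_t tagged = SmiTag(argc);
      assert((tagged & 1) == 0);          // looks like a smi to the GC
      assert(SmiUntag(tagged) == argc);   // round-trips after the stub call
      return 0;
    }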
| 2749 |
| 2750 |
| 2751 static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc, |
2735 Register function, | 2752 Register function, |
2736 Register feedback_vector, | 2753 Register feedback_vector, Register index, |
2737 Register index, | 2754 Register scratch1, Register scratch2, |
2738 Register scratch1, | 2755 Register scratch3) { |
2739 Register scratch2) { | |
2740 ASM_LOCATION("GenerateRecordCallTarget"); | 2756 ASM_LOCATION("GenerateRecordCallTarget"); |
2741 DCHECK(!AreAliased(scratch1, scratch2, | 2757 DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function, |
2742 argc, function, feedback_vector, index)); | 2758 feedback_vector, index)); |
2743 // Cache the called function in a feedback vector slot. Cache states are | 2759 // Cache the called function in a feedback vector slot. Cache states are |
2744 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. | 2760 // uninitialized, monomorphic (indicated by a WeakCell), and megamorphic. |
2745 // argc : number of arguments to the construct function | 2761 // argc : number of arguments to the construct function |
2746 // function : the function to call | 2762 // function : the function to call |
2747 // feedback_vector : the feedback vector | 2763 // feedback_vector : the feedback vector |
2748 // index : slot in feedback vector (smi) | 2764 // index : slot in feedback vector (smi) |
2749 Label initialize, done, miss, megamorphic, not_array_function; | 2765 Label initialize, done, miss, megamorphic, not_array_function; |
2750 | 2766 |
2751 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), | 2767 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), |
2752 masm->isolate()->heap()->megamorphic_symbol()); | 2768 masm->isolate()->heap()->megamorphic_symbol()); |
2753 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), | 2769 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), |
2754 masm->isolate()->heap()->uninitialized_symbol()); | 2770 masm->isolate()->heap()->uninitialized_symbol()); |
2755 | 2771 |
2756 // Load the cache state. | 2772 // Load the cache state. |
2757 __ Add(scratch1, feedback_vector, | 2773 Register feedback = scratch1; |
| 2774 Register feedback_map = scratch2; |
| 2775 Register feedback_value = scratch3; |
| 2776 __ Add(feedback, feedback_vector, |
2758 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 2777 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
2759 __ Ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | 2778 __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize)); |
2760 | 2779 |
2761 // A monomorphic cache hit or an already megamorphic state: invoke the | 2780 // A monomorphic cache hit or an already megamorphic state: invoke the |
2762 // function without changing the state. | 2781 // function without changing the state. |
2763 __ Cmp(scratch1, function); | 2782 Label check_allocation_site; |
| 2783 __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset)); |
| 2784 __ Cmp(function, feedback_value); |
2764 __ B(eq, &done); | 2785 __ B(eq, &done); |
| 2786 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); |
| 2787 __ B(eq, &done); |
 | 2788 __ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 2789 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); |
| 2790 __ B(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site); |
| 2791 |
 | 2792 // The function did not match the weak cell value. If the cell has been |
 | 2793 // cleared, we get a fresh chance to become monomorphic. |
| 2794 __ JumpIfSmi(feedback_value, &initialize); |
| 2795 __ B(&megamorphic); |
2765 | 2796 |
2766 if (!FLAG_pretenuring_call_new) { | 2797 if (!FLAG_pretenuring_call_new) { |
 | 2798 __ Bind(&check_allocation_site); |
2767 // If we came here, we need to see if we are the array function. | 2799 // If we came here, we need to see if we are the array function. |
2768 // If we didn't have a matching function, and we didn't find the megamorphic | 2800 // If we didn't have a matching function, and we didn't find the megamorphic |
2769 // sentinel, then we have in the slot either some other function or an | 2801 // sentinel, then we have in the slot either some other function or an |
2770 // AllocationSite. Do a map check on the object in scratch1 register. | 2802 // AllocationSite. Check the map already loaded into feedback_map. |
2771 __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset)); | 2803 __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss); |
2772 __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss); | |
2773 | 2804 |
2774 // Make sure the function is the Array() function | 2805 // Make sure the function is the Array() function |
2775 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); | 2806 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); |
2776 __ Cmp(function, scratch1); | 2807 __ Cmp(function, scratch1); |
2777 __ B(ne, &megamorphic); | 2808 __ B(ne, &megamorphic); |
2778 __ B(&done); | 2809 __ B(&done); |
2779 } | 2810 } |
2780 | 2811 |
2781 __ Bind(&miss); | 2812 __ Bind(&miss); |
2782 | 2813 |
(...skipping 15 matching lines...) |
2798 | 2829 |
2799 if (!FLAG_pretenuring_call_new) { | 2830 if (!FLAG_pretenuring_call_new) { |
2800 // Make sure the function is the Array() function | 2831 // Make sure the function is the Array() function |
2801 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); | 2832 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); |
2802 __ Cmp(function, scratch1); | 2833 __ Cmp(function, scratch1); |
2803 __ B(ne, ¬_array_function); | 2834 __ B(ne, ¬_array_function); |
2804 | 2835 |
2805 // The target function is the Array constructor, | 2836 // The target function is the Array constructor, |
2806 // Create an AllocationSite if we don't already have it, store it in the | 2837 // Create an AllocationSite if we don't already have it, store it in the |
2807 // slot. | 2838 // slot. |
2808 { | 2839 CreateAllocationSiteStub create_stub(masm->isolate()); |
2809 FrameScope scope(masm, StackFrame::INTERNAL); | 2840 CallStubInRecordCallTarget(masm, &create_stub, argc, function, |
2810 CreateAllocationSiteStub create_stub(masm->isolate()); | 2841 feedback_vector, index); |
2811 | |
2812 // Arguments register must be smi-tagged to call out. | |
2813 __ SmiTag(argc); | |
2814 __ Push(argc, function, feedback_vector, index); | |
2815 | |
2816 // CreateAllocationSiteStub expect the feedback vector in x2 and the slot | |
2817 // index in x3. | |
2818 DCHECK(feedback_vector.Is(x2) && index.Is(x3)); | |
2819 __ CallStub(&create_stub); | |
2820 | |
2821 __ Pop(index, feedback_vector, function, argc); | |
2822 __ SmiUntag(argc); | |
2823 } | |
2824 __ B(&done); | 2842 __ B(&done); |
2825 | 2843 |
2826 __ Bind(¬_array_function); | 2844 __ Bind(¬_array_function); |
2827 } | 2845 } |
2828 | 2846 // An uninitialized cache is patched with a WeakCell holding the function. |
2829 // An uninitialized cache is patched with the function. | 2847 CreateWeakCellStub create_stub(masm->isolate()); |
2830 | 2848 CallStubInRecordCallTarget(masm, &create_stub, argc, function, |
2831 __ Add(scratch1, feedback_vector, | 2849 feedback_vector, index); |
2832 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | |
2833 __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag); | |
2834 __ Str(function, MemOperand(scratch1, 0)); | |
2835 | |
2836 __ Push(function); | |
2837 __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved, | |
2838 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
2839 __ Pop(function); | |
2840 | |
2841 __ Bind(&done); | 2850 __ Bind(&done); |
2842 } | 2851 } |
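Stepping back, GenerateRecordCallTarget emits a small state machine over one feedback-vector slot. The C++ below is a hedged model to make the branches easier to follow; the variant, the tag types, and the flat control flow are illustrative assumptions (the FLAG_pretenuring_call_new special cases are ignored), not V8's object model:

    // Hypothetical model of the emitted state machine, not V8 API.
    #include <variant>

    struct Uninitialized {};                 // uninitialized_symbol sentinel
    struct MegamorphicTag {};                // megamorphic_symbol sentinel
    struct WeakCell { const void* value; };  // value nulled out once cleared
    struct AllocationSite {};                // records a call to Array()

    using Slot =
        std::variant<Uninitialized, MegamorphicTag, WeakCell, AllocationSite>;

    void RecordCallTarget(Slot& slot, const void* fn, const void* array_fn) {
      if (auto* cell = std::get_if<WeakCell>(&slot)) {
        if (cell->value == fn) return;       // monomorphic hit: nothing to do
        if (cell->value != nullptr) {        // a different, still-live target
          slot = MegamorphicTag{};
          return;
        }
        // Cleared cell: fall through for a fresh chance to go monomorphic.
      } else if (std::holds_alternative<MegamorphicTag>(slot)) {
        return;                              // megamorphic is a sticky state
      } else if (std::holds_alternative<AllocationSite>(slot)) {
        if (fn == array_fn) return;          // the cached Array() case
        slot = MegamorphicTag{};
        return;
      }
      // Initialize: the Array() builtin is cached via an AllocationSite;
      // any other function via a WeakCell, so the cache does not keep the
      // function alive (CreateAllocationSiteStub / CreateWeakCellStub above).
      slot = (fn == array_fn) ? Slot{AllocationSite{}} : Slot{WeakCell{fn}};
    }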
2843 | 2852 |
2844 | 2853 |
2845 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | 2854 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
2846 // Do not transform the receiver for strict mode functions. | 2855 // Do not transform the receiver for strict mode functions. |
2847 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | 2856 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); |
2848 __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); | 2857 __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); |
2849 __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont); | 2858 __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont); |
2850 | 2859 |
(...skipping 118 matching lines...) |
2969 Label slow, non_function_call; | 2978 Label slow, non_function_call; |
2970 | 2979 |
2971 // Check that the function is not a smi. | 2980 // Check that the function is not a smi. |
2972 __ JumpIfSmi(function, &non_function_call); | 2981 __ JumpIfSmi(function, &non_function_call); |
2973 // Check that the function is a JSFunction. | 2982 // Check that the function is a JSFunction. |
2974 Register object_type = x10; | 2983 Register object_type = x10; |
2975 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, | 2984 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, |
2976 &slow); | 2985 &slow); |
2977 | 2986 |
2978 if (RecordCallTarget()) { | 2987 if (RecordCallTarget()) { |
2979 GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5); | 2988 GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11); |
2980 | 2989 |
2981 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 2990 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
2982 if (FLAG_pretenuring_call_new) { | 2991 if (FLAG_pretenuring_call_new) { |
2983 // Put the AllocationSite from the feedback vector into x2. | 2992 // Put the AllocationSite from the feedback vector into x2. |
2984 // By adding kPointerSize we encode that we know the AllocationSite | 2993 // By adding kPointerSize we encode that we know the AllocationSite |
2985 // entry is at the feedback vector slot given by x3 + 1. | 2994 // entry is at the feedback vector slot given by x3 + 1. |
2986 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize)); | 2995 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize)); |
2987 } else { | 2996 } else { |
2988 Label feedback_register_initialized; | 2997 Label feedback_register_initialized; |
2989 // Put the AllocationSite from the feedback vector into x2, or undefined. | 2998 // Put the AllocationSite from the feedback vector into x2, or undefined. |
(...skipping 2754 matching lines...) |
5744 kStackUnwindSpace, NULL, spill_offset, | 5753 kStackUnwindSpace, NULL, spill_offset, |
5745 MemOperand(fp, 6 * kPointerSize), NULL); | 5754 MemOperand(fp, 6 * kPointerSize), NULL); |
5746 } | 5755 } |
5747 | 5756 |
5748 | 5757 |
5749 #undef __ | 5758 #undef __ |
5750 | 5759 |
5751 } } // namespace v8::internal | 5760 } } // namespace v8::internal |
5752 | 5761 |
5753 #endif // V8_TARGET_ARCH_ARM64 | 5762 #endif // V8_TARGET_ARCH_ARM64 |