Chromium Code Reviews

Unified Diff: src/arm64/code-stubs-arm64.cc

Issue 1029093002: v8:3539 - hold constructor feedback in weak cells (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix in test-heap.cc Created 5 years, 9 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #if V8_TARGET_ARCH_ARM64

 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
(...skipping 2713 matching lines...)

   // (9) Sliced string. Replace subject with parent.
   __ Ldr(sliced_string_offset,
          UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset));
   __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
   __ B(&check_underlying);  // Go to (4).
 #endif
 }


-static void GenerateRecordCallTarget(MacroAssembler* masm,
-                                     Register argc,
-                                     Register function,
-                                     Register feedback_vector,
-                                     Register index,
-                                     Register scratch1,
-                                     Register scratch2) {
+static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
+                                       Register argc, Register function,
+                                       Register feedback_vector,
+                                       Register index) {
+  FrameScope scope(masm, StackFrame::INTERNAL);
+
+  // Arguments register must be smi-tagged to call out.
+  __ SmiTag(argc);
+  __ Push(argc, function, feedback_vector, index);
+
+  DCHECK(feedback_vector.Is(x2) && index.Is(x3));
+  __ CallStub(stub);
+
+  __ Pop(index, feedback_vector, function, argc);
+  __ SmiUntag(argc);
+}
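The new CallStubInRecordCallTarget helper above factors out the call-out framing that was previously open-coded around the CreateAllocationSiteStub call: open an internal frame, Smi-tag argc, push the four live registers, call the stub, then restore everything. As a reading aid only, here is a self-contained C++ model of that discipline; Reg, SmiTag and the std::stack "frame" are invented stand-ins, not V8 code, and only the tag/push/call/pop ordering mirrors the assembly.

    #include <stack>

    // Toy model of the register discipline CallStubInRecordCallTarget emits:
    // argc is Smi-tagged only for the duration of the call-out, and the four
    // values pushed before the stub call come back out unchanged.
    struct Reg { long value; };

    long SmiTag(long v) { return v << 1; }    // illustrative tagging only
    long SmiUntag(long v) { return v >> 1; }

    template <typename Stub>
    void CallStubInRecordCallTargetModel(Reg& argc, Reg& function,
                                         Reg& feedback_vector, Reg& index,
                                         Stub stub) {
      std::stack<long> frame;                 // stands in for the internal frame
      argc.value = SmiTag(argc.value);        // arguments register must be a Smi
      frame.push(argc.value);
      frame.push(function.value);
      frame.push(feedback_vector.value);
      frame.push(index.value);

      stub(feedback_vector, index);           // the stub may clobber registers

      index.value = frame.top(); frame.pop();
      feedback_vector.value = frame.top(); frame.pop();
      function.value = frame.top(); frame.pop();
      argc.value = frame.top(); frame.pop();
      argc.value = SmiUntag(argc.value);
    }

Note that the Pop lists the registers in the reverse of the Push order, which is what puts each value back where it came from once the stub returns.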
+
+
+static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
+                                     Register function,
+                                     Register feedback_vector, Register index,
+                                     Register scratch1, Register scratch2,
+                                     Register scratch3) {
   ASM_LOCATION("GenerateRecordCallTarget");
-  DCHECK(!AreAliased(scratch1, scratch2,
-                     argc, function, feedback_vector, index));
+  DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function,
+                     feedback_vector, index));
   // Cache the called function in a feedback vector slot. Cache states are
   // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic.
   // argc : number of arguments to the construct function
   // function : the function to call
   // feedback_vector : the feedback vector
   // index : slot in feedback vector (smi)
   Label initialize, done, miss, megamorphic, not_array_function;

   DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
             masm->isolate()->heap()->megamorphic_symbol());
   DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
             masm->isolate()->heap()->uninitialized_symbol());

   // Load the cache state.
-  __ Add(scratch1, feedback_vector,
+  Register feedback = scratch1;
+  Register feedback_map = scratch2;
+  Register feedback_value = scratch3;
+  __ Add(feedback, feedback_vector,
          Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
+  __ Ldr(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));

   // A monomorphic cache hit or an already megamorphic state: invoke the
   // function without changing the state.
-  __ Cmp(scratch1, function);
+  Label check_megamorphic;
+  __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset));
+  __ Cmp(function, feedback_value);
+  __ B(eq, &done);
+  __ Ldr(feedback_map, FieldMemOperand(feedback, 0));
+  __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex);
+  __ B(ne, &check_megamorphic);
+
+  // If function is not equal to the weak cell value, and the weak cell value is
+  // cleared, we have a new chance to become monomorphic.
+  __ JumpIfSmi(feedback_value, &initialize);
+  __ B(&megamorphic);
+
+  __ bind(&check_megamorphic);
+  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
   __ B(eq, &done);

   if (!FLAG_pretenuring_call_new) {
     // If we came here, we need to see if we are the array function.
     // If we didn't have a matching function, and we didn't find the megamorph
     // sentinel, then we have in the slot either some other function or an
     // AllocationSite. Do a map check on the object in scratch1 register.
-    __ Ldr(scratch2, FieldMemOperand(scratch1, AllocationSite::kMapOffset));
-    __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
+    __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss);

     // Make sure the function is the Array() function
     __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
     __ Cmp(function, scratch1);
     __ B(ne, &megamorphic);
     __ B(&done);
   }

   __ Bind(&miss);

(...skipping 15 matching lines...)

   if (!FLAG_pretenuring_call_new) {
     // Make sure the function is the Array() function
     __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1);
     __ Cmp(function, scratch1);
     __ B(ne, &not_array_function);

     // The target function is the Array constructor,
     // Create an AllocationSite if we don't already have it, store it in the
     // slot.
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      CreateAllocationSiteStub create_stub(masm->isolate());
-
-      // Arguments register must be smi-tagged to call out.
-      __ SmiTag(argc);
-      __ Push(argc, function, feedback_vector, index);
-
-      // CreateAllocationSiteStub expect the feedback vector in x2 and the slot
-      // index in x3.
-      DCHECK(feedback_vector.Is(x2) && index.Is(x3));
-      __ CallStub(&create_stub);
-
-      __ Pop(index, feedback_vector, function, argc);
-      __ SmiUntag(argc);
-    }
+    CreateAllocationSiteStub create_stub(masm->isolate());
+    CallStubInRecordCallTarget(masm, &create_stub, argc, function,
+                               feedback_vector, index);
     __ B(&done);

     __ Bind(&not_array_function);
   }

-  // An uninitialized cache is patched with the function.
-
-  __ Add(scratch1, feedback_vector,
-         Operand::UntagSmiAndScale(index, kPointerSizeLog2));
-  __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag);
-  __ Str(function, MemOperand(scratch1, 0));
-
-  __ Push(function);
-  __ RecordWrite(feedback_vector, scratch1, function, kLRHasNotBeenSaved,
-                 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
-  __ Pop(function);
-
+  CreateWeakCellStub create_stub(masm->isolate());
+  CallStubInRecordCallTarget(masm, &create_stub, argc, function,
+                             feedback_vector, index);
   __ Bind(&done);
 }
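For orientation, here is a rough, self-contained C++ sketch of the cache-update policy the rewritten GenerateRecordCallTarget implements on the !FLAG_pretenuring_call_new path. The types below (SlotKind, FeedbackSlot, JSFunctionLike) are invented for illustration and are not V8 classes; only the decision logic mirrors the hunk above.

    // Invented types for illustration only; these are not V8 classes.
    struct JSFunctionLike {};

    enum class SlotKind { kUninitialized, kMegamorphic, kWeakCell, kAllocationSite };

    struct FeedbackSlot {
      SlotKind kind = SlotKind::kUninitialized;
      JSFunctionLike* weak_target = nullptr;  // WeakCell payload; nullptr once cleared by GC
    };

    // Mirrors the decision logic of GenerateRecordCallTarget after this patch:
    // the monomorphic state is a WeakCell wrapping the constructor rather than
    // the bare JSFunction.
    void RecordCallTargetModel(FeedbackSlot* slot, JSFunctionLike* function,
                               JSFunctionLike* array_function) {
      auto initialize = [&] {
        // The &initialize label: Array() gets an AllocationSite, every other
        // constructor gets a fresh WeakCell (CreateWeakCellStub in the stub).
        if (function == array_function) {
          slot->kind = SlotKind::kAllocationSite;
        } else {
          slot->kind = SlotKind::kWeakCell;
          slot->weak_target = function;
        }
      };

      if (slot->kind == SlotKind::kWeakCell) {
        if (slot->weak_target == function) return;  // monomorphic hit: done
        if (slot->weak_target == nullptr) {         // cell cleared by the GC:
          initialize();                             // new chance to go monomorphic
          return;
        }
        slot->kind = SlotKind::kMegamorphic;        // saw a different constructor
        return;
      }
      if (slot->kind == SlotKind::kMegamorphic) return;  // stay megamorphic
      if (slot->kind == SlotKind::kAllocationSite && function == array_function) {
        return;                                     // Array() stays monomorphic
      }
      if (slot->kind == SlotKind::kUninitialized) { // first constructor seen here
        initialize();
        return;
      }
      slot->kind = SlotKind::kMegamorphic;          // anything else: go megamorphic
    }

The behavioural change is the cleared-cell branch: the old code stored the constructor directly in the feedback vector (a strong reference, written with RecordWrite in the removed lines above), whereas a WeakCell lets the GC drop a dead constructor, and a site whose cell has been cleared simply re-initializes on the next constructor call instead of being stuck on the old value.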


 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
   // Do not transform the receiver for strict mode functions.
   __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
   __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset));
   __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont);

(...skipping 118 matching lines...)

   Label slow, non_function_call;

   // Check that the function is not a smi.
   __ JumpIfSmi(function, &non_function_call);
   // Check that the function is a JSFunction.
   Register object_type = x10;
   __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE,
                          &slow);

   if (RecordCallTarget()) {
-    GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5);
+    GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11);

     __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2));
     if (FLAG_pretenuring_call_new) {
       // Put the AllocationSite from the feedback vector into x2.
       // By adding kPointerSize we encode that we know the AllocationSite
       // entry is at the feedback vector slot given by x3 + 1.
       __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize));
     } else {
       Label feedback_register_initialized;
       // Put the AllocationSite from the feedback vector into x2, or undefined.
(...skipping 2754 matching lines...)
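One layout detail worth keeping in mind when reading the pretenuring branch above: with FLAG_pretenuring_call_new, the AllocationSite is expected in the feedback-vector slot immediately after the call site's own slot, which is what the extra kPointerSize in the Ldr encodes. A purely illustrative sketch of that indexing, with stand-in types that are not V8's:

    #include <cstddef>
    #include <vector>

    // Stand-in for any heap object stored in the type-feedback vector.
    struct HeapObjectLike {};

    // Slot `i` holds what GenerateRecordCallTarget wrote (WeakCell, sentinel or
    // AllocationSite); with pretenuring, slot `i + 1` holds the AllocationSite
    // consumed by the construct stub above.
    HeapObjectLike* AllocationSiteForCallSite(
        const std::vector<HeapObjectLike*>& feedback_vector, std::size_t slot) {
      return feedback_vector[slot + 1];  // the "+ kPointerSize" in the stub
    }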
                                  kStackUnwindSpace, NULL, spill_offset,
                                  MemOperand(fp, 6 * kPointerSize), NULL);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM64