| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 2663 matching lines...) |
| 2674 // harmless to read at this position in a symbol (see static asserts in | 2674 // harmless to read at this position in a symbol (see static asserts in |
| 2675 // type-feedback-vector.h). | 2675 // type-feedback-vector.h). |
| 2676 Label check_allocation_site; | 2676 Label check_allocation_site; |
| 2677 __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset)); | 2677 __ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset)); |
| 2678 __ Cmp(function, feedback_value); | 2678 __ Cmp(function, feedback_value); |
| 2679 __ B(eq, &done); | 2679 __ B(eq, &done); |
| 2680 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); | 2680 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); |
| 2681 __ B(eq, &done); | 2681 __ B(eq, &done); |
| 2682 __ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset)); | 2682 __ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset)); |
| 2683 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); | 2683 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); |
| 2684 __ B(ne, FLAG_pretenuring_call_new ? &miss : &check_allocation_site); | 2684 __ B(ne, &check_allocation_site); |
| 2685 | 2685 |
| 2686 // If the weak cell is cleared, we have a new chance to become monomorphic. | 2686 // If the weak cell is cleared, we have a new chance to become monomorphic. |
| 2687 __ JumpIfSmi(feedback_value, &initialize); | 2687 __ JumpIfSmi(feedback_value, &initialize); |
| 2688 __ B(&megamorphic); | 2688 __ B(&megamorphic); |
| 2689 | 2689 |
| 2690 if (!FLAG_pretenuring_call_new) { | 2690 __ bind(&check_allocation_site); |
| 2691 __ bind(&check_allocation_site); | 2691 // If we came here, we need to see if we are the array function. |
| 2692 // If we came here, we need to see if we are the array function. | 2692 // If we didn't have a matching function, and we didn't find the megamorph |
| 2693 // If we didn't have a matching function, and we didn't find the megamorph | 2693 // sentinel, then we have in the slot either some other function or an |
| 2694 // sentinel, then we have in the slot either some other function or an | 2694 // AllocationSite. |
| 2695 // AllocationSite. | 2695 __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss); |
| 2696 __ JumpIfNotRoot(feedback_map, Heap::kAllocationSiteMapRootIndex, &miss); | |
| 2697 | 2696 |
| 2698 // Make sure the function is the Array() function | 2697 // Make sure the function is the Array() function |
| 2699 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); | 2698 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); |
| 2700 __ Cmp(function, scratch1); | 2699 __ Cmp(function, scratch1); |
| 2701 __ B(ne, &megamorphic); | 2700 __ B(ne, &megamorphic); |
| 2702 __ B(&done); | 2701 __ B(&done); |
| 2703 } | |
| 2704 | 2702 |
| 2705 __ Bind(&miss); | 2703 __ Bind(&miss); |
| 2706 | 2704 |
| 2707 // A monomorphic miss (i.e., here the cache is not uninitialized) goes | 2705 // A monomorphic miss (i.e., here the cache is not uninitialized) goes |
| 2708 // megamorphic. | 2706 // megamorphic. |
| 2709 __ JumpIfRoot(scratch1, Heap::kuninitialized_symbolRootIndex, &initialize); | 2707 __ JumpIfRoot(scratch1, Heap::kuninitialized_symbolRootIndex, &initialize); |
| 2710 // MegamorphicSentinel is an immortal immovable object (undefined) so no | 2708 // MegamorphicSentinel is an immortal immovable object (undefined) so no |
| 2711 // write-barrier is needed. | 2709 // write-barrier is needed. |
| 2712 __ Bind(&megamorphic); | 2710 __ Bind(&megamorphic); |
| 2713 __ Add(scratch1, feedback_vector, | 2711 __ Add(scratch1, feedback_vector, |
| 2714 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 2712 Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
| 2715 __ LoadRoot(scratch2, Heap::kmegamorphic_symbolRootIndex); | 2713 __ LoadRoot(scratch2, Heap::kmegamorphic_symbolRootIndex); |
| 2716 __ Str(scratch2, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); | 2714 __ Str(scratch2, FieldMemOperand(scratch1, FixedArray::kHeaderSize)); |
| 2717 __ B(&done); | 2715 __ B(&done); |
| 2718 | 2716 |
| 2719 // An uninitialized cache is patched with the function or sentinel to | 2717 // An uninitialized cache is patched with the function or sentinel to |
| 2720 // indicate the ElementsKind if function is the Array constructor. | 2718 // indicate the ElementsKind if function is the Array constructor. |
| 2721 __ Bind(&initialize); | 2719 __ Bind(&initialize); |
| 2722 | 2720 |
| 2723 if (!FLAG_pretenuring_call_new) { | 2721 // Make sure the function is the Array() function |
| 2724 // Make sure the function is the Array() function | 2722 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); |
| 2725 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); | 2723 __ Cmp(function, scratch1); |
| 2726 __ Cmp(function, scratch1); | 2724 __ B(ne, ¬_array_function); |
| 2727 __ B(ne, ¬_array_function); | |
| 2728 | 2725 |
| 2729 // The target function is the Array constructor, | 2726 // The target function is the Array constructor, |
| 2730 // Create an AllocationSite if we don't already have it, store it in the | 2727 // Create an AllocationSite if we don't already have it, store it in the |
| 2731 // slot. | 2728 // slot. |
| 2732 CreateAllocationSiteStub create_stub(masm->isolate()); | 2729 CreateAllocationSiteStub create_stub(masm->isolate()); |
| 2733 CallStubInRecordCallTarget(masm, &create_stub, argc, function, | 2730 CallStubInRecordCallTarget(masm, &create_stub, argc, function, |
| 2734 feedback_vector, index, orig_construct, | 2731 feedback_vector, index, orig_construct, is_super); |
| 2735 is_super); | 2732 __ B(&done); |
| 2736 __ B(&done); | |
| 2737 | 2733 |
| 2738 __ Bind(¬_array_function); | 2734 __ Bind(¬_array_function); |
| 2739 } | 2735 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
| 2740 | 2736 CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function, |
| 2741 CreateWeakCellStub create_stub(masm->isolate()); | |
| 2742 CallStubInRecordCallTarget(masm, &create_stub, argc, function, | |
| 2743 feedback_vector, index, orig_construct, is_super); | 2737 feedback_vector, index, orig_construct, is_super); |
| 2744 __ Bind(&done); | 2738 __ Bind(&done); |
| 2745 } | 2739 } |
| 2746 | 2740 |
| 2747 | 2741 |
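The hunk above deletes the `FLAG_pretenuring_call_new` branches, so `GenerateRecordCallTarget` always runs the same state machine: a monomorphic WeakCell (or, for the Array() constructor, an AllocationSite) that can decay to the megamorphic sentinel. As a reading aid, here is a minimal C++ sketch of that transition logic; the `Feedback` and `Slot` names are invented for illustration, and this is not V8's implementation.

```cpp
// Illustrative stand-ins for the V8 heap values involved; all names invented.
enum class Feedback {
  kUninitialized,    // uninitialized_symbol sentinel
  kMegamorphic,      // megamorphic_symbol sentinel
  kWeakCell,         // WeakCell pointing at a cached target function
  kClearedWeakCell,  // WeakCell whose value was collected (reads as a Smi)
  kAllocationSite    // feedback for the Array() constructor
};

struct Slot {
  Feedback state = Feedback::kUninitialized;
  const void* cached_function = nullptr;  // valid only for kWeakCell
};

// Mirrors the control flow the stub emits after this patch.
void RecordCallTarget(Slot& slot, const void* function, bool is_array_ctor) {
  // Monomorphic hit or already-megamorphic: invoke without changing state.
  if (slot.state == Feedback::kWeakCell && slot.cached_function == function)
    return;
  if (slot.state == Feedback::kMegamorphic) return;
  // AllocationSite feedback stays valid only for the Array() function.
  if (slot.state == Feedback::kAllocationSite && is_array_ctor) return;

  // Any other mismatch on live feedback goes megamorphic for good.
  if (slot.state == Feedback::kWeakCell ||
      slot.state == Feedback::kAllocationSite) {
    slot = {Feedback::kMegamorphic, nullptr};
    return;
  }

  // Uninitialized or cleared cell: (re)initialize. Array() gets an
  // AllocationSite, every other function gets a WeakCell pointing at it.
  slot = is_array_ctor ? Slot{Feedback::kAllocationSite, nullptr}
                       : Slot{Feedback::kWeakCell, function};
}
```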
| 2748 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | 2742 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
| 2749 // Do not transform the receiver for strict mode functions. | 2743 // Do not transform the receiver for strict mode functions. |
| 2750 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | 2744 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); |
| 2751 __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); | 2745 __ Ldr(w4, FieldMemOperand(x3, SharedFunctionInfo::kCompilerHintsOffset)); |
| 2752 __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont); | 2746 __ Tbnz(w4, SharedFunctionInfo::kStrictModeFunction, cont); |
| (...skipping 102 matching lines...) |
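`EmitContinueIfStrictOrNative` in the hunk above decides whether the receiver needs conversion by testing single bits of the shared function info's compiler-hints word; `Tbnz` branches when the tested bit is non-zero. A hedged one-liner of what that bit test computes (the bit-index parameter is a stand-in for `SharedFunctionInfo::kStrictModeFunction`):

```cpp
#include <cstdint>

// Sketch of the Tbnz test: take the 32-bit compiler-hints word and report
// (i.e., branch on) whether the given bit is set. The bit index is passed
// in because the real constant lives in SharedFunctionInfo.
inline bool TestBitAndBranch(uint32_t compiler_hints, unsigned bit) {
  return ((compiler_hints >> bit) & 1u) != 0;  // Tbnz w4, #bit, label
}
```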
| 2855 // Check that the function is a JSFunction. | 2849 // Check that the function is a JSFunction. |
| 2856 Register object_type = x10; | 2850 Register object_type = x10; |
| 2857 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, | 2851 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, |
| 2858 &slow); | 2852 &slow); |
| 2859 | 2853 |
| 2860 if (RecordCallTarget()) { | 2854 if (RecordCallTarget()) { |
| 2861 GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12, | 2855 GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12, |
| 2862 IsSuperConstructorCall()); | 2856 IsSuperConstructorCall()); |
| 2863 | 2857 |
| 2864 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 2858 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 2865 if (FLAG_pretenuring_call_new) { | |
| 2866 // Put the AllocationSite from the feedback vector into x2. | |
| 2867 // By adding kPointerSize we encode that we know the AllocationSite | |
| 2868 // entry is at the feedback vector slot given by x3 + 1. | |
| 2869 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize + kPointerSize)); | |
| 2870 } else { | |
| 2871 Label feedback_register_initialized; | 2859 Label feedback_register_initialized; |
| 2872 // Put the AllocationSite from the feedback vector into x2, or undefined. | 2860 // Put the AllocationSite from the feedback vector into x2, or undefined. |
| 2873 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); | 2861 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); |
| 2874 __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); | 2862 __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); |
| 2875 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, | 2863 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, |
| 2876 &feedback_register_initialized); | 2864 &feedback_register_initialized); |
| 2877 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); | 2865 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); |
| 2878 __ bind(&feedback_register_initialized); | 2866 __ bind(&feedback_register_initialized); |
| 2879 } | |
| 2880 | 2867 |
| 2881 __ AssertUndefinedOrAllocationSite(x2, x5); | 2868 __ AssertUndefinedOrAllocationSite(x2, x5); |
| 2882 } | 2869 } |
| 2883 | 2870 |
| 2884 if (IsSuperConstructorCall()) { | 2871 if (IsSuperConstructorCall()) { |
| 2885 __ Mov(x3, x4); | 2872 __ Mov(x3, x4); |
| 2886 } else { | 2873 } else { |
| 2887 __ Mov(x3, function); | 2874 __ Mov(x3, function); |
| 2888 } | 2875 } |
| 2889 | 2876 |
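In this second hunk, removing pretenuring also removes the fixed `slot + 1` AllocationSite read: the stub now always loads the slot itself and substitutes `undefined` when the value's map is not the AllocationSite map. A hedged C++ sketch of the two details involved, assuming the 64-bit Smi layout of this V8 era (payload in the upper 32 bits, `kPointerSizeLog2 == 3`); the pointers are opaque stand-ins for tagged heap values.

```cpp
#include <cstdint>

// What Operand::UntagSmiAndScale(index, kPointerSizeLog2) computes: untag
// the Smi slot index, then scale it to a byte offset into the vector.
constexpr int kSmiShift = 32;        // 64-bit Smi payload in the upper word
constexpr int kPointerSizeLog2 = 3;  // 8-byte pointers

inline int64_t SlotByteOffset(int64_t smi_index) {
  return (smi_index >> kSmiShift) << kPointerSizeLog2;
}

// The feedback-register selection the hunk keeps: the slot value if it is
// an AllocationSite, otherwise undefined.
inline const void* AllocationSiteOrUndefined(const void* slot_value,
                                             const void* slot_value_map,
                                             const void* allocation_site_map,
                                             const void* undefined_value) {
  return slot_value_map == allocation_site_map ? slot_value : undefined_value;
}
```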
| (...skipping 3108 matching lines...) |
| 5998 MemOperand(fp, 6 * kPointerSize), NULL); | 5985 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5999 } | 5986 } |
| 6000 | 5987 |
| 6001 | 5988 |
| 6002 #undef __ | 5989 #undef __ |
| 6003 | 5990 |
| 6004 } // namespace internal | 5991 } // namespace internal |
| 6005 } // namespace v8 | 5992 } // namespace v8 |
| 6006 | 5993 |
| 6007 #endif // V8_TARGET_ARCH_ARM64 | 5994 #endif // V8_TARGET_ARCH_ARM64 |