| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_ARM64 | 5 #if V8_TARGET_ARCH_ARM64 |
| 6 | 6 |
| 7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
| 8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
| 9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
| 10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
| (...skipping 2604 matching lines...) |
| 2615 UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset)); | 2615 UntagSmiFieldMemOperand(subject, SlicedString::kOffsetOffset)); |
| 2616 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); | 2616 __ Ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); |
| 2617 __ B(&check_underlying); // Go to (4). | 2617 __ B(&check_underlying); // Go to (4). |
| 2618 #endif | 2618 #endif |
| 2619 } | 2619 } |
| 2620 | 2620 |
| 2621 | 2621 |
| 2622 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub, | 2622 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub, |
| 2623 Register argc, Register function, | 2623 Register argc, Register function, |
| 2624 Register feedback_vector, Register index, | 2624 Register feedback_vector, Register index, |
| 2625 Register new_target, bool is_super) { | 2625 Register new_target) { |
| 2626 FrameScope scope(masm, StackFrame::INTERNAL); | 2626 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2627 | 2627 |
| 2628 // Number-of-arguments register must be smi-tagged to call out. | 2628 // Number-of-arguments register must be smi-tagged to call out. |
| 2629 __ SmiTag(argc); | 2629 __ SmiTag(argc); |
| 2630 if (is_super) { | 2630 __ Push(argc, function, feedback_vector, index); |
| 2631 __ Push(argc, function, feedback_vector, index, new_target); | |
| 2632 } else { | |
| 2633 __ Push(argc, function, feedback_vector, index); | |
| 2634 } | |
| 2635 | 2631 |
| 2636 DCHECK(feedback_vector.Is(x2) && index.Is(x3)); | 2632 DCHECK(feedback_vector.Is(x2) && index.Is(x3)); |
| 2637 __ CallStub(stub); | 2633 __ CallStub(stub); |
| 2638 | 2634 |
| 2639 if (is_super) { | 2635 __ Pop(index, feedback_vector, function, argc); |
| 2640 __ Pop(new_target, index, feedback_vector, function, argc); | |
| 2641 } else { | |
| 2642 __ Pop(index, feedback_vector, function, argc); | |
| 2643 } | |
| 2644 __ SmiUntag(argc); | 2636 __ SmiUntag(argc); |
| 2645 } | 2637 } |
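
The helper above smi-tags the argument count, pushes the live registers, calls the stub, and pops them back in reverse order; with the super-constructor branch removed, the saved set is always the same four registers on both sides of the call. Below is a minimal C++ sketch (not V8 code) of that tag/push/call/pop/untag discipline; the 32-bit smi shift and all names here are assumptions for illustration only.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Illustrative smi tagging: the payload lives in the upper half of the word.
// (The shift width is an assumption for this sketch, not the V8 definition.)
constexpr int kSmiShift = 32;
uint64_t SmiTag(uint64_t value) { return value << kSmiShift; }
uint64_t SmiUntag(uint64_t smi) { return smi >> kSmiShift; }

struct Registers {
  uint64_t argc, function, feedback_vector, index;
};

// Models CallStubInRecordCallTarget: every value saved across the call is a
// tagged value (argc is smi-tagged first), and the pops mirror the pushes.
void CallStubWithSavedState(Registers& regs, std::vector<uint64_t>& stack,
                            void (*stub)(Registers&)) {
  regs.argc = SmiTag(regs.argc);                  // number of arguments -> smi
  stack.push_back(regs.argc);                     // Push(argc, function, ...)
  stack.push_back(regs.function);
  stack.push_back(regs.feedback_vector);
  stack.push_back(regs.index);

  stub(regs);                                     // __ CallStub(stub)

  regs.index = stack.back(); stack.pop_back();    // Pop in reverse order
  regs.feedback_vector = stack.back(); stack.pop_back();
  regs.function = stack.back(); stack.pop_back();
  regs.argc = stack.back(); stack.pop_back();
  regs.argc = SmiUntag(regs.argc);                // back to a raw count
}

int main() {
  Registers regs{3, 0x1000, 0x2000, 2};
  std::vector<uint64_t> stack;
  // The stub may clobber caller state; the save/restore hides that.
  CallStubWithSavedState(regs, stack,
                         [](Registers& r) { r.feedback_vector = 0; });
  assert(regs.argc == 3 && regs.function == 0x1000 && stack.empty());
  return 0;
}
```
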
| 2646 | 2638 |
| 2647 | 2639 |
| 2648 static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc, | 2640 static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc, |
| 2649 Register function, | 2641 Register function, |
| 2650 Register feedback_vector, Register index, | 2642 Register feedback_vector, Register index, |
| 2651 Register new_target, Register scratch1, | 2643 Register new_target, Register scratch1, |
| 2652 Register scratch2, Register scratch3, | 2644 Register scratch2, Register scratch3) { |
| 2653 bool is_super) { | |
| 2654 ASM_LOCATION("GenerateRecordCallTarget"); | 2645 ASM_LOCATION("GenerateRecordCallTarget"); |
| 2655 DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function, | 2646 DCHECK(!AreAliased(scratch1, scratch2, scratch3, argc, function, |
| 2656 feedback_vector, index, new_target)); | 2647 feedback_vector, index, new_target)); |
| 2657 // Cache the called function in a feedback vector slot. Cache states are | 2648 // Cache the called function in a feedback vector slot. Cache states are |
| 2658 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. | 2649 // uninitialized, monomorphic (indicated by a JSFunction), and megamorphic. |
| 2659 // argc : number of arguments to the construct function | 2650 // argc : number of arguments to the construct function |
| 2660 // function : the function to call | 2651 // function : the function to call |
| 2661 // feedback_vector : the feedback vector | 2652 // feedback_vector : the feedback vector |
| 2662 // index : slot in feedback vector (smi) | 2653 // index : slot in feedback vector (smi) |
| 2663 // new_target : new target (for IsSuperConstructorCall) | |
| 2664 Label initialize, done, miss, megamorphic, not_array_function; | 2654 Label initialize, done, miss, megamorphic, not_array_function; |
| 2665 | 2655 |
| 2666 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), | 2656 DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()), |
| 2667 masm->isolate()->heap()->megamorphic_symbol()); | 2657 masm->isolate()->heap()->megamorphic_symbol()); |
| 2668 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), | 2658 DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()), |
| 2669 masm->isolate()->heap()->uninitialized_symbol()); | 2659 masm->isolate()->heap()->uninitialized_symbol()); |
| 2670 | 2660 |
| 2671 // Load the cache state. | 2661 // Load the cache state. |
| 2672 Register feedback = scratch1; | 2662 Register feedback = scratch1; |
| 2673 Register feedback_map = scratch2; | 2663 Register feedback_map = scratch2; |
| (...skipping 55 matching lines...) |
| 2729 // Make sure the function is the Array() function | 2719 // Make sure the function is the Array() function |
| 2730 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); | 2720 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch1); |
| 2731 __ Cmp(function, scratch1); | 2721 __ Cmp(function, scratch1); |
| 2732 __ B(ne, ¬_array_function); | 2722 __ B(ne, ¬_array_function); |
| 2733 | 2723 |
| 2734 // The target function is the Array constructor, | 2724 // The target function is the Array constructor, |
| 2735 // Create an AllocationSite if we don't already have it, store it in the | 2725 // Create an AllocationSite if we don't already have it, store it in the |
| 2736 // slot. | 2726 // slot. |
| 2737 CreateAllocationSiteStub create_stub(masm->isolate()); | 2727 CreateAllocationSiteStub create_stub(masm->isolate()); |
| 2738 CallStubInRecordCallTarget(masm, &create_stub, argc, function, | 2728 CallStubInRecordCallTarget(masm, &create_stub, argc, function, |
| 2739 feedback_vector, index, new_target, is_super); | 2729 feedback_vector, index, new_target); |
| 2740 __ B(&done); | 2730 __ B(&done); |
| 2741 | 2731 |
| 2742 __ Bind(¬_array_function); | 2732 __ Bind(¬_array_function); |
| 2743 CreateWeakCellStub weak_cell_stub(masm->isolate()); | 2733 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
| 2744 CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function, | 2734 CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function, |
| 2745 feedback_vector, index, new_target, is_super); | 2735 feedback_vector, index, new_target); |
| 2746 __ Bind(&done); | 2736 __ Bind(&done); |
| 2747 } | 2737 } |
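
GenerateRecordCallTarget above implements the feedback-vector cache its comment describes: a slot starts uninitialized, becomes monomorphic once a target (or, for Array(), an AllocationSite) is recorded, and degrades to the megamorphic sentinel on a mismatch. A hedged sketch of that state machine in plain C++; the enum, struct, and function names are invented for illustration and are not V8 identifiers.

```cpp
#include <cstdio>
#include <string>

// Hypothetical model of one feedback-vector slot (names are illustrative).
enum class SlotState { kUninitialized, kMonomorphic, kMegamorphic };

struct FeedbackSlot {
  SlotState state = SlotState::kUninitialized;
  std::string target;  // stands in for the cached JSFunction / AllocationSite
};

// Mirrors the recording logic: initialize on first sight, keep a matching
// target, and go megamorphic (and stay there) once two different targets hit
// the same slot.
void RecordCallTarget(FeedbackSlot& slot, const std::string& function) {
  switch (slot.state) {
    case SlotState::kUninitialized:
      slot.state = SlotState::kMonomorphic;
      slot.target = function;              // first caller wins the slot
      break;
    case SlotState::kMonomorphic:
      if (slot.target != function) {       // miss: a different target was seen
        slot.state = SlotState::kMegamorphic;
        slot.target.clear();
      }
      break;
    case SlotState::kMegamorphic:
      break;                               // sentinel: nothing more to learn
  }
}

int main() {
  FeedbackSlot slot;
  RecordCallTarget(slot, "Array");  // uninitialized -> monomorphic
  RecordCallTarget(slot, "Array");  // still monomorphic
  RecordCallTarget(slot, "Foo");    // mismatch -> megamorphic
  std::printf("state=%d\n", static_cast<int>(slot.state));
  return 0;
}
```
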
| 2748 | 2738 |
| 2749 | 2739 |
| 2750 void CallConstructStub::Generate(MacroAssembler* masm) { | 2740 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 2751 ASM_LOCATION("CallConstructStub::Generate"); | 2741 ASM_LOCATION("CallConstructStub::Generate"); |
| 2752 // x0 : number of arguments | 2742 // x0 : number of arguments |
| 2753 // x1 : the function to call | 2743 // x1 : the function to call |
| 2754 // x2 : feedback vector | 2744 // x2 : feedback vector |
| 2755 // x3 : slot in feedback vector (Smi, for RecordCallTarget) | 2745 // x3 : slot in feedback vector (Smi, for RecordCallTarget) |
| 2756 // x4 : new target (for IsSuperConstructorCall) | |
| 2757 Register function = x1; | 2746 Register function = x1; |
| 2758 | 2747 |
| 2759 Label non_function; | 2748 Label non_function; |
| 2760 // Check that the function is not a smi. | 2749 // Check that the function is not a smi. |
| 2761 __ JumpIfSmi(function, &non_function); | 2750 __ JumpIfSmi(function, &non_function); |
| 2762 // Check that the function is a JSFunction. | 2751 // Check that the function is a JSFunction. |
| 2763 Register object_type = x10; | 2752 Register object_type = x10; |
| 2764 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, | 2753 __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, |
| 2765 &non_function); | 2754 &non_function); |
| 2766 | 2755 |
| 2767 if (RecordCallTarget()) { | 2756 GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12); |
| 2768 GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12, | |
| 2769 IsSuperConstructorCall()); | |
| 2770 | 2757 |
| 2771 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); | 2758 __ Add(x5, x2, Operand::UntagSmiAndScale(x3, kPointerSizeLog2)); |
| 2772 Label feedback_register_initialized; | 2759 Label feedback_register_initialized; |
| 2773 // Put the AllocationSite from the feedback vector into x2, or undefined. | 2760 // Put the AllocationSite from the feedback vector into x2, or undefined. |
| 2774 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); | 2761 __ Ldr(x2, FieldMemOperand(x5, FixedArray::kHeaderSize)); |
| 2775 __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); | 2762 __ Ldr(x5, FieldMemOperand(x2, AllocationSite::kMapOffset)); |
| 2776 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, | 2763 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex, |
| 2777 &feedback_register_initialized); | 2764 &feedback_register_initialized); |
| 2778 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); | 2765 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); |
| 2779 __ bind(&feedback_register_initialized); | 2766 __ bind(&feedback_register_initialized); |
| 2780 | 2767 |
| 2781 __ AssertUndefinedOrAllocationSite(x2, x5); | 2768 __ AssertUndefinedOrAllocationSite(x2, x5); |
| 2782 } | |
| 2783 | 2769 |
| 2784 if (IsSuperConstructorCall()) { | 2770 __ Mov(x3, function); |
| 2785 __ Mov(x3, x4); | |
| 2786 } else { | |
| 2787 __ Mov(x3, function); | |
| 2788 } | |
| 2789 | 2771 |
| 2790 // Tail call to the function-specific construct stub (still in the caller | 2772 // Tail call to the function-specific construct stub (still in the caller |
| 2791 // context at this point). | 2773 // context at this point). |
| 2792 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); | 2774 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); |
| 2793 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); | 2775 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); |
| 2794 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); | 2776 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); |
| 2795 __ Br(x4); | 2777 __ Br(x4); |
| 2796 | 2778 |
| 2797 __ Bind(&non_function); | 2779 __ Bind(&non_function); |
| 2798 __ Mov(x3, function); | 2780 __ Mov(x3, function); |
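
With the RecordCallTarget()/IsSuperConstructorCall() predicates dropped, CallConstructStub::Generate now always records the target and then loads the feedback slot, handing the construct stub the cached AllocationSite when one is present and the undefined sentinel otherwise. A small hedged sketch of that selection under assumed, illustrative types (none of these names exist in V8); the rest of the function is elided by the diff below.

```cpp
#include <cassert>
#include <optional>
#include <string>

// Hypothetical stand-in for a heap value inspected by the stub.
struct HeapValue {
  std::string map;  // e.g. "AllocationSiteMap", or the map of a WeakCell
};

// Models the feedback-register setup: read the slot, pass along the
// AllocationSite if its map matches, otherwise fall back to undefined
// (represented here by an empty optional).
std::optional<HeapValue> AllocationSiteOrUndefined(const HeapValue& slot_value) {
  if (slot_value.map == "AllocationSiteMap") {
    return slot_value;   // feedback register gets the AllocationSite
  }
  return std::nullopt;   // stands in for Heap::kUndefinedValueRootIndex
}

int main() {
  assert(AllocationSiteOrUndefined({"AllocationSiteMap"}).has_value());
  assert(!AllocationSiteOrUndefined({"WeakCellMap"}).has_value());
  return 0;
}
```
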
| (...skipping 3021 matching lines...) |
| 5820 MemOperand(fp, 6 * kPointerSize), NULL); | 5802 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5821 } | 5803 } |
| 5822 | 5804 |
| 5823 | 5805 |
| 5824 #undef __ | 5806 #undef __ |
| 5825 | 5807 |
| 5826 } // namespace internal | 5808 } // namespace internal |
| 5827 } // namespace v8 | 5809 } // namespace v8 |
| 5828 | 5810 |
| 5829 #endif // V8_TARGET_ARCH_ARM64 | 5811 #endif // V8_TARGET_ARCH_ARM64 |