OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS64 | 5 #if V8_TARGET_ARCH_MIPS64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 2520 matching lines...) |
2531 __ Branch(&done); | 2531 __ Branch(&done); |
2532 | 2532 |
2533 __ bind(&not_array_function); | 2533 __ bind(&not_array_function); |
2534 | 2534 |
2535 CreateWeakCellStub weak_cell_stub(masm->isolate()); | 2535 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
2536 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); | 2536 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); |
2537 __ bind(&done); | 2537 __ bind(&done); |
2538 } | 2538 } |
2539 | 2539 |
2540 | 2540 |
2541 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | |
2542 // ----------- S t a t e ------------- | |
2543 // -- a1 : the function to call | |
2544 // -- a3 : the shared function info | |
2545 // ----------------------------------- | |
2546 // Do not transform the receiver for strict mode functions. | |
2547 int32_t strict_mode_function_mask = | |
2548 1 << SharedFunctionInfo::kStrictModeBitWithinByte; | |
2549 // Do not transform the receiver for native (Compilerhints already in a3). | |
2550 int32_t native_mask = 1 << SharedFunctionInfo::kNativeBitWithinByte; | |
2551 | |
2552 __ lbu(a4, FieldMemOperand(a3, SharedFunctionInfo::kStrictModeByteOffset)); | |
2553 __ And(at, a4, Operand(strict_mode_function_mask)); | |
2554 __ Branch(cont, ne, at, Operand(zero_reg)); | |
2555 __ lbu(a4, FieldMemOperand(a3, SharedFunctionInfo::kNativeByteOffset)); | |
2556 __ And(at, a4, Operand(native_mask)); | |
2557 __ Branch(cont, ne, at, Operand(zero_reg)); | |
2558 } | |
2559 | |
2560 | |
2561 static void EmitSlowCase(MacroAssembler* masm, int argc) { | |
2562 __ li(a0, Operand(argc)); | |
2563 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2564 } | |
2565 | |
2566 | |
2567 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { | |
2568 // Wrap the receiver and patch it back onto the stack. | |
2569 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
2570 __ Push(a1); | |
2571 __ mov(a0, a3); | |
2572 ToObjectStub stub(masm->isolate()); | |
2573 __ CallStub(&stub); | |
2574 __ pop(a1); | |
2575 } | |
2576 __ Branch(USE_DELAY_SLOT, cont); | |
2577 __ sd(v0, MemOperand(sp, argc * kPointerSize)); | |
2578 } | |
2579 | |
2580 | |
2581 static void EmitClassConstructorCallCheck(MacroAssembler* masm) { | |
2582 // ----------- S t a t e ------------- | |
2583 // -- a1 : the function to call | |
2584 // -- a3 : the shared function info | |
2585 // ----------------------------------- | |
2586 // ClassConstructor Check: ES6 section 9.2.1 [[Call]] | |
2587 Label non_class_constructor; | |
2588 __ lbu(a4, FieldMemOperand(a3, SharedFunctionInfo::kFunctionKindByteOffset)); | |
2589 __ And(at, a4, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); | |
2590 __ Branch(&non_class_constructor, eq, at, Operand(zero_reg)); | |
2591 // If we call a classConstructor Function throw a TypeError | |
2592 // indirectly via the CallFunction builtin. | |
2593 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); | |
2594 __ bind(&non_class_constructor); | |
2595 } | |
2596 | |
2597 | |
2598 static void CallFunctionNoFeedback(MacroAssembler* masm, | |
2599 int argc, bool needs_checks, | |
2600 bool call_as_method) { | |
2601 // a1 : the function to call | |
2602 Label slow, wrap, cont; | |
2603 | |
2604 if (needs_checks) { | |
2605 // Check that the function is really a JavaScript function. | |
2606 // a1: pushed function (to be verified) | |
2607 __ JumpIfSmi(a1, &slow); | |
2608 | |
2609 // Goto slow case if we do not have a function. | |
2610 __ GetObjectType(a1, a4, a4); | |
2611 __ Branch(&slow, ne, a4, Operand(JS_FUNCTION_TYPE)); | |
2612 } | |
2613 | |
2614 __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
2615 EmitClassConstructorCallCheck(masm); | |
2616 | |
2617 // Fast-case: Invoke the function now. | |
2618 // a1: pushed function | |
2619 ParameterCount actual(argc); | |
2620 | |
2621 if (call_as_method) { | |
2622 if (needs_checks) { | |
2623 EmitContinueIfStrictOrNative(masm, &cont); | |
2624 } | |
2625 | |
2626 // Compute the receiver in sloppy mode. | |
2627 __ ld(a3, MemOperand(sp, argc * kPointerSize)); | |
2628 | |
2629 if (needs_checks) { | |
2630 __ JumpIfSmi(a3, &wrap); | |
2631 __ GetObjectType(a3, a4, a4); | |
2632 __ Branch(&wrap, lt, a4, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
2633 } else { | |
2634 __ jmp(&wrap); | |
2635 } | |
2636 | |
2637 __ bind(&cont); | |
2638 } | |
2639 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
2640 | |
2641 if (needs_checks) { | |
2642 // Slow-case: Non-function called. | |
2643 __ bind(&slow); | |
2644 EmitSlowCase(masm, argc); | |
2645 } | |
2646 | |
2647 if (call_as_method) { | |
2648 __ bind(&wrap); | |
2649 // Wrap the receiver and patch it back onto the stack. | |
2650 EmitWrapCase(masm, argc, &cont); | |
2651 } | |
2652 } | |
2653 | |
2654 | |
2655 void CallFunctionStub::Generate(MacroAssembler* masm) { | |
2656 CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod()); | |
2657 } | |
2658 | |
2659 | |
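[Editor's sketch] For orientation: the deleted CallFunctionStub path above duplicated receiver handling that the shared Call builtin now performs centrally. A minimal, standalone C++ model of the decision EmitContinueIfStrictOrNative/EmitWrapCase encoded (the types and names below are illustrative stand-ins, not V8's API):

    #include <iostream>

    // Hypothetical stand-in for the bits the stub tested on the
    // SharedFunctionInfo; real V8 packs these into compiler-hints bytes.
    struct SharedInfo {
      bool is_strict;
      bool is_native;
    };

    // Mirrors EmitContinueIfStrictOrNative: the receiver is coerced to an
    // object only for sloppy-mode, non-native functions. EmitWrapCase was
    // the step that called ToObject and patched the result onto the stack.
    bool NeedsReceiverWrap(const SharedInfo& shared, bool receiver_is_object) {
      if (shared.is_strict || shared.is_native) return false;
      return !receiver_is_object;
    }

    int main() {
      SharedInfo sloppy{false, false};
      SharedInfo strict{true, false};
      std::cout << NeedsReceiverWrap(sloppy, /*receiver_is_object=*/false)   // 1
                << NeedsReceiverWrap(strict, /*receiver_is_object=*/false)   // 0
                << '\n';
    }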
2660 void CallConstructStub::Generate(MacroAssembler* masm) { | 2541 void CallConstructStub::Generate(MacroAssembler* masm) { |
2661 // a0 : number of arguments | 2542 // a0 : number of arguments |
2662 // a1 : the function to call | 2543 // a1 : the function to call |
2663 // a2 : feedback vector | 2544 // a2 : feedback vector |
2664 // a3 : slot in feedback vector (Smi, for RecordCallTarget) | 2545 // a3 : slot in feedback vector (Smi, for RecordCallTarget) |
2665 // a4 : original constructor (for IsSuperConstructorCall) | 2546 // a4 : original constructor (for IsSuperConstructorCall) |
2666 | 2547 |
2667 Label non_function; | 2548 Label non_function; |
2668 // Check that the function is not a smi. | 2549 // Check that the function is not a smi. |
2669 __ JumpIfSmi(a1, &non_function); | 2550 __ JumpIfSmi(a1, &non_function); |
(...skipping 103 matching lines...) |
2773 | 2654 |
2774 | 2655 |
2775 void CallICStub::Generate(MacroAssembler* masm) { | 2656 void CallICStub::Generate(MacroAssembler* masm) { |
2776 // a1 - function | 2657 // a1 - function |
2777 // a3 - slot id (Smi) | 2658 // a3 - slot id (Smi) |
2778 // a2 - vector | 2659 // a2 - vector |
2779 const int with_types_offset = | 2660 const int with_types_offset = |
2780 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2661 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2781 const int generic_offset = | 2662 const int generic_offset = |
2782 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2663 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2783 Label extra_checks_or_miss, slow_start; | 2664 Label extra_checks_or_miss, call; |
2784 Label slow, wrap, cont; | |
2785 Label have_js_function; | |
2786 int argc = arg_count(); | 2665 int argc = arg_count(); |
2787 ParameterCount actual(argc); | 2666 ParameterCount actual(argc); |
2788 | 2667 |
2789 // The checks. First, does a1 match the recorded monomorphic target? | 2668 // The checks. First, does a1 match the recorded monomorphic target? |
2790 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | 2669 __ dsrl(a4, a3, 32 - kPointerSizeLog2); |
2791 __ Daddu(a4, a2, Operand(a4)); | 2670 __ Daddu(a4, a2, Operand(a4)); |
2792 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); | 2671 __ ld(a4, FieldMemOperand(a4, FixedArray::kHeaderSize)); |
2793 | 2672 |
2794 // We don't know that we have a weak cell. We might have a private symbol | 2673 // We don't know that we have a weak cell. We might have a private symbol |
2795 // or an AllocationSite, but the memory is safe to examine. | 2674 // or an AllocationSite, but the memory is safe to examine. |
(...skipping 16 matching lines...) |
2812 // convincing us that we have a monomorphic JSFunction. | 2691 // convincing us that we have a monomorphic JSFunction. |
2813 __ JumpIfSmi(a1, &extra_checks_or_miss); | 2692 __ JumpIfSmi(a1, &extra_checks_or_miss); |
2814 | 2693 |
2815 // Increment the call count for monomorphic function calls. | 2694 // Increment the call count for monomorphic function calls. |
2816 __ dsrl(t0, a3, 32 - kPointerSizeLog2); | 2695 __ dsrl(t0, a3, 32 - kPointerSizeLog2); |
2817 __ Daddu(a3, a2, Operand(t0)); | 2696 __ Daddu(a3, a2, Operand(t0)); |
2818 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); | 2697 __ ld(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); |
2819 __ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2698 __ Daddu(t0, t0, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2820 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); | 2699 __ sd(t0, FieldMemOperand(a3, FixedArray::kHeaderSize + kPointerSize)); |
2821 | 2700 |
2822 __ bind(&have_js_function); | 2701 __ bind(&call); |
2823 | 2702 __ li(a0, Operand(argc)); |
2824 __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 2703 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
2825 EmitClassConstructorCallCheck(masm); | |
2826 | |
2827 if (CallAsMethod()) { | |
2828 EmitContinueIfStrictOrNative(masm, &cont); | |
2829 // Compute the receiver in sloppy mode. | |
2830 __ ld(a3, MemOperand(sp, argc * kPointerSize)); | |
2831 | |
2832 __ JumpIfSmi(a3, &wrap); | |
2833 __ GetObjectType(a3, a4, a4); | |
2834 __ Branch(&wrap, lt, a4, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
2835 | |
2836 __ bind(&cont); | |
2837 } | |
2838 | |
2839 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
2840 | |
2841 __ bind(&slow); | |
2842 EmitSlowCase(masm, argc); | |
2843 | |
2844 if (CallAsMethod()) { | |
2845 __ bind(&wrap); | |
2846 EmitWrapCase(masm, argc, &cont); | |
2847 } | |
2848 | 2704 |
2849 __ bind(&extra_checks_or_miss); | 2705 __ bind(&extra_checks_or_miss); |
2850 Label uninitialized, miss, not_allocation_site; | 2706 Label uninitialized, miss, not_allocation_site; |
2851 | 2707 |
2852 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2708 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
2853 __ Branch(&slow_start, eq, a4, Operand(at)); | 2709 __ Branch(&call, eq, a4, Operand(at)); |
2854 | 2710 |
2855 // Verify that a4 contains an AllocationSite | 2711 // Verify that a4 contains an AllocationSite |
2856 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset)); | 2712 __ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset)); |
2857 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 2713 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
2858 __ Branch(&not_allocation_site, ne, a5, Operand(at)); | 2714 __ Branch(&not_allocation_site, ne, a5, Operand(at)); |
2859 | 2715 |
2860 HandleArrayCase(masm, &miss); | 2716 HandleArrayCase(masm, &miss); |
2861 | 2717 |
2862 __ bind(&not_allocation_site); | 2718 __ bind(&not_allocation_site); |
2863 | 2719 |
(...skipping 14 matching lines...) |
2878 __ dsrl(a4, a3, 32 - kPointerSizeLog2); | 2734 __ dsrl(a4, a3, 32 - kPointerSizeLog2); |
2879 __ Daddu(a4, a2, Operand(a4)); | 2735 __ Daddu(a4, a2, Operand(a4)); |
2880 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2736 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
2881 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize)); | 2737 __ sd(at, FieldMemOperand(a4, FixedArray::kHeaderSize)); |
2882 // We have to update statistics for runtime profiling. | 2738 // We have to update statistics for runtime profiling. |
2883 __ ld(a4, FieldMemOperand(a2, with_types_offset)); | 2739 __ ld(a4, FieldMemOperand(a2, with_types_offset)); |
2884 __ Dsubu(a4, a4, Operand(Smi::FromInt(1))); | 2740 __ Dsubu(a4, a4, Operand(Smi::FromInt(1))); |
2885 __ sd(a4, FieldMemOperand(a2, with_types_offset)); | 2741 __ sd(a4, FieldMemOperand(a2, with_types_offset)); |
2886 __ ld(a4, FieldMemOperand(a2, generic_offset)); | 2742 __ ld(a4, FieldMemOperand(a2, generic_offset)); |
2887 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); | 2743 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); |
2888 __ Branch(USE_DELAY_SLOT, &slow_start); | 2744 __ Branch(USE_DELAY_SLOT, &call); |
2889 __ sd(a4, FieldMemOperand(a2, generic_offset)); // In delay slot. | 2745 __ sd(a4, FieldMemOperand(a2, generic_offset)); // In delay slot. |
2890 | 2746 |
2891 __ bind(&uninitialized); | 2747 __ bind(&uninitialized); |
2892 | 2748 |
2893 // We are going monomorphic, provided we actually have a JSFunction. | 2749 // We are going monomorphic, provided we actually have a JSFunction. |
2894 __ JumpIfSmi(a1, &miss); | 2750 __ JumpIfSmi(a1, &miss); |
2895 | 2751 |
2896 // Goto miss case if we do not have a function. | 2752 // Goto miss case if we do not have a function. |
2897 __ GetObjectType(a1, a4, a4); | 2753 __ GetObjectType(a1, a4, a4); |
2898 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE)); | 2754 __ Branch(&miss, ne, a4, Operand(JS_FUNCTION_TYPE)); |
(...skipping 19 matching lines...) |
2918 // a3 - slot | 2774 // a3 - slot |
2919 // a1 - function | 2775 // a1 - function |
2920 { | 2776 { |
2921 FrameScope scope(masm, StackFrame::INTERNAL); | 2777 FrameScope scope(masm, StackFrame::INTERNAL); |
2922 CreateWeakCellStub create_stub(masm->isolate()); | 2778 CreateWeakCellStub create_stub(masm->isolate()); |
2923 __ Push(a1); | 2779 __ Push(a1); |
2924 __ CallStub(&create_stub); | 2780 __ CallStub(&create_stub); |
2925 __ Pop(a1); | 2781 __ Pop(a1); |
2926 } | 2782 } |
2927 | 2783 |
2928 __ Branch(&have_js_function); | 2784 __ Branch(&call); |
2929 | 2785 |
2930 // We are here because tracing is on or we encountered a MISS case we can't | 2786 // We are here because tracing is on or we encountered a MISS case we can't |
2931 // handle here. | 2787 // handle here. |
2932 __ bind(&miss); | 2788 __ bind(&miss); |
2933 GenerateMiss(masm); | 2789 GenerateMiss(masm); |
2934 | 2790 |
2935 // the slow case | 2791 __ Branch(&call); |
2936 __ bind(&slow_start); | |
2937 // Check that the function is really a JavaScript function. | |
2938 // a1: pushed function (to be verified) | |
2939 __ JumpIfSmi(a1, &slow); | |
2940 | |
2941 // Goto slow case if we do not have a function. | |
2942 __ GetObjectType(a1, a4, a4); | |
2943 __ Branch(&slow, ne, a4, Operand(JS_FUNCTION_TYPE)); | |
2944 __ Branch(&have_js_function); | |
2945 } | 2792 } |
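[Editor's sketch] The rewritten CallICStub above funnels every outcome through the single &call label, which tail-calls the generic Call builtin (the builtin itself now handles non-function callees, so the old slow/wrap/cont paths disappear). A compact, standalone C++ model of the feedback-slot state machine (enum and function names are illustrative, not V8's):

    #include <iostream>

    // Illustrative feedback-slot states; V8 encodes these as a weak cell,
    // an AllocationSite, or the megamorphic/uninitialized sentinel symbols.
    enum class Feedback {
      Uninitialized, MonomorphicWeakCell, AllocationSite, Megamorphic
    };

    enum class Action { HandleArrayCase, TailCallBuiltin, Miss };

    // Mirrors the new control flow: monomorphic hits and megamorphic slots
    // both end at the Call builtin; only array sites and first-time or
    // mismatched calls diverge, and even those branch back to &call after
    // updating the feedback slot.
    Action Dispatch(Feedback slot, bool target_matches) {
      switch (slot) {
        case Feedback::MonomorphicWeakCell:
          return target_matches ? Action::TailCallBuiltin  // fast path
                                : Action::Miss;  // goes megamorphic, then calls
        case Feedback::Megamorphic:
          return Action::TailCallBuiltin;  // &call: li(a0, argc); Jump(Call)
        case Feedback::AllocationSite:
          return Action::HandleArrayCase;
        case Feedback::Uninitialized:
          return Action::Miss;  // record target, then branch back to &call
      }
      return Action::Miss;
    }

    int main() {
      std::cout << (Dispatch(Feedback::Megamorphic, false) ==
                    Action::TailCallBuiltin)  // prints 1
                << '\n';
    }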
2946 | 2793 |
2947 | 2794 |
2948 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2795 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2949 FrameScope scope(masm, StackFrame::INTERNAL); | 2796 FrameScope scope(masm, StackFrame::INTERNAL); |
2950 | 2797 |
2951 // Push the receiver and the function and feedback info. | 2798 // Push the receiver and the function and feedback info. |
2952 __ Push(a1, a2, a3); | 2799 __ Push(a1, a2, a3); |
2953 | 2800 |
2954 // Call the entry. | 2801 // Call the entry. |
(...skipping 2863 matching lines...) |
5818 MemOperand(fp, 6 * kPointerSize), NULL); | 5665 MemOperand(fp, 6 * kPointerSize), NULL); |
5819 } | 5666 } |
5820 | 5667 |
5821 | 5668 |
5822 #undef __ | 5669 #undef __ |
5823 | 5670 |
5824 } // namespace internal | 5671 } // namespace internal |
5825 } // namespace v8 | 5672 } // namespace v8 |
5826 | 5673 |
5827 #endif // V8_TARGET_ARCH_MIPS64 | 5674 #endif // V8_TARGET_ARCH_MIPS64 |