| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 2484 matching lines...) | |
| 2495 CallStubInRecordCallTarget(masm, &create_stub, is_super); | 2495 CallStubInRecordCallTarget(masm, &create_stub, is_super); |
| 2496 __ Branch(&done); | 2496 __ Branch(&done); |
| 2497 | 2497 |
| 2498 __ bind(&not_array_function); | 2498 __ bind(&not_array_function); |
| 2499 CreateWeakCellStub weak_cell_stub(masm->isolate()); | 2499 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
| 2500 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); | 2500 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); |
| 2501 __ bind(&done); | 2501 __ bind(&done); |
| 2502 } | 2502 } |
| 2503 | 2503 |
| 2504 | 2504 |
| 2505 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | |
| 2506 // ----------- S t a t e ------------- | |
| 2507 // -- a1 : the function to call | |
| 2508 // -- a3 : the function's shared function info | |
| 2509 // ----------------------------------- | |
| 2510 __ lw(t0, FieldMemOperand(a3, SharedFunctionInfo::kCompilerHintsOffset)); | |
| 2511 | |
| 2512 // Do not transform the receiver for strict mode functions. | |
| 2513 int32_t strict_mode_function_mask = | |
| 2514 1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize); | |
| 2515 // Do not transform the receiver for native (Compilerhints already in a3). | |
| 2516 int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize); | |
| 2517 __ And(at, t0, Operand(strict_mode_function_mask | native_mask)); | |
| 2518 __ Branch(cont, ne, at, Operand(zero_reg)); | |
| 2519 } | |
| 2520 | |
| 2521 | |
| 2522 static void EmitSlowCase(MacroAssembler* masm, int argc) { | |
| 2523 __ li(a0, Operand(argc)); | |
| 2524 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
| 2525 } | |
| 2526 | |
| 2527 | |
| 2528 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { | |
| 2529 // Wrap the receiver and patch it back onto the stack. | |
| 2530 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
| 2531 __ Push(a1); | |
| 2532 __ mov(a0, a3); | |
| 2533 ToObjectStub stub(masm->isolate()); | |
| 2534 __ CallStub(&stub); | |
| 2535 __ pop(a1); | |
| 2536 } | |
| 2537 __ Branch(USE_DELAY_SLOT, cont); | |
| 2538 __ sw(v0, MemOperand(sp, argc * kPointerSize)); | |
| 2539 } | |
| 2540 | |
| 2541 | |
| 2542 static void EmitClassConstructorCallCheck(MacroAssembler* masm) { | |
| 2543 // ----------- S t a t e ------------- | |
| 2544 // -- a1 : the function to call | |
| 2545 // -- a3 : the function's shared function info | |
| 2546 // ----------------------------------- | |
| 2547 // ClassConstructor Check: ES6 section 9.2.1 [[Call]] | |
| 2548 Label non_class_constructor; | |
| 2549 // Check whether the current function is a classConstructor. | |
| 2550 __ lbu(t0, FieldMemOperand(a3, SharedFunctionInfo::kFunctionKindByteOffset)); | |
| 2551 __ And(at, t0, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); | |
| 2552 __ Branch(&non_class_constructor, eq, at, Operand(zero_reg)); | |
| 2553 // Step: 2, If we call a classConstructor Function throw a TypeError. | |
| 2554 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); | |
| 2555 __ bind(&non_class_constructor); | |
| 2556 } | |
| 2557 | |
| 2558 | |
| 2559 static void CallFunctionNoFeedback(MacroAssembler* masm, | |
| 2560 int argc, bool needs_checks, | |
| 2561 bool call_as_method) { | |
| 2562 // a1 : the function to call | |
| 2563 Label slow, wrap, cont; | |
| 2564 | |
| 2565 if (needs_checks) { | |
| 2566 // Check that the function is really a JavaScript function. | |
| 2567 // a1: pushed function (to be verified) | |
| 2568 __ JumpIfSmi(a1, &slow); | |
| 2569 | |
| 2570 // Goto slow case if we do not have a function. | |
| 2571 __ GetObjectType(a1, t0, t0); | |
| 2572 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); | |
| 2573 } | |
| 2574 | |
| 2575 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
| 2576 EmitClassConstructorCallCheck(masm); | |
| 2577 | |
| 2578 // Fast-case: Invoke the function now. | |
| 2579 // a1: pushed function | |
| 2580 ParameterCount actual(argc); | |
| 2581 | |
| 2582 if (call_as_method) { | |
| 2583 if (needs_checks) { | |
| 2584 EmitContinueIfStrictOrNative(masm, &cont); | |
| 2585 } | |
| 2586 | |
| 2587 // Compute the receiver in sloppy mode. | |
| 2588 __ lw(a3, MemOperand(sp, argc * kPointerSize)); | |
| 2589 | |
| 2590 if (needs_checks) { | |
| 2591 __ JumpIfSmi(a3, &wrap); | |
| 2592 __ GetObjectType(a3, t0, t0); | |
| 2593 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
| 2594 } else { | |
| 2595 __ jmp(&wrap); | |
| 2596 } | |
| 2597 | |
| 2598 __ bind(&cont); | |
| 2599 } | |
| 2600 | |
| 2601 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2602 | |
| 2603 if (needs_checks) { | |
| 2604 // Slow-case: Non-function called. | |
| 2605 __ bind(&slow); | |
| 2606 EmitSlowCase(masm, argc); | |
| 2607 } | |
| 2608 | |
| 2609 if (call_as_method) { | |
| 2610 __ bind(&wrap); | |
| 2611 // Wrap the receiver and patch it back onto the stack. | |
| 2612 EmitWrapCase(masm, argc, &cont); | |
| 2613 } | |
| 2614 } | |
| 2615 | |
| 2616 | |
| 2617 void CallFunctionStub::Generate(MacroAssembler* masm) { | |
| 2618 CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod()); | |
| 2619 } | |
| 2620 | |
| 2621 | |
| 2622 void CallConstructStub::Generate(MacroAssembler* masm) { | 2505 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 2623 // a0 : number of arguments | 2506 // a0 : number of arguments |
| 2624 // a1 : the function to call | 2507 // a1 : the function to call |
| 2625 // a2 : feedback vector | 2508 // a2 : feedback vector |
| 2626 // a3 : slot in feedback vector (Smi, for RecordCallTarget) | 2509 // a3 : slot in feedback vector (Smi, for RecordCallTarget) |
| 2627 // t0 : original constructor (for IsSuperConstructorCall) | 2510 // t0 : original constructor (for IsSuperConstructorCall) |
| 2628 | 2511 |
| 2629 Label non_function; | 2512 Label non_function; |
| 2630 // Check that the function is not a smi. | 2513 // Check that the function is not a smi. |
| 2631 __ JumpIfSmi(a1, &non_function); | 2514 __ JumpIfSmi(a1, &non_function); |
| (...skipping 63 matching lines...) | |
| 2695 | 2578 |
| 2696 | 2579 |
| 2697 void CallICStub::Generate(MacroAssembler* masm) { | 2580 void CallICStub::Generate(MacroAssembler* masm) { |
| 2698 // a1 - function | 2581 // a1 - function |
| 2699 // a3 - slot id (Smi) | 2582 // a3 - slot id (Smi) |
| 2700 // a2 - vector | 2583 // a2 - vector |
| 2701 const int with_types_offset = | 2584 const int with_types_offset = |
| 2702 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2585 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
| 2703 const int generic_offset = | 2586 const int generic_offset = |
| 2704 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2587 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
| 2705 Label extra_checks_or_miss, slow_start; | 2588 Label extra_checks_or_miss, call; |
| 2706 Label slow, wrap, cont; | |
| 2707 Label have_js_function; | |
| 2708 int argc = arg_count(); | 2589 int argc = arg_count(); |
| 2709 ParameterCount actual(argc); | 2590 ParameterCount actual(argc); |
| 2710 | 2591 |
| 2711 // The checks. First, does a1 match the recorded monomorphic target? | 2592 // The checks. First, does a1 match the recorded monomorphic target? |
| 2712 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 2593 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2713 __ Addu(t0, a2, Operand(t0)); | 2594 __ Addu(t0, a2, Operand(t0)); |
| 2714 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 2595 __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 2715 | 2596 |
| 2716 // We don't know that we have a weak cell. We might have a private symbol | 2597 // We don't know that we have a weak cell. We might have a private symbol |
| 2717 // or an AllocationSite, but the memory is safe to examine. | 2598 // or an AllocationSite, but the memory is safe to examine. |
| (...skipping 16 matching lines...) | |
| 2734 // convincing us that we have a monomorphic JSFunction. | 2615 // convincing us that we have a monomorphic JSFunction. |
| 2735 __ JumpIfSmi(a1, &extra_checks_or_miss); | 2616 __ JumpIfSmi(a1, &extra_checks_or_miss); |
| 2736 | 2617 |
| 2737 // Increment the call count for monomorphic function calls. | 2618 // Increment the call count for monomorphic function calls. |
| 2738 __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); | 2619 __ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2739 __ Addu(at, a2, Operand(at)); | 2620 __ Addu(at, a2, Operand(at)); |
| 2740 __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | 2621 __ lw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); |
| 2741 __ Addu(a3, a3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2622 __ Addu(a3, a3, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
| 2742 __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); | 2623 __ sw(a3, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize)); |
| 2743 | 2624 |
| 2744 __ bind(&have_js_function); | 2625 __ bind(&call); |
| 2745 | 2626 __ li(a0, Operand(argc)); |
| 2746 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | 2627 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 2747 EmitClassConstructorCallCheck(masm); | |
| 2748 | |
| 2749 if (CallAsMethod()) { | |
| 2750 EmitContinueIfStrictOrNative(masm, &cont); | |
| 2751 // Compute the receiver in sloppy mode. | |
| 2752 __ lw(a3, MemOperand(sp, argc * kPointerSize)); | |
| 2753 | |
| 2754 __ JumpIfSmi(a3, &wrap); | |
| 2755 __ GetObjectType(a3, t0, t0); | |
| 2756 __ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | |
| 2757 | |
| 2758 __ bind(&cont); | |
| 2759 } | |
| 2760 | |
| 2761 __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2762 | |
| 2763 __ bind(&slow); | |
| 2764 EmitSlowCase(masm, argc); | |
| 2765 | |
| 2766 if (CallAsMethod()) { | |
| 2767 __ bind(&wrap); | |
| 2768 EmitWrapCase(masm, argc, &cont); | |
| 2769 } | |
| 2770 | 2628 |
| 2771 __ bind(&extra_checks_or_miss); | 2629 __ bind(&extra_checks_or_miss); |
| 2772 Label uninitialized, miss, not_allocation_site; | 2630 Label uninitialized, miss, not_allocation_site; |
| 2773 | 2631 |
| 2774 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2632 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 2775 __ Branch(&slow_start, eq, t0, Operand(at)); | 2633 __ Branch(&call, eq, t0, Operand(at)); |
| 2776 | 2634 |
| 2777 // Verify that t0 contains an AllocationSite | 2635 // Verify that t0 contains an AllocationSite |
| 2778 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset)); | 2636 __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset)); |
| 2779 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); | 2637 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
| 2780 __ Branch(&not_allocation_site, ne, t1, Operand(at)); | 2638 __ Branch(&not_allocation_site, ne, t1, Operand(at)); |
| 2781 | 2639 |
| 2782 HandleArrayCase(masm, &miss); | 2640 HandleArrayCase(masm, &miss); |
| 2783 | 2641 |
| 2784 __ bind(&not_allocation_site); | 2642 __ bind(&not_allocation_site); |
| 2785 | 2643 |
| (...skipping 14 matching lines...) | |
| 2800 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); | 2658 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
| 2801 __ Addu(t0, a2, Operand(t0)); | 2659 __ Addu(t0, a2, Operand(t0)); |
| 2802 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); | 2660 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); |
| 2803 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); | 2661 __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
| 2804 // We have to update statistics for runtime profiling. | 2662 // We have to update statistics for runtime profiling. |
| 2805 __ lw(t0, FieldMemOperand(a2, with_types_offset)); | 2663 __ lw(t0, FieldMemOperand(a2, with_types_offset)); |
| 2806 __ Subu(t0, t0, Operand(Smi::FromInt(1))); | 2664 __ Subu(t0, t0, Operand(Smi::FromInt(1))); |
| 2807 __ sw(t0, FieldMemOperand(a2, with_types_offset)); | 2665 __ sw(t0, FieldMemOperand(a2, with_types_offset)); |
| 2808 __ lw(t0, FieldMemOperand(a2, generic_offset)); | 2666 __ lw(t0, FieldMemOperand(a2, generic_offset)); |
| 2809 __ Addu(t0, t0, Operand(Smi::FromInt(1))); | 2667 __ Addu(t0, t0, Operand(Smi::FromInt(1))); |
| 2810 __ Branch(USE_DELAY_SLOT, &slow_start); | 2668 __ Branch(USE_DELAY_SLOT, &call); |
| 2811 __ sw(t0, FieldMemOperand(a2, generic_offset)); // In delay slot. | 2669 __ sw(t0, FieldMemOperand(a2, generic_offset)); // In delay slot. |
| 2812 | 2670 |
| 2813 __ bind(&uninitialized); | 2671 __ bind(&uninitialized); |
| 2814 | 2672 |
| 2815 // We are going monomorphic, provided we actually have a JSFunction. | 2673 // We are going monomorphic, provided we actually have a JSFunction. |
| 2816 __ JumpIfSmi(a1, &miss); | 2674 __ JumpIfSmi(a1, &miss); |
| 2817 | 2675 |
| 2818 // Goto miss case if we do not have a function. | 2676 // Goto miss case if we do not have a function. |
| 2819 __ GetObjectType(a1, t0, t0); | 2677 __ GetObjectType(a1, t0, t0); |
| 2820 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE)); | 2678 __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE)); |
| (...skipping 19 matching lines...) | |
| 2840 // a3 - slot | 2698 // a3 - slot |
| 2841 // a1 - function | 2699 // a1 - function |
| 2842 { | 2700 { |
| 2843 FrameScope scope(masm, StackFrame::INTERNAL); | 2701 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2844 CreateWeakCellStub create_stub(masm->isolate()); | 2702 CreateWeakCellStub create_stub(masm->isolate()); |
| 2845 __ Push(a1); | 2703 __ Push(a1); |
| 2846 __ CallStub(&create_stub); | 2704 __ CallStub(&create_stub); |
| 2847 __ Pop(a1); | 2705 __ Pop(a1); |
| 2848 } | 2706 } |
| 2849 | 2707 |
| 2850 __ Branch(&have_js_function); | 2708 __ Branch(&call); |
| 2851 | 2709 |
| 2852 // We are here because tracing is on or we encountered a MISS case we can't | 2710 // We are here because tracing is on or we encountered a MISS case we can't |
| 2853 // handle here. | 2711 // handle here. |
| 2854 __ bind(&miss); | 2712 __ bind(&miss); |
| 2855 GenerateMiss(masm); | 2713 GenerateMiss(masm); |
| 2856 | 2714 |
| 2857 // the slow case | 2715 __ Branch(&call); |
| 2858 __ bind(&slow_start); | |
| 2859 // Check that the function is really a JavaScript function. | |
| 2860 // a1: pushed function (to be verified) | |
| 2861 __ JumpIfSmi(a1, &slow); | |
| 2862 | |
| 2863 // Goto slow case if we do not have a function. | |
| 2864 __ GetObjectType(a1, t0, t0); | |
| 2865 __ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); | |
| 2866 __ Branch(&have_js_function); | |
| 2867 } | 2716 } |
| 2868 | 2717 |
| 2869 | 2718 |
| 2870 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2719 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 2871 FrameScope scope(masm, StackFrame::INTERNAL); | 2720 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2872 | 2721 |
| 2873 // Push the receiver and the function and feedback info. | 2722 // Push the receiver and the function and feedback info. |
| 2874 __ Push(a1, a2, a3); | 2723 __ Push(a1, a2, a3); |
| 2875 | 2724 |
| 2876 // Call the entry. | 2725 // Call the entry. |
| (...skipping 2909 matching lines...) | |
| 5786 MemOperand(fp, 6 * kPointerSize), NULL); | 5635 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5787 } | 5636 } |
| 5788 | 5637 |
| 5789 | 5638 |
| 5790 #undef __ | 5639 #undef __ |
| 5791 | 5640 |
| 5792 } // namespace internal | 5641 } // namespace internal |
| 5793 } // namespace v8 | 5642 } // namespace v8 |
| 5794 | 5643 |
| 5795 #endif // V8_TARGET_ARCH_MIPS | 5644 #endif // V8_TARGET_ARCH_MIPS |
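
The `sll`/`Addu`/`lw` sequences that recur in `CallICStub::Generate` turn the Smi-tagged slot index in a3 into a byte displacement from the feedback vector in a2: shifting by `kPointerSizeLog2 - kSmiTagSize` untags the Smi and scales it to pointer size in a single instruction, and `FixedArray::kHeaderSize` skips the array header. Below is a minimal standalone C++ sketch of that arithmetic, not V8 code; the constants assume 32-bit MIPS and the two-word FixedArray header is an assumption, and the heap-object tag that `FieldMemOperand` folds in is ignored.

```cpp
#include <cstdint>
#include <cstdio>

namespace {
constexpr int kSmiTagSize = 1;       // low bit tags Smis on 32-bit targets
constexpr int kPointerSizeLog2 = 2;  // 4-byte pointers on 32-bit MIPS
constexpr int kPointerSize = 1 << kPointerSizeLog2;
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length (assumed layout)

// Mirrors: sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
//          Addu(t0, a2, Operand(t0));
//          lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
uint32_t SlotByteOffset(uint32_t smi_tagged_index) {
  uint32_t scaled = smi_tagged_index << (kPointerSizeLog2 - kSmiTagSize);
  return scaled + kFixedArrayHeaderSize;
}
}  // namespace

int main() {
  uint32_t smi_index = 3u << kSmiTagSize;  // Smi-encoded slot index 3
  std::printf("byte offset of slot 3: %u\n", SlotByteOffset(smi_index));
  // Expected: 3 * 4 + 8 = 20 bytes past the start of the FixedArray.
  return 0;
}
```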
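On the NEW side, `CallICStub::Generate` collapses the old `have_js_function`/`slow_start` paths into a single `&call` exit that loads argc into a0 and tail-calls the generic Call builtin, which now performs the smi/non-function, class-constructor, and sloppy-receiver-wrapping checks that the deleted helpers (`EmitContinueIfStrictOrNative`, `EmitSlowCase`, `EmitWrapCase`, `EmitClassConstructorCallCheck`) used to emit inline. The following schematic C++ sketch shows that control flow with hypothetical names and types; it is an illustration of the dispatch shape, not the V8 API.

```cpp
#include <cstdio>

// Hypothetical stand-ins for the feedback-slot states the stub distinguishes.
enum class Feedback {
  kMonomorphicMatch,  // weak cell in the slot matches the callee in a1
  kAllocationSite,    // Array function fast path
  kMegamorphic,       // already generic
  kUninitialized,     // first call through this slot
  kOther              // anything else: transition to megamorphic via miss
};

// Stand-in for the generic Call builtin: the single "&call" exit in the stub.
void CallBuiltin(int argc) { std::printf("Call builtin, argc=%d\n", argc); }

void CallIC(Feedback slot, int argc) {
  switch (slot) {
    case Feedback::kMonomorphicMatch:
      std::printf("increment call count\n");
      break;
    case Feedback::kAllocationSite:
      std::printf("HandleArrayCase\n");  // does not reach the shared exit
      return;
    case Feedback::kMegamorphic:
      break;
    case Feedback::kUninitialized:
      std::printf("create weak cell, go monomorphic\n");
      break;
    case Feedback::kOther:
      std::printf("mark slot megamorphic, update counters\n");
      break;
  }
  CallBuiltin(argc);  // every surviving path funnels into the Call builtin
}

int main() {
  CallIC(Feedback::kUninitialized, 2);
  CallIC(Feedback::kMonomorphicMatch, 2);
  return 0;
}
```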