| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 2511 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2522 __ b(&done); | 2522 __ b(&done); |
| 2523 | 2523 |
| 2524 __ bind(&not_array_function); | 2524 __ bind(&not_array_function); |
| 2525 | 2525 |
| 2526 CreateWeakCellStub weak_cell_stub(masm->isolate()); | 2526 CreateWeakCellStub weak_cell_stub(masm->isolate()); |
| 2527 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); | 2527 CallStubInRecordCallTarget(masm, &weak_cell_stub, is_super); |
| 2528 __ bind(&done); | 2528 __ bind(&done); |
| 2529 } | 2529 } |
| 2530 | 2530 |
| 2531 | 2531 |
| 2532 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { | |
| 2533 // ----------- S t a t e ------------- | |
| 2534 // -- r4 : the function to call | |
| 2535 // -- r6 : the function's shared function info | |
| 2536 // ----------------------------------- | |
| 2537 // Do not transform the receiver for strict mode functions and natives. | |
| 2538 __ lwz(r7, FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset)); | |
| 2539 __ andi(r0, r7, Operand((1 << SharedFunctionInfo::kStrictModeBit) | | |
| 2540 (1 << SharedFunctionInfo::kNativeBit))); | |
| 2541 __ bne(cont, cr0); | |
| 2542 } | |
| 2543 | |
| 2544 | |
| 2545 static void EmitSlowCase(MacroAssembler* masm, int argc) { | |
| 2546 __ mov(r3, Operand(argc)); | |
| 2547 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
| 2548 } | |
| 2549 | |
| 2550 | |
| 2551 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) { | |
| 2552 // Wrap the receiver and patch it back onto the stack. | |
| 2553 { | |
| 2554 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL); | |
| 2555 __ push(r4); | |
| 2556 __ mr(r3, r6); | |
| 2557 ToObjectStub stub(masm->isolate()); | |
| 2558 __ CallStub(&stub); | |
| 2559 __ pop(r4); | |
| 2560 } | |
| 2561 __ StoreP(r3, MemOperand(sp, argc * kPointerSize), r0); | |
| 2562 __ b(cont); | |
| 2563 } | |
| 2564 | |
| 2565 | |
| 2566 static void EmitClassConstructorCallCheck(MacroAssembler* masm) { | |
| 2567 // ----------- S t a t e ------------- | |
| 2568 // -- r4 : the function to call | |
| 2569 // -- r6 : the function's shared function info | |
| 2570 // ----------------------------------- | |
| 2571 // ClassConstructor Check: ES6 section 9.2.1 [[Call]] | |
| 2572 Label non_class_constructor; | |
| 2573 // Check whether the current function is a classConstructor. | |
| 2574 __ lwz(r7, FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset)); | |
| 2575 __ TestBitMask(r7, SharedFunctionInfo::kClassConstructorBits, r0); | |
| 2576 __ beq(&non_class_constructor, cr0); | |
| 2577 // If we call a classConstructor Function throw a TypeError | |
| 2578 // indirectly via the CallFunction builtin. | |
| 2579 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); | |
| 2580 __ bind(&non_class_constructor); | |
| 2581 } | |
| 2582 | |
| 2583 | |
| 2584 static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, | |
| 2585 bool needs_checks, bool call_as_method) { | |
| 2586 // r4 : the function to call | |
| 2587 Label slow, wrap, cont; | |
| 2588 | |
| 2589 if (needs_checks) { | |
| 2590 // Check that the function is really a JavaScript function. | |
| 2591 // r4: pushed function (to be verified) | |
| 2592 __ JumpIfSmi(r4, &slow); | |
| 2593 | |
| 2594 // Goto slow case if we do not have a function. | |
| 2595 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); | |
| 2596 __ bne(&slow); | |
| 2597 } | |
| 2598 | |
| 2599 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | |
| 2600 EmitClassConstructorCallCheck(masm); | |
| 2601 | |
| 2602 // Fast-case: Invoke the function now. | |
| 2603 // r4: pushed function | |
| 2604 ParameterCount actual(argc); | |
| 2605 | |
| 2606 if (call_as_method) { | |
| 2607 if (needs_checks) { | |
| 2608 EmitContinueIfStrictOrNative(masm, &cont); | |
| 2609 } | |
| 2610 | |
| 2611 // Compute the receiver in sloppy mode. | |
| 2612 __ LoadP(r6, MemOperand(sp, argc * kPointerSize), r0); | |
| 2613 | |
| 2614 if (needs_checks) { | |
| 2615 __ JumpIfSmi(r6, &wrap); | |
| 2616 __ CompareObjectType(r6, r7, r7, FIRST_SPEC_OBJECT_TYPE); | |
| 2617 __ blt(&wrap); | |
| 2618 } else { | |
| 2619 __ b(&wrap); | |
| 2620 } | |
| 2621 | |
| 2622 __ bind(&cont); | |
| 2623 } | |
| 2624 | |
| 2625 __ InvokeFunction(r4, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2626 | |
| 2627 if (needs_checks) { | |
| 2628 // Slow-case: Non-function called. | |
| 2629 __ bind(&slow); | |
| 2630 EmitSlowCase(masm, argc); | |
| 2631 } | |
| 2632 | |
| 2633 if (call_as_method) { | |
| 2634 __ bind(&wrap); | |
| 2635 EmitWrapCase(masm, argc, &cont); | |
| 2636 } | |
| 2637 } | |
| 2638 | |
| 2639 | |
| 2640 void CallFunctionStub::Generate(MacroAssembler* masm) { | |
| 2641 CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod()); | |
| 2642 } | |
| 2643 | |
| 2644 | |
| 2645 void CallConstructStub::Generate(MacroAssembler* masm) { | 2532 void CallConstructStub::Generate(MacroAssembler* masm) { |
| 2646 // r3 : number of arguments | 2533 // r3 : number of arguments |
| 2647 // r4 : the function to call | 2534 // r4 : the function to call |
| 2648 // r5 : feedback vector | 2535 // r5 : feedback vector |
| 2649 // r6 : slot in feedback vector (Smi, for RecordCallTarget) | 2536 // r6 : slot in feedback vector (Smi, for RecordCallTarget) |
| 2650 // r7 : original constructor (for IsSuperConstructorCall) | 2537 // r7 : original constructor (for IsSuperConstructorCall) |
| 2651 | 2538 |
| 2652 Label non_function; | 2539 Label non_function; |
| 2653 // Check that the function is not a smi. | 2540 // Check that the function is not a smi. |
| 2654 __ JumpIfSmi(r4, &non_function); | 2541 __ JumpIfSmi(r4, &non_function); |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2725 | 2612 |
| 2726 | 2613 |
| 2727 void CallICStub::Generate(MacroAssembler* masm) { | 2614 void CallICStub::Generate(MacroAssembler* masm) { |
| 2728 // r4 - function | 2615 // r4 - function |
| 2729 // r6 - slot id (Smi) | 2616 // r6 - slot id (Smi) |
| 2730 // r5 - vector | 2617 // r5 - vector |
| 2731 const int with_types_offset = | 2618 const int with_types_offset = |
| 2732 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2619 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
| 2733 const int generic_offset = | 2620 const int generic_offset = |
| 2734 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2621 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
| 2735 Label extra_checks_or_miss, slow_start; | 2622 Label extra_checks_or_miss, call; |
| 2736 Label slow, wrap, cont; | |
| 2737 Label have_js_function; | |
| 2738 int argc = arg_count(); | 2623 int argc = arg_count(); |
| 2739 ParameterCount actual(argc); | 2624 ParameterCount actual(argc); |
| 2740 | 2625 |
| 2741 // The checks. First, does r4 match the recorded monomorphic target? | 2626 // The checks. First, does r4 match the recorded monomorphic target? |
| 2742 __ SmiToPtrArrayOffset(r9, r6); | 2627 __ SmiToPtrArrayOffset(r9, r6); |
| 2743 __ add(r9, r5, r9); | 2628 __ add(r9, r5, r9); |
| 2744 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize)); | 2629 __ LoadP(r7, FieldMemOperand(r9, FixedArray::kHeaderSize)); |
| 2745 | 2630 |
| 2746 // We don't know that we have a weak cell. We might have a private symbol | 2631 // We don't know that we have a weak cell. We might have a private symbol |
| 2747 // or an AllocationSite, but the memory is safe to examine. | 2632 // or an AllocationSite, but the memory is safe to examine. |
| (...skipping 16 matching lines...) Expand all Loading... |
| 2764 // The compare above could have been a SMI/SMI comparison. Guard against this | 2649 // The compare above could have been a SMI/SMI comparison. Guard against this |
| 2765 // convincing us that we have a monomorphic JSFunction. | 2650 // convincing us that we have a monomorphic JSFunction. |
| 2766 __ JumpIfSmi(r4, &extra_checks_or_miss); | 2651 __ JumpIfSmi(r4, &extra_checks_or_miss); |
| 2767 | 2652 |
| 2768 // Increment the call count for monomorphic function calls. | 2653 // Increment the call count for monomorphic function calls. |
| 2769 const int count_offset = FixedArray::kHeaderSize + kPointerSize; | 2654 const int count_offset = FixedArray::kHeaderSize + kPointerSize; |
| 2770 __ LoadP(r6, FieldMemOperand(r9, count_offset)); | 2655 __ LoadP(r6, FieldMemOperand(r9, count_offset)); |
| 2771 __ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0); | 2656 __ AddSmiLiteral(r6, r6, Smi::FromInt(CallICNexus::kCallCountIncrement), r0); |
| 2772 __ StoreP(r6, FieldMemOperand(r9, count_offset), r0); | 2657 __ StoreP(r6, FieldMemOperand(r9, count_offset), r0); |
| 2773 | 2658 |
| 2774 __ bind(&have_js_function); | 2659 __ bind(&call); |
| 2775 | 2660 __ mov(r3, Operand(argc)); |
| 2776 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); | 2661 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 2777 EmitClassConstructorCallCheck(masm); | |
| 2778 | |
| 2779 if (CallAsMethod()) { | |
| 2780 EmitContinueIfStrictOrNative(masm, &cont); | |
| 2781 // Compute the receiver in sloppy mode. | |
| 2782 __ LoadP(r6, MemOperand(sp, argc * kPointerSize), r0); | |
| 2783 | |
| 2784 __ JumpIfSmi(r6, &wrap); | |
| 2785 __ CompareObjectType(r6, r7, r7, FIRST_SPEC_OBJECT_TYPE); | |
| 2786 __ blt(&wrap); | |
| 2787 | |
| 2788 __ bind(&cont); | |
| 2789 } | |
| 2790 | |
| 2791 __ InvokeFunction(r4, actual, JUMP_FUNCTION, NullCallWrapper()); | |
| 2792 | |
| 2793 __ bind(&slow); | |
| 2794 EmitSlowCase(masm, argc); | |
| 2795 | |
| 2796 if (CallAsMethod()) { | |
| 2797 __ bind(&wrap); | |
| 2798 EmitWrapCase(masm, argc, &cont); | |
| 2799 } | |
| 2800 | 2662 |
| 2801 __ bind(&extra_checks_or_miss); | 2663 __ bind(&extra_checks_or_miss); |
| 2802 Label uninitialized, miss, not_allocation_site; | 2664 Label uninitialized, miss, not_allocation_site; |
| 2803 | 2665 |
| 2804 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex); | 2666 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex); |
| 2805 __ beq(&slow_start); | 2667 __ beq(&call); |
| 2806 | 2668 |
| 2807 // Verify that r7 contains an AllocationSite | 2669 // Verify that r7 contains an AllocationSite |
| 2808 __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset)); | 2670 __ LoadP(r8, FieldMemOperand(r7, HeapObject::kMapOffset)); |
| 2809 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); | 2671 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); |
| 2810 __ bne(&not_allocation_site); | 2672 __ bne(&not_allocation_site); |
| 2811 | 2673 |
| 2812 // We have an allocation site. | 2674 // We have an allocation site. |
| 2813 HandleArrayCase(masm, &miss); | 2675 HandleArrayCase(masm, &miss); |
| 2814 | 2676 |
| 2815 __ bind(&not_allocation_site); | 2677 __ bind(&not_allocation_site); |
| (...skipping 14 matching lines...) Expand all Loading... |
| 2830 __ bne(&miss); | 2692 __ bne(&miss); |
| 2831 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); | 2693 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); |
| 2832 __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0); | 2694 __ StoreP(ip, FieldMemOperand(r9, FixedArray::kHeaderSize), r0); |
| 2833 // We have to update statistics for runtime profiling. | 2695 // We have to update statistics for runtime profiling. |
| 2834 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); | 2696 __ LoadP(r7, FieldMemOperand(r5, with_types_offset)); |
| 2835 __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0); | 2697 __ SubSmiLiteral(r7, r7, Smi::FromInt(1), r0); |
| 2836 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); | 2698 __ StoreP(r7, FieldMemOperand(r5, with_types_offset), r0); |
| 2837 __ LoadP(r7, FieldMemOperand(r5, generic_offset)); | 2699 __ LoadP(r7, FieldMemOperand(r5, generic_offset)); |
| 2838 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); | 2700 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); |
| 2839 __ StoreP(r7, FieldMemOperand(r5, generic_offset), r0); | 2701 __ StoreP(r7, FieldMemOperand(r5, generic_offset), r0); |
| 2840 __ b(&slow_start); | 2702 __ b(&call); |
| 2841 | 2703 |
| 2842 __ bind(&uninitialized); | 2704 __ bind(&uninitialized); |
| 2843 | 2705 |
| 2844 // We are going monomorphic, provided we actually have a JSFunction. | 2706 // We are going monomorphic, provided we actually have a JSFunction. |
| 2845 __ JumpIfSmi(r4, &miss); | 2707 __ JumpIfSmi(r4, &miss); |
| 2846 | 2708 |
| 2847 // Goto miss case if we do not have a function. | 2709 // Goto miss case if we do not have a function. |
| 2848 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); | 2710 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); |
| 2849 __ bne(&miss); | 2711 __ bne(&miss); |
| 2850 | 2712 |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2868 // r6 - slot | 2730 // r6 - slot |
| 2869 // r4 - function | 2731 // r4 - function |
| 2870 { | 2732 { |
| 2871 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2733 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 2872 CreateWeakCellStub create_stub(masm->isolate()); | 2734 CreateWeakCellStub create_stub(masm->isolate()); |
| 2873 __ Push(r4); | 2735 __ Push(r4); |
| 2874 __ CallStub(&create_stub); | 2736 __ CallStub(&create_stub); |
| 2875 __ Pop(r4); | 2737 __ Pop(r4); |
| 2876 } | 2738 } |
| 2877 | 2739 |
| 2878 __ b(&have_js_function); | 2740 __ b(&call); |
| 2879 | 2741 |
| 2880 // We are here because tracing is on or we encountered a MISS case we can't | 2742 // We are here because tracing is on or we encountered a MISS case we can't |
| 2881 // handle here. | 2743 // handle here. |
| 2882 __ bind(&miss); | 2744 __ bind(&miss); |
| 2883 GenerateMiss(masm); | 2745 GenerateMiss(masm); |
| 2884 | 2746 |
| 2885 // the slow case | 2747 __ b(&call); |
| 2886 __ bind(&slow_start); | |
| 2887 // Check that the function is really a JavaScript function. | |
| 2888 // r4: pushed function (to be verified) | |
| 2889 __ JumpIfSmi(r4, &slow); | |
| 2890 | |
| 2891 // Goto slow case if we do not have a function. | |
| 2892 __ CompareObjectType(r4, r7, r7, JS_FUNCTION_TYPE); | |
| 2893 __ bne(&slow); | |
| 2894 __ b(&have_js_function); | |
| 2895 } | 2748 } |
| 2896 | 2749 |
| 2897 | 2750 |
| 2898 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2751 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 2899 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 2752 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 2900 | 2753 |
| 2901 // Push the function and feedback info. | 2754 // Push the function and feedback info. |
| 2902 __ Push(r4, r5, r6); | 2755 __ Push(r4, r5, r6); |
| 2903 | 2756 |
| 2904 // Call the entry. | 2757 // Call the entry. |
| (...skipping 2965 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5870 kStackUnwindSpace, NULL, | 5723 kStackUnwindSpace, NULL, |
| 5871 MemOperand(fp, 6 * kPointerSize), NULL); | 5724 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5872 } | 5725 } |
| 5873 | 5726 |
| 5874 | 5727 |
| 5875 #undef __ | 5728 #undef __ |
| 5876 } // namespace internal | 5729 } // namespace internal |
| 5877 } // namespace v8 | 5730 } // namespace v8 |
| 5878 | 5731 |
| 5879 #endif // V8_TARGET_ARCH_PPC | 5732 #endif // V8_TARGET_ARCH_PPC |
| OLD | NEW |