OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 2694 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2705 CallFunctionNoFeedback(masm, | 2705 CallFunctionNoFeedback(masm, |
2706 arg_count(), | 2706 arg_count(), |
2707 true, | 2707 true, |
2708 CallAsMethod()); | 2708 CallAsMethod()); |
2709 | 2709 |
2710 // Unreachable. | 2710 // Unreachable. |
2711 __ stop("Unexpected code address"); | 2711 __ stop("Unexpected code address"); |
2712 } | 2712 } |
2713 | 2713 |
2714 | 2714 |
void CallIC_RoundStub::Generate(MacroAssembler* masm) {
  // Specialized CallIC stub for calls to the Math.round builtin.  Before
  // completing the call it inspects the (first) argument and, when the call
  // would produce -0, records a sentinel in the type feedback vector so the
  // optimizing compiler can account for the -0 result.
  //
  // Incoming register contract (mirrors CallICStub):
  //   r1 - function being called
  //   r2 - type feedback vector
  //   r3 - slot id (Smi)
  Register function = r1;
  Register vector = r2;
  Register slot = r3;

  Register temp1 = r0;
  Register temp2 = r4;
  DwVfpRegister double_temp1 = d1;
  DwVfpRegister double_temp2 = d2;
  Label tail, miss;

  // Ensure nobody has snuck in another function.
  __ BranchIfNotBuiltin(function, temp1, kMathRound, &miss);

  if (arg_count() > 0) {
    // Load the first call argument from the stack (arguments are pushed
    // left-to-right, so the first one is deepest).
    __ ldr(temp1, MemOperand(sp, (arg_count() - 1) * kPointerSize));
    Handle<Map> map = isolate()->factory()->heap_number_map();
    // Smis and non-HeapNumber objects can never round to -0; skip straight
    // to the actual call.
    __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK);
    // Untag the HeapNumber pointer and load its double payload.
    __ sub(temp1, temp1, Operand(kHeapObjectTag));
    __ vldr(double_temp1, temp1, HeapNumber::kValueOffset);

    // If the number is >0, it doesn't round to -0
    __ Vmov(double_temp2, 0, temp1);
    __ VFPCompareAndSetFlags(double_temp1, double_temp2);
    __ b(gt, &tail);

    // If the number is <-.5, it doesn't round to -0
    __ Vmov(double_temp2, -.5, temp1);
    __ VFPCompareAndSetFlags(double_temp1, double_temp2);
    __ b(lt, &tail);

    // +0 doesn't round to -0
    // NOTE(review): only the upper word of the double is compared against
    // the -0 bit pattern (0x80000000), so the sentinel is recorded only when
    // the argument is exactly -0 (or a negative denormal with a zero upper
    // mantissa).  Arguments in (-0.5, 0) also round to -0 but fall through
    // to &tail without being recorded — confirm this is the intended
    // feedback granularity.
    __ VmovHigh(temp1, double_temp1);
    __ cmp(temp1, Operand(0x80000000));
    __ b(ne, &tail);

    // Record the -0 result in the feedback vector.  The slot id is a Smi
    // (value << 1 on ARM); one more left shift turns it into a byte offset
    // (index * kPointerSize).  The sentinel is stored one element past the
    // slot entry itself.
    __ mov(temp1, Operand(slot, LSL, 1));
    __ add(temp1, temp1, vector);
    __ Move(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel)));
    __ str(temp2,
           FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize));
  }

  __ bind(&tail);
  // The slow case, we need this no matter what to complete a call after a miss.
  CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod());

  // Unreachable.
  __ stop("Unreachable");

  __ bind(&miss);
  GenerateMiss(masm);
  __ b(&tail);
}
| 2769 |
| 2770 |
void CallIC_FloorStub::Generate(MacroAssembler* masm) {
  // Specialized CallIC stub for calls to the Math.floor builtin.  Before
  // completing the call it checks whether the (first) argument is exactly
  // -0 — the only input for which Math.floor returns -0 — and records a
  // sentinel in the type feedback vector for the optimizing compiler.
  //
  // Incoming register contract (mirrors CallICStub):
  //   r1 - function being called
  //   r2 - type feedback vector
  //   r3 - slot id (Smi)
  Register function = r1;
  Register vector = r2;
  Register slot = r3;

  Register temp1 = r0;
  Register temp2 = r4;
  DwVfpRegister double_temp = d1;
  Label tail, miss;

  // Ensure nobody has snuck in another function.
  __ BranchIfNotBuiltin(function, temp1, kMathFloor, &miss);

  if (arg_count() > 0) {
    // Load the first call argument from the stack.
    __ ldr(temp1, MemOperand(sp, (arg_count() - 1) * kPointerSize));
    Handle<Map> map = isolate()->factory()->heap_number_map();
    // Smis and non-HeapNumber objects can never floor to -0.
    __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK);
    // Untag the HeapNumber pointer and load its double payload.
    __ sub(temp1, temp1, Operand(kHeapObjectTag));
    __ vldr(double_temp, temp1, HeapNumber::kValueOffset);

    // Only -0 floors to -0.
    // Check both 32-bit halves against the IEEE-754 bit pattern of -0:
    // high word 0x80000000 (sign bit only) and low word 0.
    __ VmovHigh(temp1, double_temp);
    __ cmp(temp1, Operand(0x80000000));
    __ b(ne, &tail);
    __ VmovLow(temp1, double_temp);
    __ cmp(temp1, Operand(0));
    __ b(ne, &tail);

    // Record the -0 result in the feedback vector.  The slot id is a Smi
    // (value << 1 on ARM); one more left shift turns it into a byte offset
    // (index * kPointerSize).  The sentinel is stored one element past the
    // slot entry itself.
    __ mov(temp1, Operand(slot, LSL, 1));
    __ add(temp1, temp1, vector);
    __ Move(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel)));
    __ str(temp2,
           FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize));
  }

  __ bind(&tail);
  // The slow case, we need this no matter what to complete a call after a miss.
  CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod());

  // Unreachable.
  __ stop("Unreachable");

  __ bind(&miss);
  GenerateMiss(masm);
  __ b(&tail);
}
| 2817 |
| 2818 |
void CallIC_CeilStub::Generate(MacroAssembler* masm) {
  // Specialized CallIC stub for calls to the Math.ceil builtin.  Before
  // completing the call it inspects the (first) argument and, when the call
  // would produce -0, records a sentinel in the type feedback vector for
  // the optimizing compiler.
  //
  // Incoming register contract (mirrors CallICStub):
  //   r1 - function being called
  //   r2 - type feedback vector
  //   r3 - slot id (Smi)
  Register function = r1;
  Register vector = r2;
  Register slot = r3;

  Register temp1 = r0;
  Register temp2 = r4;
  DwVfpRegister double_temp1 = d1;
  DwVfpRegister double_temp2 = d2;
  Label tail, miss;

  // Ensure nobody has snuck in another function.
  __ BranchIfNotBuiltin(function, temp1, kMathCeil, &miss);

  if (arg_count() > 0) {
    // Load the first call argument from the stack.
    __ ldr(temp1, MemOperand(sp, (arg_count() - 1) * kPointerSize));
    Handle<Map> map = isolate()->factory()->heap_number_map();
    // Smis and non-HeapNumber objects can never ceil to -0.
    __ CheckMap(temp1, temp2, map, &tail, DO_SMI_CHECK);
    // Untag the HeapNumber pointer and load its double payload.
    __ sub(temp1, temp1, Operand(kHeapObjectTag));
    __ vldr(double_temp1, temp1, HeapNumber::kValueOffset);

    // If the number is >0, it doesn't round to -0
    __ Vmov(double_temp2, 0, temp1);
    __ VFPCompareAndSetFlags(double_temp1, double_temp2);
    __ b(gt, &tail);

    // If the number is <=-1, it doesn't round to -0
    __ Vmov(double_temp2, -1, temp1);
    __ VFPCompareAndSetFlags(double_temp1, double_temp2);
    __ b(le, &tail);

    // +0 doesn't round to -0.
    // NOTE(review): only the upper word is compared against the -0 bit
    // pattern (0x80000000), so the sentinel is recorded only for an argument
    // that is exactly -0 (or a negative denormal with a zero upper mantissa,
    // which also ceils to -0).  Arguments in (-1, 0) also ceil to -0 but
    // fall through to &tail without being recorded — confirm this is the
    // intended feedback granularity (same pattern as CallIC_RoundStub).
    __ VmovHigh(temp1, double_temp1);
    __ cmp(temp1, Operand(0x80000000));
    __ b(ne, &tail);

    // Record the -0 result in the feedback vector.  The slot id is a Smi
    // (value << 1 on ARM); one more left shift turns it into a byte offset
    // (index * kPointerSize).  The sentinel is stored one element past the
    // slot entry itself.
    __ mov(temp1, Operand(slot, LSL, 1));
    __ add(temp1, temp1, vector);
    __ Move(temp2, Operand(Smi::FromInt(kHasReturnedMinusZeroSentinel)));
    __ str(temp2,
           FieldMemOperand(temp1, FixedArray::kHeaderSize + kPointerSize));
  }

  __ bind(&tail);
  // The slow case, we need this no matter what to complete a call after a miss.
  CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod());

  // Unreachable.
  __ stop("Unreachable");

  __ bind(&miss);
  GenerateMiss(masm);
  __ b(&tail);
}
| 2873 |
| 2874 |
2715 void CallICStub::Generate(MacroAssembler* masm) { | 2875 void CallICStub::Generate(MacroAssembler* masm) { |
2716 // r1 - function | 2876 // r1 - function |
2717 // r3 - slot id (Smi) | 2877 // r3 - slot id (Smi) |
2718 // r2 - vector | 2878 // r2 - vector |
2719 const int with_types_offset = | 2879 const int with_types_offset = |
2720 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2880 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2721 const int generic_offset = | 2881 const int generic_offset = |
2722 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2882 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2723 Label extra_checks_or_miss, slow_start; | 2883 Label extra_checks_or_miss, slow_start; |
2724 Label slow, non_function, wrap, cont; | 2884 Label slow, non_function, wrap, cont; |
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2815 // Goto miss case if we do not have a function. | 2975 // Goto miss case if we do not have a function. |
2816 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); | 2976 __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); |
2817 __ b(ne, &miss); | 2977 __ b(ne, &miss); |
2818 | 2978 |
2819 // Make sure the function is not the Array() function, which requires special | 2979 // Make sure the function is not the Array() function, which requires special |
2820 // behavior on MISS. | 2980 // behavior on MISS. |
2821 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); | 2981 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4); |
2822 __ cmp(r1, r4); | 2982 __ cmp(r1, r4); |
2823 __ b(eq, &miss); | 2983 __ b(eq, &miss); |
2824 | 2984 |
| 2985 // Some builtin functions require special handling, miss to the runtime. |
| 2986 __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); |
| 2987 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset)); |
| 2988 __ cmp(r0, Operand(Smi::FromInt(0))); |
| 2989 __ b(ne, &miss); |
| 2990 |
2825 // Update stats. | 2991 // Update stats. |
2826 __ ldr(r4, FieldMemOperand(r2, with_types_offset)); | 2992 __ ldr(r4, FieldMemOperand(r2, with_types_offset)); |
2827 __ add(r4, r4, Operand(Smi::FromInt(1))); | 2993 __ add(r4, r4, Operand(Smi::FromInt(1))); |
2828 __ str(r4, FieldMemOperand(r2, with_types_offset)); | 2994 __ str(r4, FieldMemOperand(r2, with_types_offset)); |
2829 | 2995 |
2830 // Store the function. Use a stub since we need a frame for allocation. | 2996 // Store the function. Use a stub since we need a frame for allocation. |
2831 // r2 - vector | 2997 // r2 - vector |
2832 // r3 - slot | 2998 // r3 - slot |
2833 // r1 - function | 2999 // r1 - function |
2834 { | 3000 { |
(...skipping 1523 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4358 } | 4524 } |
4359 | 4525 |
4360 | 4526 |
4361 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | 4527 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { |
4362 EmitLoadTypeFeedbackVector(masm, r2); | 4528 EmitLoadTypeFeedbackVector(masm, r2); |
4363 CallIC_ArrayStub stub(isolate(), state()); | 4529 CallIC_ArrayStub stub(isolate(), state()); |
4364 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 4530 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
4365 } | 4531 } |
4366 | 4532 |
4367 | 4533 |
| 4534 void CallIC_RoundTrampolineStub::Generate(MacroAssembler* masm) { |
| 4535 EmitLoadTypeFeedbackVector(masm, r2); |
| 4536 CallIC_RoundStub stub(isolate(), state()); |
| 4537 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4538 } |
| 4539 |
| 4540 |
| 4541 void CallIC_FloorTrampolineStub::Generate(MacroAssembler* masm) { |
| 4542 EmitLoadTypeFeedbackVector(masm, r2); |
| 4543 CallIC_FloorStub stub(isolate(), state()); |
| 4544 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4545 } |
| 4546 |
| 4547 |
| 4548 void CallIC_CeilTrampolineStub::Generate(MacroAssembler* masm) { |
| 4549 EmitLoadTypeFeedbackVector(masm, r2); |
| 4550 CallIC_CeilStub stub(isolate(), state()); |
| 4551 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 4552 } |
| 4553 |
| 4554 |
void VectorRawLoadStub::Generate(MacroAssembler* masm) {
  // Non-trampoline entry point: delegates to the shared implementation with
  // the flag cleared.  The flag's exact meaning lives in GenerateImpl (not
  // visible here) — presumably it distinguishes the trampoline variant,
  // which passes true; confirm against GenerateImpl.
  GenerateImpl(masm, false);
}
4371 | 4558 |
4372 | 4559 |
void VectorRawLoadStub::GenerateForTrampoline(MacroAssembler* masm) {
  // Trampoline entry point: delegates to the shared implementation with the
  // flag set (the plain Generate() passes false).  See GenerateImpl for the
  // flag's exact semantics — not visible in this chunk.
  GenerateImpl(masm, true);
}
4376 | 4563 |
4377 | 4564 |
(...skipping 905 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5283 kStackUnwindSpace, NULL, | 5470 kStackUnwindSpace, NULL, |
5284 MemOperand(fp, 6 * kPointerSize), NULL); | 5471 MemOperand(fp, 6 * kPointerSize), NULL); |
5285 } | 5472 } |
5286 | 5473 |
5287 | 5474 |
5288 #undef __ | 5475 #undef __ |
5289 | 5476 |
5290 } } // namespace v8::internal | 5477 } } // namespace v8::internal |
5291 | 5478 |
5292 #endif // V8_TARGET_ARCH_ARM | 5479 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |