OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/base/division-by-constant.h" | 8 #include "src/base/division-by-constant.h" |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 983 matching lines...)
994 | 994 |
995 void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) { | 995 void MacroAssembler::Cvttss2siq(Register dst, const Operand& src) { |
996 if (CpuFeatures::IsSupported(AVX)) { | 996 if (CpuFeatures::IsSupported(AVX)) { |
997 CpuFeatureScope scope(this, AVX); | 997 CpuFeatureScope scope(this, AVX); |
998 vcvttss2siq(dst, src); | 998 vcvttss2siq(dst, src); |
999 } else { | 999 } else { |
1000 cvttss2siq(dst, src); | 1000 cvttss2siq(dst, src); |
1001 } | 1001 } |
1002 } | 1002 } |
1003 | 1003 |
1004 | |
1005 void MacroAssembler::Cvtss2siq(Register dst, XMMRegister src) { | |
1006 if (CpuFeatures::IsSupported(AVX)) { | |
1007 CpuFeatureScope scope(this, AVX); | |
1008 vcvtss2siq(dst, src); | |
1009 } else { | |
1010 cvtss2siq(dst, src); | |
1011 } | |
1012 } | |
1013 | |
1014 | |
1015 void MacroAssembler::Cvtss2siq(Register dst, const Operand& src) { | |
1016 if (CpuFeatures::IsSupported(AVX)) { | |
1017 CpuFeatureScope scope(this, AVX); | |
1018 vcvtss2siq(dst, src); | |
1019 } else { | |
1020 cvtss2siq(dst, src); | |
1021 } | |
1022 } | |
1023 | |
1024 | |
1025 void MacroAssembler::Cvtsd2siq(Register dst, XMMRegister src) { | |
1026 if (CpuFeatures::IsSupported(AVX)) { | |
1027 CpuFeatureScope scope(this, AVX); | |
1028 vcvtsd2siq(dst, src); | |
1029 } else { | |
1030 cvtsd2siq(dst, src); | |
1031 } | |
1032 } | |
1033 | |
1034 | |
1035 void MacroAssembler::Cvtsd2siq(Register dst, const Operand& src) { | |
1036 if (CpuFeatures::IsSupported(AVX)) { | |
1037 CpuFeatureScope scope(this, AVX); | |
1038 vcvtsd2siq(dst, src); | |
1039 } else { | |
1040 cvtsd2siq(dst, src); | |
1041 } | |
1042 } | |
1043 | |
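The removed Cvtss2siq/Cvtsd2siq wrappers differ from the truncating Cvttss2siq/Cvttsd2siq ones kept above in a way that matters for the rounding fallbacks further down in this diff: cvttss2siq always truncates toward zero, while cvtss2siq rounds according to the current MXCSR rounding mode. A minimal scalar sketch of that contrast (illustrative only, not V8 code; std::llrintf stands in for the MXCSR-honoring conversion):

// Illustrative only (not V8 code): truncating vs. rounding-mode conversion.
#include <cmath>
#include <cstdint>

int64_t TruncatingConvert(float f) { return static_cast<int64_t>(f); }  // like cvttss2siq
int64_t RoundingConvert(float f)   { return std::llrintf(f); }          // like cvtss2siq

// TruncatingConvert(2.5f) == 2 in every rounding mode; RoundingConvert(2.5f)
// is 2 under the default round-to-nearest-even, but 3 after
// std::fesetround(FE_UPWARD).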
1044 | 1004 |
1045 void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) { | 1005 void MacroAssembler::Cvttsd2siq(Register dst, XMMRegister src) { |
1046 if (CpuFeatures::IsSupported(AVX)) { | 1006 if (CpuFeatures::IsSupported(AVX)) { |
1047 CpuFeatureScope scope(this, AVX); | 1007 CpuFeatureScope scope(this, AVX); |
1048 vcvttsd2siq(dst, src); | 1008 vcvttsd2siq(dst, src); |
1049 } else { | 1009 } else { |
1050 cvttsd2siq(dst, src); | 1010 cvttsd2siq(dst, src); |
1051 } | 1011 } |
1052 } | 1012 } |
1053 | 1013 |
(...skipping 1795 matching lines...)
2849 RoundingMode mode) { | 2809 RoundingMode mode) { |
2850 if (CpuFeatures::IsSupported(AVX)) { | 2810 if (CpuFeatures::IsSupported(AVX)) { |
2851 CpuFeatureScope scope(this, AVX); | 2811 CpuFeatureScope scope(this, AVX); |
2852 vroundss(dst, dst, src, mode); | 2812 vroundss(dst, dst, src, mode); |
2853 } else { | 2813 } else { |
2854 roundss(dst, src, mode); | 2814 roundss(dst, src, mode); |
2855 } | 2815 } |
2856 } | 2816 } |
2857 | 2817 |
2858 | 2818 |
2859 void MacroAssembler::Roundss(XMMRegister dst, XMMRegister src, Register tmp, | |
2860 RoundingMode mode) { | |
2861 if (CpuFeatures::IsSupported(SSE4_1)) { | |
2862 CpuFeatureScope scope(this, SSE4_1); | |
2863 Roundss(dst, src, mode); | |
2864 } else { | |
2865 { | |
2866 // Set the right rounding mode. | |
2867 subq(rsp, Immediate(kPointerSize * 2)); | |
2868 stmxcsr(Operand(rsp, 0)); | |
2869 movl(tmp, Operand(rsp, 0)); | |
2870 andl(tmp, Immediate(0xffff9fff)); | |
2871 orl(tmp, Immediate(mode << 13)); | |
2872 movl(Operand(rsp, kPointerSize), tmp); | |
2873 ldmxcsr(Operand(rsp, kPointerSize)); | |
2874 } | |
2875 | |
2876 // Do rounding by conversion to int64. | |
2877 Cvtss2siq(tmp, src); | |
2878 | |
2879 Label done; | |
2880 Label out_of_range; | |
2881 cmpq(tmp, Immediate(1)); | |
2882 // If the conversion results in INT64_MIN, then the input is outside | |
2883 // int64 range, and due to the limited precision of float32 this means | |
2884 // that the input must have been an integer already. We are therefore | |
2885 // done already. | |
2886 j(overflow, &out_of_range); | |
2887 // Rounding is done by converting the value back to float. | |
2888 Cvtqsi2ss(dst, tmp); | |
2889 if (!dst.is(src)) { | |
2890 jmp(&done); | |
2891 } | |
2892 | |
2893 bind(&out_of_range); | |
2894 if (!dst.is(src)) { | |
2895 movss(dst, src); | |
2896 } | |
2897 | |
2898 bind(&done); | |
2899 // Restore the original rounding mode. | |
2900 ldmxcsr(Operand(rsp, 0)); | |
2901 addq(rsp, Immediate(kPointerSize * 2)); | |
2902 } | |
2903 } | |
2904 | |
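The removed SSE2 fallback above rounds by temporarily rewriting the MXCSR rounding-control field, converting to int64 under the chosen mode, and converting back; an input that overflows int64 (cvtss2siq then yields INT64_MIN, which the cmpq/overflow check detects) is already integral at float32 precision, so the source value is passed through unchanged. A minimal scalar sketch of the same idea, assuming the <cfenv> rounding-mode constants stand in for the MXCSR RC bits (illustrative only, not V8 code):

// Illustrative only (not V8 code). 'mode' is a <cfenv> constant such as
// FE_TONEAREST, FE_DOWNWARD, FE_UPWARD or FE_TOWARDZERO.
#include <cfenv>
#include <cmath>
#include <cstdint>

float RoundssFallbackSketch(float src, int mode) {
  // NaN or |src| >= 2^63: no int64 round-trip is possible, but a float32 of
  // that magnitude (24-bit mantissa) is already an integer, so pass it through.
  if (!(std::fabs(src) < 9223372036854775808.0f)) return src;
  const int saved = std::fegetround();                // analogous to stmxcsr
  std::fesetround(mode);                              // analogous to ldmxcsr with the new RC bits
  const int64_t as_int = std::llrintf(src);           // cvtss2siq: honors the rounding mode
  const float rounded = static_cast<float>(as_int);   // cvtqsi2ss, still under that mode
  std::fesetround(saved);                             // restore the original rounding mode
  return rounded;
}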
2905 | |
2906 void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src, | 2819 void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src, |
2907 RoundingMode mode) { | 2820 RoundingMode mode) { |
2908 if (CpuFeatures::IsSupported(AVX)) { | 2821 if (CpuFeatures::IsSupported(AVX)) { |
2909 CpuFeatureScope scope(this, AVX); | 2822 CpuFeatureScope scope(this, AVX); |
2910 vroundsd(dst, dst, src, mode); | 2823 vroundsd(dst, dst, src, mode); |
2911 } else { | 2824 } else { |
2912 roundsd(dst, src, mode); | 2825 roundsd(dst, src, mode); |
2913 } | 2826 } |
2914 } | 2827 } |
2915 | 2828 |
2916 | |
2917 void MacroAssembler::Roundsd(XMMRegister dst, XMMRegister src, Register tmp, | |
2918 RoundingMode mode) { | |
2919 if (CpuFeatures::IsSupported(SSE4_1)) { | |
2920 CpuFeatureScope scope(this, SSE4_1); | |
2921 Roundsd(dst, src, mode); | |
2922 } else { | |
2923 { | |
2924 // Set the right rounding mode. | |
2925 subq(rsp, Immediate(kPointerSize * 2)); | |
2926 stmxcsr(Operand(rsp, 0)); | |
2927 movl(tmp, Operand(rsp, 0)); | |
2928 andl(tmp, Immediate(0xffff9fff)); | |
2929 orl(tmp, Immediate(mode << 13)); | |
2930 movl(Operand(rsp, kPointerSize), tmp); | |
2931 ldmxcsr(Operand(rsp, kPointerSize)); | |
2932 } | |
2933 | |
2934 // Do rounding by conversion to int64. | |
2935 Cvtsd2siq(tmp, src); | |
2936 | |
2937 Label out_of_range; | |
2938 Label done; | |
2939 cmpq(tmp, Immediate(1)); | |
2940 // If the conversion results in INT64_MIN, then the input is outside | |
2941 // int64 range, and due to the limited precision of float64 this means | |
2942 // that the input must have been an integer already. We are therefore | |
2943 // done already. | |
2944 j(overflow, &out_of_range); | |
2945 // Rounding is done by converting the value back to float. | |
2946 Cvtqsi2sd(dst, tmp); | |
2947 if (!dst.is(src)) { | |
2948 jmp(&done); | |
2949 } | |
2950 | |
2951 bind(&out_of_range); | |
2952 if (!dst.is(src)) { | |
2953 movsd(dst, src); | |
2954 } | |
2955 | |
2956 bind(&done); | |
2957 // Restore the original rounding mode. | |
2958 ldmxcsr(Operand(rsp, 0)); | |
2959 addq(rsp, Immediate(kPointerSize * 2)); | |
2960 } | |
2961 } | |
2962 | |
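Both removed fallbacks rewrite only the rounding-control (RC) field of MXCSR on the stack: RC occupies bits 13-14, which is why the code masks with 0xffff9fff and ORs in mode << 13. A small sketch of that bit manipulation, assuming the RoundingMode values follow the SSE RC encoding (nearest = 0, down = 1, up = 2, toward zero = 3):

// Illustrative only (not V8 code): update the RC field of an MXCSR image.
#include <cstdint>

uint32_t SetRoundingControl(uint32_t mxcsr, uint32_t mode) {
  // Assumption: mode uses the SSE RC encoding 0..3 (nearest, down, up, zero).
  mxcsr &= 0xffff9fffu;   // clear RC, bits 13-14
  mxcsr |= mode << 13;    // install the requested rounding mode
  return mxcsr;
}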
2963 | 2829 |
2964 void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) { | 2830 void MacroAssembler::Sqrtsd(XMMRegister dst, XMMRegister src) { |
2965 if (CpuFeatures::IsSupported(AVX)) { | 2831 if (CpuFeatures::IsSupported(AVX)) { |
2966 CpuFeatureScope scope(this, AVX); | 2832 CpuFeatureScope scope(this, AVX); |
2967 vsqrtsd(dst, dst, src); | 2833 vsqrtsd(dst, dst, src); |
2968 } else { | 2834 } else { |
2969 sqrtsd(dst, src); | 2835 sqrtsd(dst, src); |
2970 } | 2836 } |
2971 } | 2837 } |
2972 | 2838 |
(...skipping 2683 matching lines...)
5656 movl(rax, dividend); | 5522 movl(rax, dividend); |
5657 shrl(rax, Immediate(31)); | 5523 shrl(rax, Immediate(31)); |
5658 addl(rdx, rax); | 5524 addl(rdx, rax); |
5659 } | 5525 } |
5660 | 5526 |
5661 | 5527 |
5662 } // namespace internal | 5528 } // namespace internal |
5663 } // namespace v8 | 5529 } // namespace v8 |
5664 | 5530 |
5665 #endif // V8_TARGET_ARCH_X64 | 5531 #endif // V8_TARGET_ARCH_X64 |