OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2845 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2856 void Assembler::ucomisd(XMMRegister dst, const Operand& src) { | 2856 void Assembler::ucomisd(XMMRegister dst, const Operand& src) { |
2857 EnsureSpace ensure_space(this); | 2857 EnsureSpace ensure_space(this); |
2858 emit(0x66); | 2858 emit(0x66); |
2859 emit_optional_rex_32(dst, src); | 2859 emit_optional_rex_32(dst, src); |
2860 emit(0x0f); | 2860 emit(0x0f); |
2861 emit(0x2e); | 2861 emit(0x2e); |
2862 emit_sse_operand(dst, src); | 2862 emit_sse_operand(dst, src); |
2863 } | 2863 } |
2864 | 2864 |
2865 | 2865 |
| 2866 void Assembler::roundsd(XMMRegister dst, XMMRegister src, |
| 2867 Assembler::RoundingMode mode) { |
| 2868 ASSERT(CpuFeatures::IsEnabled(SSE4_1)); |
| 2869 EnsureSpace ensure_space(this); |
| 2870 emit(0x66); |
| 2871 emit_optional_rex_32(dst, src); |
| 2872 emit(0x0f); |
| 2873 emit(0x3a); |
| 2874 emit(0x0b); |
| 2875 emit_sse_operand(dst, src); |
 | 2876 // Mask precision exception. |
| 2877 emit(static_cast<byte>(mode) | 0x8); |
| 2878 } |
| 2879 |
| 2880 |
2866 void Assembler::movmskpd(Register dst, XMMRegister src) { | 2881 void Assembler::movmskpd(Register dst, XMMRegister src) { |
2867 EnsureSpace ensure_space(this); | 2882 EnsureSpace ensure_space(this); |
2868 emit(0x66); | 2883 emit(0x66); |
2869 emit_optional_rex_32(dst, src); | 2884 emit_optional_rex_32(dst, src); |
2870 emit(0x0f); | 2885 emit(0x0f); |
2871 emit(0x50); | 2886 emit(0x50); |
2872 emit_sse_operand(dst, src); | 2887 emit_sse_operand(dst, src); |
2873 } | 2888 } |
2874 | 2889 |
2875 | 2890 |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2954 // specially coded on x64 means that it is a relative 32 bit address, as used | 2969 // specially coded on x64 means that it is a relative 32 bit address, as used |
2955 // by branch instructions. | 2970 // by branch instructions. |
2956 return (1 << rmode_) & kApplyMask; | 2971 return (1 << rmode_) & kApplyMask; |
2957 } | 2972 } |
2958 | 2973 |
2959 | 2974 |
2960 | 2975 |
2961 } } // namespace v8::internal | 2976 } } // namespace v8::internal |
2962 | 2977 |
2963 #endif // V8_TARGET_ARCH_X64 | 2978 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |