OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <algorithm> | 5 #include <algorithm> |
6 | 6 |
7 #include "src/compiler/instruction-selector-impl.h" | 7 #include "src/compiler/instruction-selector-impl.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/node-properties.h" | 9 #include "src/compiler/node-properties.h" |
10 | 10 |
(...skipping 838 matching lines...) |
849 X64OperandGenerator g(selector); | 849 X64OperandGenerator g(selector); |
850 InstructionOperand operand0 = g.UseRegister(node->InputAt(0)); | 850 InstructionOperand operand0 = g.UseRegister(node->InputAt(0)); |
851 InstructionOperand operand1 = g.Use(node->InputAt(1)); | 851 InstructionOperand operand1 = g.Use(node->InputAt(1)); |
852 if (selector->IsSupported(AVX)) { | 852 if (selector->IsSupported(AVX)) { |
853 selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1); | 853 selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1); |
854 } else { | 854 } else { |
855 selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1); | 855 selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1); |
856 } | 856 } |
857 } | 857 } |
858 | 858 |
| 859 |
| 860 void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input, |
| 861 ArchOpcode avx_opcode, ArchOpcode sse_opcode) { |
| 862 X64OperandGenerator g(selector); |
| 863 if (selector->IsSupported(AVX)) { |
| 864 selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input)); |
| 865 } else { |
| 866 selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input)); |
| 867 } |
| 868 } |
| 869 |
| 870 |
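Note: the new VisitFloatUnop helper mirrors VisitFloatBinop's AVX/SSE split. With AVX, the non-destructive form lets the result be defined in any register (DefineAsRegister) and the input be a register or memory operand (g.Use); without AVX, the SSE form is destructive, so the output is pinned to the first input's register (DefineSameAsFirst) and the input must live in a register (g.UseRegister). Below is a minimal stand-alone sketch of that selection split, with made-up names (Selection, SelectNegate) that are not part of the V8 API:

// Illustrative only: models the operand-constraint choice made by
// VisitFloatUnop above; none of these names exist in V8.
#include <cstdio>
#include <initializer_list>

struct Selection {
  const char* opcode;         // which machine opcode would be emitted
  bool output_same_as_first;  // SSE-style destructive destination
  bool memory_input_allowed;  // AVX form may take the input from memory
};

Selection SelectNegate(bool has_avx) {
  if (has_avx) {
    return {"AVXFloat32Neg", false, true};  // DefineAsRegister + Use
  }
  return {"SSEFloat32Neg", true, false};    // DefineSameAsFirst + UseRegister
}

int main() {
  for (bool avx : {true, false}) {
    Selection s = SelectNegate(avx);
    std::printf("%s: same-as-first=%d memory-input=%d\n", s.opcode,
                s.output_same_as_first, s.memory_input_allowed);
  }
}
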
859 } // namespace | 871 } // namespace |
860 | 872 |
861 | 873 |
862 void InstructionSelector::VisitFloat32Add(Node* node) { | 874 void InstructionSelector::VisitFloat32Add(Node* node) { |
863 VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add); | 875 VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add); |
864 } | 876 } |
865 | 877 |
866 | 878 |
867 void InstructionSelector::VisitFloat32Sub(Node* node) { | 879 void InstructionSelector::VisitFloat32Sub(Node* node) { |
868 X64OperandGenerator g(this); | 880 X64OperandGenerator g(this); |
869 Float32BinopMatcher m(node); | 881 Float32BinopMatcher m(node); |
870 if (m.left().IsMinusZero()) { | 882 if (m.left().IsMinusZero()) { |
871 Emit(kSSEFloat32Neg, g.DefineSameAsFirst(node), | 883 VisitFloatUnop(this, node, m.right().node(), kAVXFloat32Neg, |
872 g.UseRegister(m.right().node())); | 884 kSSEFloat32Neg); |
873 return; | 885 return; |
874 } | 886 } |
875 VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub); | 887 VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub); |
876 } | 888 } |
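Note: the minus-zero test is what makes this fold safe. Only -0.0f - x is a true float negation; a plain 0.0f - x would yield +0.0 for x == +0.0 and lose the sign flip. A throwaway check under ordinary IEEE-754 semantics (illustration only, not part of the patch):

// Demonstrates why the matcher insists on a -0.0 left operand before
// rewriting the subtraction as kAVXFloat32Neg / kSSEFloat32Neg.
#include <cmath>
#include <cstdio>

int main() {
  float x = 0.0f;
  std::printf("-0.0f - x: signbit=%d (same as -x)\n",
              std::signbit(-0.0f - x) ? 1 : 0);  // prints 1
  std::printf(" 0.0f - x: signbit=%d (sign lost)\n",
              std::signbit(0.0f - x) ? 1 : 0);   // prints 0
}
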
877 | 889 |
878 | 890 |
879 void InstructionSelector::VisitFloat32Mul(Node* node) { | 891 void InstructionSelector::VisitFloat32Mul(Node* node) { |
880 VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul); | 892 VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul); |
881 } | 893 } |
882 | 894 |
883 | 895 |
884 void InstructionSelector::VisitFloat32Div(Node* node) { | 896 void InstructionSelector::VisitFloat32Div(Node* node) { |
885 VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div); | 897 VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div); |
886 } | 898 } |
887 | 899 |
888 | 900 |
889 void InstructionSelector::VisitFloat32Max(Node* node) { | 901 void InstructionSelector::VisitFloat32Max(Node* node) { |
890 VisitFloatBinop(this, node, kAVXFloat32Max, kSSEFloat32Max); | 902 VisitFloatBinop(this, node, kAVXFloat32Max, kSSEFloat32Max); |
891 } | 903 } |
892 | 904 |
893 | 905 |
894 void InstructionSelector::VisitFloat32Min(Node* node) { | 906 void InstructionSelector::VisitFloat32Min(Node* node) { |
895 VisitFloatBinop(this, node, kAVXFloat32Min, kSSEFloat32Min); | 907 VisitFloatBinop(this, node, kAVXFloat32Min, kSSEFloat32Min); |
896 } | 908 } |
897 | 909 |
898 | 910 |
899 void InstructionSelector::VisitFloat32Abs(Node* node) { | 911 void InstructionSelector::VisitFloat32Abs(Node* node) { |
900 X64OperandGenerator g(this); | 912 X64OperandGenerator g(this); |
901 Emit(kSSEFloat32Abs, g.DefineSameAsFirst(node), g.Use(node->InputAt(0))); | 913 VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs); |
902 } | 914 } |
903 | 915 |
904 | 916 |
905 void InstructionSelector::VisitFloat32Sqrt(Node* node) { | 917 void InstructionSelector::VisitFloat32Sqrt(Node* node) { |
906 X64OperandGenerator g(this); | 918 X64OperandGenerator g(this); |
907 Emit(kSSEFloat32Sqrt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 919 Emit(kSSEFloat32Sqrt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); |
908 } | 920 } |
909 | 921 |
910 | 922 |
911 void InstructionSelector::VisitFloat64Add(Node* node) { | 923 void InstructionSelector::VisitFloat64Add(Node* node) { |
(...skipping 10 matching lines...) |
922 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub && | 934 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub && |
923 CanCover(m.right().node(), m.right().InputAt(0))) { | 935 CanCover(m.right().node(), m.right().InputAt(0))) { |
924 Float64BinopMatcher mright0(m.right().InputAt(0)); | 936 Float64BinopMatcher mright0(m.right().InputAt(0)); |
925 if (mright0.left().IsMinusZero()) { | 937 if (mright0.left().IsMinusZero()) { |
926 Emit(kSSEFloat64Round | MiscField::encode(kRoundUp), | 938 Emit(kSSEFloat64Round | MiscField::encode(kRoundUp), |
927 g.DefineAsRegister(node), g.UseRegister(mright0.right().node())); | 939 g.DefineAsRegister(node), g.UseRegister(mright0.right().node())); |
928 return; | 940 return; |
929 } | 941 } |
930 } | 942 } |
931 } | 943 } |
932 Emit(kSSEFloat64Neg, g.DefineSameAsFirst(node), | 944 VisitFloatUnop(this, node, m.right().node(), kAVXFloat64Neg, |
933 g.UseRegister(m.right().node())); | 945 kSSEFloat64Neg); |
934 return; | 946 return; |
935 } | 947 } |
936 VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub); | 948 VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub); |
937 } | 949 } |
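Note: together with the checks hidden in the elided lines, this hunk appears to recognize a nested pattern of the shape -0.0 - RoundDown(-0.0 - x) and emits a single kSSEFloat64Round with kRoundUp instead. The fold rests on the identity ceil(x) == -floor(-x); a quick stand-alone check of that identity (illustration only, assuming ordinary IEEE-754 doubles):

// Spot-checks ceil(x) == -floor(-x), the identity behind emitting kRoundUp
// for the negated round-down pattern matched above.
#include <cmath>
#include <cstdio>
#include <initializer_list>

int main() {
  for (double x : {-2.5, -0.0, 0.5, 2.5}) {
    std::printf("x=% .1f  ceil(x)=% .1f  -floor(-x)=% .1f\n",
                x, std::ceil(x), -std::floor(-x));
  }
}
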
938 | 950 |
939 | 951 |
940 void InstructionSelector::VisitFloat64Mul(Node* node) { | 952 void InstructionSelector::VisitFloat64Mul(Node* node) { |
941 VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul); | 953 VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul); |
942 } | 954 } |
943 | 955 |
(...skipping 17 matching lines...) |
961 } | 973 } |
962 | 974 |
963 | 975 |
964 void InstructionSelector::VisitFloat64Min(Node* node) { | 976 void InstructionSelector::VisitFloat64Min(Node* node) { |
965 VisitFloatBinop(this, node, kAVXFloat64Min, kSSEFloat64Min); | 977 VisitFloatBinop(this, node, kAVXFloat64Min, kSSEFloat64Min); |
966 } | 978 } |
967 | 979 |
968 | 980 |
969 void InstructionSelector::VisitFloat64Abs(Node* node) { | 981 void InstructionSelector::VisitFloat64Abs(Node* node) { |
970 X64OperandGenerator g(this); | 982 X64OperandGenerator g(this); |
971 Emit(kSSEFloat64Abs, g.DefineSameAsFirst(node), g.Use(node->InputAt(0))); | 983 VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs); |
972 } | 984 } |
973 | 985 |
974 | 986 |
975 void InstructionSelector::VisitFloat64Sqrt(Node* node) { | 987 void InstructionSelector::VisitFloat64Sqrt(Node* node) { |
976 X64OperandGenerator g(this); | 988 X64OperandGenerator g(this); |
977 Emit(kSSEFloat64Sqrt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); | 989 Emit(kSSEFloat64Sqrt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); |
978 } | 990 } |
979 | 991 |
980 | 992 |
981 namespace { | 993 namespace { |
(...skipping 555 matching lines...) |
1537 if (CpuFeatures::IsSupported(SSE4_1)) { | 1549 if (CpuFeatures::IsSupported(SSE4_1)) { |
1538 flags |= MachineOperatorBuilder::kFloat64RoundDown | | 1550 flags |= MachineOperatorBuilder::kFloat64RoundDown | |
1539 MachineOperatorBuilder::kFloat64RoundTruncate; | 1551 MachineOperatorBuilder::kFloat64RoundTruncate; |
1540 } | 1552 } |
1541 return flags; | 1553 return flags; |
1542 } | 1554 } |
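Note: the rounding flags stay gated on SSE4.1 because the scalar round instructions (roundsd/roundss) were introduced with SSE4.1. A minimal sketch of the same capability-gating pattern, using illustrative names that are not the V8 API:

// Illustration only: runtime feature detection decides which machine
// operators get advertised; the enum and function names are invented.
#include <cstdint>
#include <cstdio>

enum Flag : uint32_t { kNoFlags = 0, kRoundDown = 1u << 0, kRoundTruncate = 1u << 1 };

uint32_t SupportedFlags(bool has_sse4_1) {
  uint32_t flags = kNoFlags;
  if (has_sse4_1) flags |= kRoundDown | kRoundTruncate;  // needs SSE4.1 rounding
  return flags;
}

int main() {
  std::printf("with SSE4.1:    0x%x\n", (unsigned)SupportedFlags(true));
  std::printf("without SSE4.1: 0x%x\n", (unsigned)SupportedFlags(false));
}
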
1543 | 1555 |
1544 } // namespace compiler | 1556 } // namespace compiler |
1545 } // namespace internal | 1557 } // namespace internal |
1546 } // namespace v8 | 1558 } // namespace v8 |