OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/compiler/code-generator-impl.h" | 7 #include "src/compiler/code-generator-impl.h" |
8 #include "src/compiler/gap-resolver.h" | 8 #include "src/compiler/gap-resolver.h" |
9 #include "src/compiler/node-matchers.h" | 9 #include "src/compiler/node-matchers.h" |
10 #include "src/compiler/osr.h" | 10 #include "src/compiler/osr.h" |
(...skipping 921 matching lines...)
932 __ movl(kScratchRegister, i.InputRegister(0)); | 932 __ movl(kScratchRegister, i.InputRegister(0)); |
933 } else { | 933 } else { |
934 __ movl(kScratchRegister, i.InputOperand(0)); | 934 __ movl(kScratchRegister, i.InputOperand(0)); |
935 } | 935 } |
936 __ cvtqsi2sd(i.OutputDoubleRegister(), kScratchRegister); | 936 __ cvtqsi2sd(i.OutputDoubleRegister(), kScratchRegister); |
937 break; | 937 break; |
938 case kSSEFloat64ExtractLowWord32: | 938 case kSSEFloat64ExtractLowWord32: |
939 if (instr->InputAt(0)->IsDoubleStackSlot()) { | 939 if (instr->InputAt(0)->IsDoubleStackSlot()) { |
940 __ movl(i.OutputRegister(), i.InputOperand(0)); | 940 __ movl(i.OutputRegister(), i.InputOperand(0)); |
941 } else { | 941 } else { |
942 __ movd(i.OutputRegister(), i.InputDoubleRegister(0)); | 942 __ Movd(i.OutputRegister(), i.InputDoubleRegister(0)); |
943 } | 943 } |
944 break; | 944 break; |
945 case kSSEFloat64ExtractHighWord32: | 945 case kSSEFloat64ExtractHighWord32: |
946 if (instr->InputAt(0)->IsDoubleStackSlot()) { | 946 if (instr->InputAt(0)->IsDoubleStackSlot()) { |
947 __ movl(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2)); | 947 __ movl(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2)); |
948 } else { | 948 } else { |
949 __ Pextrd(i.OutputRegister(), i.InputDoubleRegister(0), 1); | 949 __ Pextrd(i.OutputRegister(), i.InputDoubleRegister(0), 1); |
950 } | 950 } |
951 break; | 951 break; |
952 case kSSEFloat64InsertLowWord32: | 952 case kSSEFloat64InsertLowWord32: |
953 if (instr->InputAt(1)->IsRegister()) { | 953 if (instr->InputAt(1)->IsRegister()) { |
954 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 0); | 954 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 0); |
955 } else { | 955 } else { |
956 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 0); | 956 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 0); |
957 } | 957 } |
958 break; | 958 break; |
959 case kSSEFloat64InsertHighWord32: | 959 case kSSEFloat64InsertHighWord32: |
960 if (instr->InputAt(1)->IsRegister()) { | 960 if (instr->InputAt(1)->IsRegister()) { |
961 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 1); | 961 __ Pinsrd(i.OutputDoubleRegister(), i.InputRegister(1), 1); |
962 } else { | 962 } else { |
963 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 1); | 963 __ Pinsrd(i.OutputDoubleRegister(), i.InputOperand(1), 1); |
964 } | 964 } |
965 break; | 965 break; |
966 case kSSEFloat64LoadLowWord32: | 966 case kSSEFloat64LoadLowWord32: |
967 if (instr->InputAt(0)->IsRegister()) { | 967 if (instr->InputAt(0)->IsRegister()) { |
968 __ movd(i.OutputDoubleRegister(), i.InputRegister(0)); | 968 __ Movd(i.OutputDoubleRegister(), i.InputRegister(0)); |
969 } else { | 969 } else { |
970 __ movd(i.OutputDoubleRegister(), i.InputOperand(0)); | 970 __ Movd(i.OutputDoubleRegister(), i.InputOperand(0)); |
971 } | 971 } |
972 break; | 972 break; |
973 case kAVXFloat32Cmp: { | 973 case kAVXFloat32Cmp: { |
974 CpuFeatureScope avx_scope(masm(), AVX); | 974 CpuFeatureScope avx_scope(masm(), AVX); |
975 if (instr->InputAt(1)->IsDoubleRegister()) { | 975 if (instr->InputAt(1)->IsDoubleRegister()) { |
976 __ vucomiss(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); | 976 __ vucomiss(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); |
977 } else { | 977 } else { |
978 __ vucomiss(i.InputDoubleRegister(0), i.InputOperand(1)); | 978 __ vucomiss(i.InputDoubleRegister(0), i.InputOperand(1)); |
979 } | 979 } |
980 break; | 980 break; |
(...skipping 194 matching lines...)
1175 } else { | 1175 } else { |
1176 size_t index = 0; | 1176 size_t index = 0; |
1177 Operand operand = i.MemoryOperand(&index); | 1177 Operand operand = i.MemoryOperand(&index); |
1178 __ Movsd(operand, i.InputDoubleRegister(index)); | 1178 __ Movsd(operand, i.InputDoubleRegister(index)); |
1179 } | 1179 } |
1180 break; | 1180 break; |
1181 case kX64BitcastFI: | 1181 case kX64BitcastFI: |
1182 if (instr->InputAt(0)->IsDoubleStackSlot()) { | 1182 if (instr->InputAt(0)->IsDoubleStackSlot()) { |
1183 __ movl(i.OutputRegister(), i.InputOperand(0)); | 1183 __ movl(i.OutputRegister(), i.InputOperand(0)); |
1184 } else { | 1184 } else { |
1185 __ movd(i.OutputRegister(), i.InputDoubleRegister(0)); | 1185 __ Movd(i.OutputRegister(), i.InputDoubleRegister(0)); |
1186 } | 1186 } |
1187 break; | 1187 break; |
1188 case kX64BitcastDL: | 1188 case kX64BitcastDL: |
1189 if (instr->InputAt(0)->IsDoubleStackSlot()) { | 1189 if (instr->InputAt(0)->IsDoubleStackSlot()) { |
1190 __ movq(i.OutputRegister(), i.InputOperand(0)); | 1190 __ movq(i.OutputRegister(), i.InputOperand(0)); |
1191 } else { | 1191 } else { |
1192 __ movq(i.OutputRegister(), i.InputDoubleRegister(0)); | 1192 __ Movq(i.OutputRegister(), i.InputDoubleRegister(0)); |
1193 } | 1193 } |
1194 break; | 1194 break; |
1195 case kX64BitcastIF: | 1195 case kX64BitcastIF: |
1196 if (instr->InputAt(0)->IsRegister()) { | 1196 if (instr->InputAt(0)->IsRegister()) { |
1197 __ movd(i.OutputDoubleRegister(), i.InputRegister(0)); | 1197 __ Movd(i.OutputDoubleRegister(), i.InputRegister(0)); |
1198 } else { | 1198 } else { |
1199 __ movss(i.OutputDoubleRegister(), i.InputOperand(0)); | 1199 __ movss(i.OutputDoubleRegister(), i.InputOperand(0)); |
1200 } | 1200 } |
1201 break; | 1201 break; |
1202 case kX64BitcastLD: | 1202 case kX64BitcastLD: |
1203 if (instr->InputAt(0)->IsRegister()) { | 1203 if (instr->InputAt(0)->IsRegister()) { |
1204 __ movq(i.OutputDoubleRegister(), i.InputRegister(0)); | 1204 __ Movq(i.OutputDoubleRegister(), i.InputRegister(0)); |
1205 } else { | 1205 } else { |
1206 __ Movsd(i.OutputDoubleRegister(), i.InputOperand(0)); | 1206 __ Movsd(i.OutputDoubleRegister(), i.InputOperand(0)); |
1207 } | 1207 } |
1208 break; | 1208 break; |
1209 case kX64Lea32: { | 1209 case kX64Lea32: { |
1210 AddressingMode mode = AddressingModeField::decode(instr->opcode()); | 1210 AddressingMode mode = AddressingModeField::decode(instr->opcode()); |
1211 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation | 1211 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation |
1212 // and addressing mode just happens to work out. The "addl"/"subl" forms | 1212 // and addressing mode just happens to work out. The "addl"/"subl" forms |
1213 // in these cases are faster based on measurements. | 1213 // in these cases are faster based on measurements. |
1214 if (i.InputRegister(0).is(i.OutputRegister())) { | 1214 if (i.InputRegister(0).is(i.OutputRegister())) { |
(...skipping 604 matching lines...)
1819 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; | 1819 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc; |
1820 __ Nop(padding_size); | 1820 __ Nop(padding_size); |
1821 } | 1821 } |
1822 } | 1822 } |
1823 | 1823 |
1824 #undef __ | 1824 #undef __ |
1825 | 1825 |
1826 } // namespace compiler | 1826 } // namespace compiler |
1827 } // namespace internal | 1827 } // namespace internal |
1828 } // namespace v8 | 1828 } // namespace v8 |
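Reviewer note: the change swaps raw instruction emitters (movd, movq) for their capitalized MacroAssembler wrappers (Movd, Movq). A minimal sketch of the dispatch pattern such a wrapper presumably follows, choosing the AVX encoding when the CPU supports it and falling back to the SSE2 form otherwise (the exact signature and guard are assumptions based on the CpuFeatureScope usage visible above, not code from this CL):

// Sketch (assumed): Movd-style wrapper in the x64 MacroAssembler.
void MacroAssembler::Movd(XMMRegister dst, Register src) {
  if (CpuFeatures::IsSupported(AVX)) {
    // Use the VEX-encoded form when AVX is available.
    CpuFeatureScope scope(this, AVX);
    vmovd(dst, src);
  } else {
    // Legacy SSE2 encoding on older CPUs.
    movd(dst, src);
  }
}

With that wrapper, call sites in the code generator stay feature-agnostic, which is why the per-case branches here only pick between register and memory operands rather than between instruction sets.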