| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
| 6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
| 7 #include "src/compiler-intrinsics.h" | 7 #include "src/compiler-intrinsics.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 231 matching lines...) |
| 242 return true; | 242 return true; |
| 243 } | 243 } |
| 244 if (TryMatchShift(selector, opcode_return, node, &inputs[0], &inputs[1])) { | 244 if (TryMatchShift(selector, opcode_return, node, &inputs[0], &inputs[1])) { |
| 245 *input_count_return = 2; | 245 *input_count_return = 2; |
| 246 return true; | 246 return true; |
| 247 } | 247 } |
| 248 return false; | 248 return false; |
| 249 } | 249 } |
| 250 | 250 |
| 251 | 251 |
| 252 // Shared routine for multiple binary operations. | |
| 253 static void VisitBinop(InstructionSelector* selector, Node* node, | 252 static void VisitBinop(InstructionSelector* selector, Node* node, |
| 254 InstructionCode opcode, InstructionCode reverse_opcode) { | 253 InstructionCode opcode, InstructionCode reverse_opcode, |
| | 254                        FlagsContinuation* cont) { |
| 255 ArmOperandGenerator g(selector); | 255 ArmOperandGenerator g(selector); |
| 256 Int32BinopMatcher m(node); | 256 Int32BinopMatcher m(node); |
| 257 InstructionOperand* inputs[3]; | 257 InstructionOperand* inputs[5]; |
| 258 size_t input_count = 0; | |
| 259 InstructionOperand* outputs[1] = {g.DefineAsRegister(node)}; | |
| 260 const size_t output_count = ARRAY_SIZE(outputs); | |
| 261 | |
| 262 if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(), | |
| 263 &input_count, &inputs[1])) { | |
| 264 inputs[0] = g.UseRegister(m.left().node()); | |
| 265 input_count++; | |
| 266 } else if (TryMatchImmediateOrShift(selector, &reverse_opcode, | |
| 267 m.left().node(), &input_count, | |
| 268 &inputs[1])) { | |
| 269 inputs[0] = g.UseRegister(m.right().node()); | |
| 270 opcode = reverse_opcode; | |
| 271 input_count++; | |
| 272 } else { | |
| 273 opcode |= AddressingModeField::encode(kMode_Operand2_R); | |
| 274 inputs[input_count++] = g.UseRegister(m.left().node()); | |
| 275 inputs[input_count++] = g.UseRegister(m.right().node()); | |
| 276 } | |
| 277 | |
| 278 ASSERT_NE(0, input_count); | |
| 279 ASSERT_GE(ARRAY_SIZE(inputs), input_count); | |
| 280 ASSERT_NE(kMode_None, AddressingModeField::decode(opcode)); | |
| 281 | |
| 282 selector->Emit(opcode, output_count, outputs, input_count, inputs); | |
| 283 } | |
| 284 | |
| 285 | |
| 286 static void VisitBinopWithOverflow(InstructionSelector* selector, Node* node, | |
| 287 InstructionCode opcode, | |
| 288 InstructionCode reverse_opcode) { | |
| 289 ArmOperandGenerator g(selector); | |
| 290 Int32BinopMatcher m(node); | |
| 291 InstructionOperand* inputs[3]; | |
| 292 size_t input_count = 0; | 258 size_t input_count = 0; |
| 293 InstructionOperand* outputs[2]; | 259 InstructionOperand* outputs[2]; |
| 294 size_t output_count = 0; | 260 size_t output_count = 0; |
| 295 | 261 |
| 296 if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(), | 262 if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(), |
| 297 &input_count, &inputs[1])) { | 263 &input_count, &inputs[1])) { |
| 298 inputs[0] = g.UseRegister(m.left().node()); | 264 inputs[0] = g.UseRegister(m.left().node()); |
| 299 input_count++; | 265 input_count++; |
| 300 } else if (TryMatchImmediateOrShift(selector, &reverse_opcode, | 266 } else if (TryMatchImmediateOrShift(selector, &reverse_opcode, |
| 301 m.left().node(), &input_count, | 267 m.left().node(), &input_count, |
| 302 &inputs[1])) { | 268 &inputs[1])) { |
| 303 inputs[0] = g.UseRegister(m.right().node()); | 269 inputs[0] = g.UseRegister(m.right().node()); |
| 304 opcode = reverse_opcode; | 270 opcode = reverse_opcode; |
| 305 input_count++; | 271 input_count++; |
| 306 } else { | 272 } else { |
| 307 opcode |= AddressingModeField::encode(kMode_Operand2_R); | 273 opcode |= AddressingModeField::encode(kMode_Operand2_R); |
| 308 inputs[input_count++] = g.UseRegister(m.left().node()); | 274 inputs[input_count++] = g.UseRegister(m.left().node()); |
| 309 inputs[input_count++] = g.UseRegister(m.right().node()); | 275 inputs[input_count++] = g.UseRegister(m.right().node()); |
| 310 } | 276 } |
| 311 | 277 |
| 312 // Define outputs depending on the projections. | 278 if (cont->IsBranch()) { |
| 313 Node* projections[2]; | 279 inputs[input_count++] = g.Label(cont->true_block()); |
| 314 node->CollectProjections(ARRAY_SIZE(projections), projections); | 280 inputs[input_count++] = g.Label(cont->false_block()); |
| 315 if (projections[0]) { | |
| 316 outputs[output_count++] = g.DefineAsRegister(projections[0]); | |
| 317 } | 281 } |
| 318 if (projections[1]) { | 282 |
| 319 opcode |= FlagsModeField::encode(kFlags_set); | 283 outputs[output_count++] = g.DefineAsRegister(node); |
| 320 opcode |= FlagsConditionField::encode(kOverflow); | 284 if (cont->IsSet()) { |
| 321 outputs[output_count++] = g.DefineAsRegister(projections[1]); | 285 outputs[output_count++] = g.DefineAsRegister(cont->result()); |
| 322 } | 286 } |
| 323 | 287 |
| 324 ASSERT_NE(0, input_count); | 288 ASSERT_NE(0, input_count); |
| 325 ASSERT_NE(0, output_count); | 289 ASSERT_NE(0, output_count); |
| 326 ASSERT_GE(ARRAY_SIZE(inputs), input_count); | 290 ASSERT_GE(ARRAY_SIZE(inputs), input_count); |
| 327 ASSERT_GE(ARRAY_SIZE(outputs), output_count); | 291 ASSERT_GE(ARRAY_SIZE(outputs), output_count); |
| 328 ASSERT_NE(kMode_None, AddressingModeField::decode(opcode)); | 292 ASSERT_NE(kMode_None, AddressingModeField::decode(opcode)); |
| 329 | 293 |
| 330 selector->Emit(opcode, output_count, outputs, input_count, inputs); | 294 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, |
| | 295                                       outputs, input_count, inputs); |
| | 296   if (cont->IsBranch()) instr->MarkAsControl(); |
| 331 } | 297 } |
| 332 | 298 |
| 333 | 299 |
| | 300 static void VisitBinop(InstructionSelector* selector, Node* node, |
| | 301                        InstructionCode opcode, InstructionCode reverse_opcode) { |
| | 302   FlagsContinuation cont; |
| | 303   VisitBinop(selector, node, opcode, reverse_opcode, &cont); |
| | 304 } |
| | 305 |
| | 306 |
| 334 void InstructionSelector::VisitLoad(Node* node) { | 307 void InstructionSelector::VisitLoad(Node* node) { |
| 335 MachineRepresentation rep = OpParameter<MachineRepresentation>(node); | 308 MachineRepresentation rep = OpParameter<MachineRepresentation>(node); |
| 336 ArmOperandGenerator g(this); | 309 ArmOperandGenerator g(this); |
| 337 Node* base = node->InputAt(0); | 310 Node* base = node->InputAt(0); |
| 338 Node* index = node->InputAt(1); | 311 Node* index = node->InputAt(1); |
| 339 | 312 |
| 340 InstructionOperand* result = rep == kMachineFloat64 | 313 InstructionOperand* result = rep == kMachineFloat64 |
| 341 ? g.DefineAsDoubleRegister(node) | 314 ? g.DefineAsDoubleRegister(node) |
| 342 : g.DefineAsRegister(node); | 315 : g.DefineAsRegister(node); |
| 343 | 316 |
| (...skipping 246 matching lines...) |
| 590 if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) { | 563 if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) { |
| 591 Int32BinopMatcher mright(m.right().node()); | 564 Int32BinopMatcher mright(m.right().node()); |
| 592 Emit(kArmMla, g.DefineAsRegister(node), g.UseRegister(mright.left().node()), | 565 Emit(kArmMla, g.DefineAsRegister(node), g.UseRegister(mright.left().node()), |
| 593 g.UseRegister(mright.right().node()), g.UseRegister(m.left().node())); | 566 g.UseRegister(mright.right().node()), g.UseRegister(m.left().node())); |
| 594 return; | 567 return; |
| 595 } | 568 } |
| 596 VisitBinop(this, node, kArmAdd, kArmAdd); | 569 VisitBinop(this, node, kArmAdd, kArmAdd); |
| 597 } | 570 } |
| 598 | 571 |
| 599 | 572 |
| 600 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) { | |
| 601 VisitBinopWithOverflow(this, node, kArmAdd, kArmAdd); | |
| 602 } | |
| 603 | |
| 604 | |
| 605 void InstructionSelector::VisitInt32Sub(Node* node) { | 573 void InstructionSelector::VisitInt32Sub(Node* node) { |
| 606 ArmOperandGenerator g(this); | 574 ArmOperandGenerator g(this); |
| 607 Int32BinopMatcher m(node); | 575 Int32BinopMatcher m(node); |
| 608 if (CpuFeatures::IsSupported(MLS) && m.right().IsInt32Mul() && | 576 if (CpuFeatures::IsSupported(MLS) && m.right().IsInt32Mul() && |
| 609 CanCover(node, m.right().node())) { | 577 CanCover(node, m.right().node())) { |
| 610 Int32BinopMatcher mright(m.right().node()); | 578 Int32BinopMatcher mright(m.right().node()); |
| 611 Emit(kArmMls, g.DefineAsRegister(node), g.UseRegister(mright.left().node()), | 579 Emit(kArmMls, g.DefineAsRegister(node), g.UseRegister(mright.left().node()), |
| 612 g.UseRegister(mright.right().node()), g.UseRegister(m.left().node())); | 580 g.UseRegister(mright.right().node()), g.UseRegister(m.left().node())); |
| 613 return; | 581 return; |
| 614 } | 582 } |
| 615 VisitBinop(this, node, kArmSub, kArmRsb); | 583 VisitBinop(this, node, kArmSub, kArmRsb); |
| 616 } | 584 } |
| 617 | 585 |
| 618 | 586 |
| 619 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) { | |
| 620 VisitBinopWithOverflow(this, node, kArmSub, kArmRsb); | |
| 621 } | |
| 622 | |
| 623 | |
| 624 void InstructionSelector::VisitInt32Mul(Node* node) { | 587 void InstructionSelector::VisitInt32Mul(Node* node) { |
| 625 ArmOperandGenerator g(this); | 588 ArmOperandGenerator g(this); |
| 626 Int32BinopMatcher m(node); | 589 Int32BinopMatcher m(node); |
| 627 if (m.right().HasValue() && m.right().Value() > 0) { | 590 if (m.right().HasValue() && m.right().Value() > 0) { |
| 628 int32_t value = m.right().Value(); | 591 int32_t value = m.right().Value(); |
| 629 if (IsPowerOf2(value - 1)) { | 592 if (IsPowerOf2(value - 1)) { |
| 630 Emit(kArmAdd | AddressingModeField::encode(kMode_Operand2_R_LSL_I), | 593 Emit(kArmAdd | AddressingModeField::encode(kMode_Operand2_R_LSL_I), |
| 631 g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 594 g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
| 632 g.UseRegister(m.left().node()), | 595 g.UseRegister(m.left().node()), |
| 633 g.TempImmediate(WhichPowerOf2(value - 1))); | 596 g.TempImmediate(WhichPowerOf2(value - 1))); |
| (...skipping 226 matching lines...) |
| 860 | 823 |
| 861 // Caller clean up of stack for C-style calls. | 824 // Caller clean up of stack for C-style calls. |
| 862 if (descriptor->kind() == CallDescriptor::kCallAddress && | 825 if (descriptor->kind() == CallDescriptor::kCallAddress && |
| 863 buffer.pushed_count > 0) { | 826 buffer.pushed_count > 0) { |
| 864 ASSERT(deoptimization == NULL && continuation == NULL); | 827 ASSERT(deoptimization == NULL && continuation == NULL); |
| 865 Emit(kArmDrop | MiscField::encode(buffer.pushed_count), NULL); | 828 Emit(kArmDrop | MiscField::encode(buffer.pushed_count), NULL); |
| 866 } | 829 } |
| 867 } | 830 } |
| 868 | 831 |
| 869 | 832 |
| | 833 void InstructionSelector::VisitInt32AddWithOverflow(Node* node, |
| | 834                                                     FlagsContinuation* cont) { |
| | 835   VisitBinop(this, node, kArmAdd, kArmAdd, cont); |
| | 836 } |
| | 837 |
| | 838 |
| | 839 void InstructionSelector::VisitInt32SubWithOverflow(Node* node, |
| | 840                                                     FlagsContinuation* cont) { |
| | 841   VisitBinop(this, node, kArmSub, kArmRsb, cont); |
| | 842 } |
| | 843 |
| | 844 |
| 870 // Shared routine for multiple compare operations. | 845 // Shared routine for multiple compare operations. |
| 871 static void VisitWordCompare(InstructionSelector* selector, Node* node, | 846 static void VisitWordCompare(InstructionSelector* selector, Node* node, |
| 872 InstructionCode opcode, FlagsContinuation* cont, | 847 InstructionCode opcode, FlagsContinuation* cont, |
| 873 bool commutative, bool requires_output) { | 848 bool commutative) { |
| 874 ArmOperandGenerator g(selector); | 849 ArmOperandGenerator g(selector); |
| 875 Int32BinopMatcher m(node); | 850 Int32BinopMatcher m(node); |
| 876 InstructionOperand* inputs[5]; | 851 InstructionOperand* inputs[5]; |
| 877 size_t input_count = 0; | 852 size_t input_count = 0; |
| 878 InstructionOperand* outputs[1]; | 853 InstructionOperand* outputs[1]; |
| 879 size_t output_count = 0; | 854 size_t output_count = 0; |
| 880 | 855 |
| 881 if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(), | 856 if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(), |
| 882 &input_count, &inputs[1])) { | 857 &input_count, &inputs[1])) { |
| 883 inputs[0] = g.UseRegister(m.left().node()); | 858 inputs[0] = g.UseRegister(m.left().node()); |
| 884 input_count++; | 859 input_count++; |
| 885 } else if (TryMatchImmediateOrShift(selector, &opcode, m.left().node(), | 860 } else if (TryMatchImmediateOrShift(selector, &opcode, m.left().node(), |
| 886 &input_count, &inputs[1])) { | 861 &input_count, &inputs[1])) { |
| 887 if (!commutative) cont->Commute(); | 862 if (!commutative) cont->Commute(); |
| 888 inputs[0] = g.UseRegister(m.right().node()); | 863 inputs[0] = g.UseRegister(m.right().node()); |
| 889 input_count++; | 864 input_count++; |
| 890 } else { | 865 } else { |
| 891 opcode |= AddressingModeField::encode(kMode_Operand2_R); | 866 opcode |= AddressingModeField::encode(kMode_Operand2_R); |
| 892 inputs[input_count++] = g.UseRegister(m.left().node()); | 867 inputs[input_count++] = g.UseRegister(m.left().node()); |
| 893 inputs[input_count++] = g.UseRegister(m.right().node()); | 868 inputs[input_count++] = g.UseRegister(m.right().node()); |
| 894 } | 869 } |
| 895 | 870 |
| 896 if (cont->IsBranch()) { | 871 if (cont->IsBranch()) { |
| 897 if (requires_output) { | |
| 898 outputs[output_count++] = g.DefineAsRegister(node); | |
| 899 } | |
| 900 inputs[input_count++] = g.Label(cont->true_block()); | 872 inputs[input_count++] = g.Label(cont->true_block()); |
| 901 inputs[input_count++] = g.Label(cont->false_block()); | 873 inputs[input_count++] = g.Label(cont->false_block()); |
| 902 } else { | 874 } else { |
| 903 ASSERT(cont->IsSet()); | 875 ASSERT(cont->IsSet()); |
| 904 outputs[output_count++] = g.DefineAsRegister(cont->result()); | 876 outputs[output_count++] = g.DefineAsRegister(cont->result()); |
| 905 } | 877 } |
| 906 | 878 |
| 907 ASSERT_NE(0, input_count); | 879 ASSERT_NE(0, input_count); |
| 908 ASSERT_GE(ARRAY_SIZE(inputs), input_count); | 880 ASSERT_GE(ARRAY_SIZE(inputs), input_count); |
| 909 ASSERT_GE(ARRAY_SIZE(outputs), output_count); | 881 ASSERT_GE(ARRAY_SIZE(outputs), output_count); |
| 910 | 882 |
| 911 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, | 883 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, |
| 912 outputs, input_count, inputs); | 884 outputs, input_count, inputs); |
| 913 if (cont->IsBranch()) instr->MarkAsControl(); | 885 if (cont->IsBranch()) instr->MarkAsControl(); |
| 914 } | 886 } |
| 915 | 887 |
| 916 | 888 |
| 917 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { | 889 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { |
| 918 switch (node->opcode()) { | 890 switch (node->opcode()) { |
| 919 case IrOpcode::kInt32Add: | 891 case IrOpcode::kInt32Add: |
| 920 return VisitWordCompare(this, node, kArmCmn, cont, true, false); | 892 return VisitWordCompare(this, node, kArmCmn, cont, true); |
| 921 case IrOpcode::kInt32Sub: | 893 case IrOpcode::kInt32Sub: |
| 922 return VisitWordCompare(this, node, kArmCmp, cont, false, false); | 894 return VisitWordCompare(this, node, kArmCmp, cont, false); |
| 923 case IrOpcode::kWord32And: | 895 case IrOpcode::kWord32And: |
| 924 return VisitWordCompare(this, node, kArmTst, cont, true, false); | 896 return VisitWordCompare(this, node, kArmTst, cont, true); |
| 925 case IrOpcode::kWord32Or: | 897 case IrOpcode::kWord32Or: |
| 926 return VisitWordCompare(this, node, kArmOrr, cont, true, true); | 898 return VisitBinop(this, node, kArmOrr, kArmOrr, cont); |
| 927 case IrOpcode::kWord32Xor: | 899 case IrOpcode::kWord32Xor: |
| 928 return VisitWordCompare(this, node, kArmTeq, cont, true, false); | 900 return VisitWordCompare(this, node, kArmTeq, cont, true); |
| 929 default: | 901 default: |
| 930 break; | 902 break; |
| 931 } | 903 } |
| 932 | 904 |
| 933 ArmOperandGenerator g(this); | 905 ArmOperandGenerator g(this); |
| 934 InstructionCode opcode = | 906 InstructionCode opcode = |
| 935 cont->Encode(kArmTst) | AddressingModeField::encode(kMode_Operand2_R); | 907 cont->Encode(kArmTst) | AddressingModeField::encode(kMode_Operand2_R); |
| 936 if (cont->IsBranch()) { | 908 if (cont->IsBranch()) { |
| 937 Emit(opcode, NULL, g.UseRegister(node), g.UseRegister(node), | 909 Emit(opcode, NULL, g.UseRegister(node), g.UseRegister(node), |
| 938 g.Label(cont->true_block()), | 910 g.Label(cont->true_block()), |
| 939 g.Label(cont->false_block()))->MarkAsControl(); | 911 g.Label(cont->false_block()))->MarkAsControl(); |
| 940 } else { | 912 } else { |
| 941 Emit(opcode, g.DefineAsRegister(cont->result()), g.UseRegister(node), | 913 Emit(opcode, g.DefineAsRegister(cont->result()), g.UseRegister(node), |
| 942 g.UseRegister(node)); | 914 g.UseRegister(node)); |
| 943 } | 915 } |
| 944 } | 916 } |
| 945 | 917 |
| 946 | 918 |
| 947 void InstructionSelector::VisitWord32Compare(Node* node, | 919 void InstructionSelector::VisitWord32Compare(Node* node, |
| 948 FlagsContinuation* cont) { | 920 FlagsContinuation* cont) { |
| 949 VisitWordCompare(this, node, kArmCmp, cont, false, false); | 921 VisitWordCompare(this, node, kArmCmp, cont, false); |
| 950 } | 922 } |
| 951 | 923 |
| 952 | 924 |
| 953 void InstructionSelector::VisitFloat64Compare(Node* node, | 925 void InstructionSelector::VisitFloat64Compare(Node* node, |
| 954 FlagsContinuation* cont) { | 926 FlagsContinuation* cont) { |
| 955 ArmOperandGenerator g(this); | 927 ArmOperandGenerator g(this); |
| 956 Float64BinopMatcher m(node); | 928 Float64BinopMatcher m(node); |
| 957 if (cont->IsBranch()) { | 929 if (cont->IsBranch()) { |
| 958 Emit(cont->Encode(kArmVcmpF64), NULL, g.UseDoubleRegister(m.left().node()), | 930 Emit(cont->Encode(kArmVcmpF64), NULL, g.UseDoubleRegister(m.left().node()), |
| 959 g.UseDoubleRegister(m.right().node()), g.Label(cont->true_block()), | 931 g.UseDoubleRegister(m.right().node()), g.Label(cont->true_block()), |
| 960 g.Label(cont->false_block()))->MarkAsControl(); | 932 g.Label(cont->false_block()))->MarkAsControl(); |
| 961 } else { | 933 } else { |
| 962 ASSERT(cont->IsSet()); | 934 ASSERT(cont->IsSet()); |
| 963 Emit(cont->Encode(kArmVcmpF64), g.DefineAsRegister(cont->result()), | 935 Emit(cont->Encode(kArmVcmpF64), g.DefineAsRegister(cont->result()), |
| 964 g.UseDoubleRegister(m.left().node()), | 936 g.UseDoubleRegister(m.left().node()), |
| 965 g.UseDoubleRegister(m.right().node())); | 937 g.UseDoubleRegister(m.right().node())); |
| 966 } | 938 } |
| 967 } | 939 } |
| 968 | 940 |
| 969 } // namespace compiler | 941 } // namespace compiler |
| 970 } // namespace internal | 942 } // namespace internal |
| 971 } // namespace v8 | 943 } // namespace v8 |