OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
7 #include "src/compiler-intrinsics.h" | 7 #include "src/compiler-intrinsics.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 267 matching lines...)
278 if (cont->IsBranch()) { | 278 if (cont->IsBranch()) { |
279 inputs[input_count++] = g.Label(cont->true_block()); | 279 inputs[input_count++] = g.Label(cont->true_block()); |
280 inputs[input_count++] = g.Label(cont->false_block()); | 280 inputs[input_count++] = g.Label(cont->false_block()); |
281 } | 281 } |
282 | 282 |
283 outputs[output_count++] = g.DefineAsRegister(node); | 283 outputs[output_count++] = g.DefineAsRegister(node); |
284 if (cont->IsSet()) { | 284 if (cont->IsSet()) { |
285 outputs[output_count++] = g.DefineAsRegister(cont->result()); | 285 outputs[output_count++] = g.DefineAsRegister(cont->result()); |
286 } | 286 } |
287 | 287 |
288 ASSERT_NE(0, input_count); | 288 DCHECK_NE(0, input_count); |
289 ASSERT_NE(0, output_count); | 289 DCHECK_NE(0, output_count); |
290 ASSERT_GE(ARRAY_SIZE(inputs), input_count); | 290 DCHECK_GE(ARRAY_SIZE(inputs), input_count); |
291 ASSERT_GE(ARRAY_SIZE(outputs), output_count); | 291 DCHECK_GE(ARRAY_SIZE(outputs), output_count); |
292 ASSERT_NE(kMode_None, AddressingModeField::decode(opcode)); | 292 DCHECK_NE(kMode_None, AddressingModeField::decode(opcode)); |
293 | 293 |
294 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, | 294 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, |
295 outputs, input_count, inputs); | 295 outputs, input_count, inputs); |
296 if (cont->IsBranch()) instr->MarkAsControl(); | 296 if (cont->IsBranch()) instr->MarkAsControl(); |
297 } | 297 } |
298 | 298 |
299 | 299 |
300 static void VisitBinop(InstructionSelector* selector, Node* node, | 300 static void VisitBinop(InstructionSelector* selector, Node* node, |
301 InstructionCode opcode, InstructionCode reverse_opcode) { | 301 InstructionCode opcode, InstructionCode reverse_opcode) { |
302 FlagsContinuation cont; | 302 FlagsContinuation cont; |
(...skipping 46 matching lines...)
349 | 349 |
350 void InstructionSelector::VisitStore(Node* node) { | 350 void InstructionSelector::VisitStore(Node* node) { |
351 ArmOperandGenerator g(this); | 351 ArmOperandGenerator g(this); |
352 Node* base = node->InputAt(0); | 352 Node* base = node->InputAt(0); |
353 Node* index = node->InputAt(1); | 353 Node* index = node->InputAt(1); |
354 Node* value = node->InputAt(2); | 354 Node* value = node->InputAt(2); |
355 | 355 |
356 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node); | 356 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node); |
357 MachineRepresentation rep = store_rep.rep; | 357 MachineRepresentation rep = store_rep.rep; |
358 if (store_rep.write_barrier_kind == kFullWriteBarrier) { | 358 if (store_rep.write_barrier_kind == kFullWriteBarrier) { |
359 ASSERT(rep == kMachineTagged); | 359 DCHECK(rep == kMachineTagged); |
360 // TODO(dcarney): refactor RecordWrite function to take temp registers | 360 // TODO(dcarney): refactor RecordWrite function to take temp registers |
361 // and pass them here instead of using fixed regs | 361 // and pass them here instead of using fixed regs |
362 // TODO(dcarney): handle immediate indices. | 362 // TODO(dcarney): handle immediate indices. |
363 InstructionOperand* temps[] = {g.TempRegister(r5), g.TempRegister(r6)}; | 363 InstructionOperand* temps[] = {g.TempRegister(r5), g.TempRegister(r6)}; |
364 Emit(kArmStoreWriteBarrier, NULL, g.UseFixed(base, r4), | 364 Emit(kArmStoreWriteBarrier, NULL, g.UseFixed(base, r4), |
365 g.UseFixed(index, r5), g.UseFixed(value, r6), ARRAY_SIZE(temps), | 365 g.UseFixed(index, r5), g.UseFixed(value, r6), ARRAY_SIZE(temps), |
366 temps); | 366 temps); |
367 return; | 367 return; |
368 } | 368 } |
369 ASSERT_EQ(kNoWriteBarrier, store_rep.write_barrier_kind); | 369 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind); |
370 InstructionOperand* val = rep == kMachineFloat64 ? g.UseDoubleRegister(value) | 370 InstructionOperand* val = rep == kMachineFloat64 ? g.UseDoubleRegister(value) |
371 : g.UseRegister(value); | 371 : g.UseRegister(value); |
372 | 372 |
373 ArchOpcode opcode; | 373 ArchOpcode opcode; |
374 switch (rep) { | 374 switch (rep) { |
375 case kMachineFloat64: | 375 case kMachineFloat64: |
376 opcode = kArmFloat64Store; | 376 opcode = kArmFloat64Store; |
377 break; | 377 break; |
378 case kMachineWord8: | 378 case kMachineWord8: |
379 opcode = kArmStoreWord8; | 379 opcode = kArmStoreWord8; |
(...skipping 55 matching lines...)
435 if (mright.right().Is(-1)) { | 435 if (mright.right().Is(-1)) { |
436 EmitBic(this, node, m.left().node(), mright.left().node()); | 436 EmitBic(this, node, m.left().node(), mright.left().node()); |
437 return; | 437 return; |
438 } | 438 } |
439 } | 439 } |
440 if (CpuFeatures::IsSupported(ARMv7) && m.right().HasValue()) { | 440 if (CpuFeatures::IsSupported(ARMv7) && m.right().HasValue()) { |
441 uint32_t value = m.right().Value(); | 441 uint32_t value = m.right().Value(); |
442 uint32_t width = CompilerIntrinsics::CountSetBits(value); | 442 uint32_t width = CompilerIntrinsics::CountSetBits(value); |
443 uint32_t msb = CompilerIntrinsics::CountLeadingZeros(value); | 443 uint32_t msb = CompilerIntrinsics::CountLeadingZeros(value); |
444 if (width != 0 && msb + width == 32) { | 444 if (width != 0 && msb + width == 32) { |
445 ASSERT_EQ(0, CompilerIntrinsics::CountTrailingZeros(value)); | 445 DCHECK_EQ(0, CompilerIntrinsics::CountTrailingZeros(value)); |
446 if (m.left().IsWord32Shr()) { | 446 if (m.left().IsWord32Shr()) { |
447 Int32BinopMatcher mleft(m.left().node()); | 447 Int32BinopMatcher mleft(m.left().node()); |
448 if (mleft.right().IsInRange(0, 31)) { | 448 if (mleft.right().IsInRange(0, 31)) { |
449 Emit(kArmUbfx, g.DefineAsRegister(node), | 449 Emit(kArmUbfx, g.DefineAsRegister(node), |
450 g.UseRegister(mleft.left().node()), | 450 g.UseRegister(mleft.left().node()), |
451 g.UseImmediate(mleft.right().node()), g.TempImmediate(width)); | 451 g.UseImmediate(mleft.right().node()), g.TempImmediate(width)); |
452 return; | 452 return; |
453 } | 453 } |
454 } | 454 } |
455 Emit(kArmUbfx, g.DefineAsRegister(node), g.UseRegister(m.left().node()), | 455 Emit(kArmUbfx, g.DefineAsRegister(node), g.UseRegister(m.left().node()), |
(...skipping 71 matching lines...)
527 Int32BinopMatcher m(node); | 527 Int32BinopMatcher m(node); |
528 if (CpuFeatures::IsSupported(ARMv7) && m.left().IsWord32And() && | 528 if (CpuFeatures::IsSupported(ARMv7) && m.left().IsWord32And() && |
529 m.right().IsInRange(0, 31)) { | 529 m.right().IsInRange(0, 31)) { |
530 int32_t lsb = m.right().Value(); | 530 int32_t lsb = m.right().Value(); |
531 Int32BinopMatcher mleft(m.left().node()); | 531 Int32BinopMatcher mleft(m.left().node()); |
532 if (mleft.right().HasValue()) { | 532 if (mleft.right().HasValue()) { |
533 uint32_t value = (mleft.right().Value() >> lsb) << lsb; | 533 uint32_t value = (mleft.right().Value() >> lsb) << lsb; |
534 uint32_t width = CompilerIntrinsics::CountSetBits(value); | 534 uint32_t width = CompilerIntrinsics::CountSetBits(value); |
535 uint32_t msb = CompilerIntrinsics::CountLeadingZeros(value); | 535 uint32_t msb = CompilerIntrinsics::CountLeadingZeros(value); |
536 if (msb + width + lsb == 32) { | 536 if (msb + width + lsb == 32) { |
537 ASSERT_EQ(lsb, CompilerIntrinsics::CountTrailingZeros(value)); | 537 DCHECK_EQ(lsb, CompilerIntrinsics::CountTrailingZeros(value)); |
538 Emit(kArmUbfx, g.DefineAsRegister(node), | 538 Emit(kArmUbfx, g.DefineAsRegister(node), |
539 g.UseRegister(mleft.left().node()), g.TempImmediate(lsb), | 539 g.UseRegister(mleft.left().node()), g.TempImmediate(lsb), |
540 g.TempImmediate(width)); | 540 g.TempImmediate(width)); |
541 return; | 541 return; |
542 } | 542 } |
543 } | 543 } |
544 } | 544 } |
545 VisitShift(this, node, TryMatchLSR); | 545 VisitShift(this, node, TryMatchLSR); |
546 } | 546 } |
547 | 547 |
(...skipping 262 matching lines...)
810 return; | 810 return; |
811 } | 811 } |
812 | 812 |
813 // Emit the call instruction. | 813 // Emit the call instruction. |
814 Instruction* call_instr = | 814 Instruction* call_instr = |
815 Emit(opcode, buffer.output_count, buffer.outputs, | 815 Emit(opcode, buffer.output_count, buffer.outputs, |
816 buffer.fixed_and_control_count(), buffer.fixed_and_control_args); | 816 buffer.fixed_and_control_count(), buffer.fixed_and_control_args); |
817 | 817 |
818 call_instr->MarkAsCall(); | 818 call_instr->MarkAsCall(); |
819 if (deoptimization != NULL) { | 819 if (deoptimization != NULL) { |
820 ASSERT(continuation != NULL); | 820 DCHECK(continuation != NULL); |
821 call_instr->MarkAsControl(); | 821 call_instr->MarkAsControl(); |
822 } | 822 } |
823 | 823 |
824 // Caller clean up of stack for C-style calls. | 824 // Caller clean up of stack for C-style calls. |
825 if (descriptor->kind() == CallDescriptor::kCallAddress && | 825 if (descriptor->kind() == CallDescriptor::kCallAddress && |
826 buffer.pushed_count > 0) { | 826 buffer.pushed_count > 0) { |
827 ASSERT(deoptimization == NULL && continuation == NULL); | 827 DCHECK(deoptimization == NULL && continuation == NULL); |
828 Emit(kArmDrop | MiscField::encode(buffer.pushed_count), NULL); | 828 Emit(kArmDrop | MiscField::encode(buffer.pushed_count), NULL); |
829 } | 829 } |
830 } | 830 } |
831 | 831 |
832 | 832 |
833 void InstructionSelector::VisitInt32AddWithOverflow(Node* node, | 833 void InstructionSelector::VisitInt32AddWithOverflow(Node* node, |
834 FlagsContinuation* cont) { | 834 FlagsContinuation* cont) { |
835 VisitBinop(this, node, kArmAdd, kArmAdd, cont); | 835 VisitBinop(this, node, kArmAdd, kArmAdd, cont); |
836 } | 836 } |
837 | 837 |
(...skipping 27 matching lines...)
865 } else { | 865 } else { |
866 opcode |= AddressingModeField::encode(kMode_Operand2_R); | 866 opcode |= AddressingModeField::encode(kMode_Operand2_R); |
867 inputs[input_count++] = g.UseRegister(m.left().node()); | 867 inputs[input_count++] = g.UseRegister(m.left().node()); |
868 inputs[input_count++] = g.UseRegister(m.right().node()); | 868 inputs[input_count++] = g.UseRegister(m.right().node()); |
869 } | 869 } |
870 | 870 |
871 if (cont->IsBranch()) { | 871 if (cont->IsBranch()) { |
872 inputs[input_count++] = g.Label(cont->true_block()); | 872 inputs[input_count++] = g.Label(cont->true_block()); |
873 inputs[input_count++] = g.Label(cont->false_block()); | 873 inputs[input_count++] = g.Label(cont->false_block()); |
874 } else { | 874 } else { |
875 ASSERT(cont->IsSet()); | 875 DCHECK(cont->IsSet()); |
876 outputs[output_count++] = g.DefineAsRegister(cont->result()); | 876 outputs[output_count++] = g.DefineAsRegister(cont->result()); |
877 } | 877 } |
878 | 878 |
879 ASSERT_NE(0, input_count); | 879 DCHECK_NE(0, input_count); |
880 ASSERT_GE(ARRAY_SIZE(inputs), input_count); | 880 DCHECK_GE(ARRAY_SIZE(inputs), input_count); |
881 ASSERT_GE(ARRAY_SIZE(outputs), output_count); | 881 DCHECK_GE(ARRAY_SIZE(outputs), output_count); |
882 | 882 |
883 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, | 883 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count, |
884 outputs, input_count, inputs); | 884 outputs, input_count, inputs); |
885 if (cont->IsBranch()) instr->MarkAsControl(); | 885 if (cont->IsBranch()) instr->MarkAsControl(); |
886 } | 886 } |
887 | 887 |
888 | 888 |
889 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { | 889 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) { |
890 switch (node->opcode()) { | 890 switch (node->opcode()) { |
891 case IrOpcode::kInt32Add: | 891 case IrOpcode::kInt32Add: |
(...skipping 32 matching lines...)
924 | 924 |
925 void InstructionSelector::VisitFloat64Compare(Node* node, | 925 void InstructionSelector::VisitFloat64Compare(Node* node, |
926 FlagsContinuation* cont) { | 926 FlagsContinuation* cont) { |
927 ArmOperandGenerator g(this); | 927 ArmOperandGenerator g(this); |
928 Float64BinopMatcher m(node); | 928 Float64BinopMatcher m(node); |
929 if (cont->IsBranch()) { | 929 if (cont->IsBranch()) { |
930 Emit(cont->Encode(kArmVcmpF64), NULL, g.UseDoubleRegister(m.left().node()), | 930 Emit(cont->Encode(kArmVcmpF64), NULL, g.UseDoubleRegister(m.left().node()), |
931 g.UseDoubleRegister(m.right().node()), g.Label(cont->true_block()), | 931 g.UseDoubleRegister(m.right().node()), g.Label(cont->true_block()), |
932 g.Label(cont->false_block()))->MarkAsControl(); | 932 g.Label(cont->false_block()))->MarkAsControl(); |
933 } else { | 933 } else { |
934 ASSERT(cont->IsSet()); | 934 DCHECK(cont->IsSet()); |
935 Emit(cont->Encode(kArmVcmpF64), g.DefineAsRegister(cont->result()), | 935 Emit(cont->Encode(kArmVcmpF64), g.DefineAsRegister(cont->result()), |
936 g.UseDoubleRegister(m.left().node()), | 936 g.UseDoubleRegister(m.left().node()), |
937 g.UseDoubleRegister(m.right().node())); | 937 g.UseDoubleRegister(m.right().node())); |
938 } | 938 } |
939 } | 939 } |
940 | 940 |
941 } // namespace compiler | 941 } // namespace compiler |
942 } // namespace internal | 942 } // namespace internal |
943 } // namespace v8 | 943 } // namespace v8 |
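
For reference, the DCHECK* macros this change switches to behave like the ASSERT* macros they replace: the checks are compiled in for debug builds and become no-ops in release builds. The sketch below is a simplified, hypothetical illustration of that idea, not the actual definitions in V8's logging headers (which additionally report the operand values on failure for the comparison variants).

```cpp
// Simplified, hypothetical illustration of debug-only check macros of the
// kind this change switches to (not V8's real implementation).
#include <cstdio>
#include <cstdlib>

#ifdef DEBUG
#define DCHECK(condition)                                               \
  do {                                                                  \
    if (!(condition)) {                                                 \
      std::fprintf(stderr, "Debug check failed: %s\n", #condition);     \
      std::abort();                                                     \
    }                                                                   \
  } while (false)
#else
// In release builds the check disappears entirely.
#define DCHECK(condition) ((void)0)
#endif

// Comparison variants expressed as thin wrappers over DCHECK.
#define DCHECK_EQ(a, b) DCHECK((a) == (b))
#define DCHECK_NE(a, b) DCHECK((a) != (b))
#define DCHECK_GE(a, b) DCHECK((a) >= (b))

int main() {
  int input_count = 2;
  DCHECK_NE(0, input_count);  // holds in debug builds, no-op in release
  DCHECK_GE(4, input_count);
  return 0;
}
```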