OLD | NEW |
1 //===- subzero/src/IceTargetLoweringMIPS32.cpp - MIPS32 lowering ----------===// | 1 //===- subzero/src/IceTargetLoweringMIPS32.cpp - MIPS32 lowering ----------===// |
2 // | 2 // |
3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
4 // | 4 // |
5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
7 // | 7 // |
8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
9 /// | 9 /// |
10 /// \file | 10 /// \file |
(...skipping 539 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
550 } | 550 } |
551 | 551 |
552 void TargetMIPS32::lowerAlloca(const InstAlloca *Inst) { | 552 void TargetMIPS32::lowerAlloca(const InstAlloca *Inst) { |
553 UsesFramePointer = true; | 553 UsesFramePointer = true; |
554 // Conservatively require the stack to be aligned. Some stack adjustment | 554 // Conservatively require the stack to be aligned. Some stack adjustment |
555 // operations implemented below assume that the stack is aligned before the | 555 // operations implemented below assume that the stack is aligned before the |
556 // alloca. All the alloca code ensures that the stack alignment is preserved | 556 // alloca. All the alloca code ensures that the stack alignment is preserved |
557 // after the alloca. The stack alignment restriction can be relaxed in some | 557 // after the alloca. The stack alignment restriction can be relaxed in some |
558 // cases. | 558 // cases. |
559 NeedsStackAlignment = true; | 559 NeedsStackAlignment = true; |
560 (void)Inst; | 560 UnimplementedLoweringError(this, Inst); |
561 UnimplementedError(Func->getContext()->getFlags()); | |
562 } | 561 } |
563 | 562 |
564 void TargetMIPS32::lowerArithmetic(const InstArithmetic *Inst) { | 563 void TargetMIPS32::lowerArithmetic(const InstArithmetic *Inst) { |
565 Variable *Dest = Inst->getDest(); | 564 Variable *Dest = Inst->getDest(); |
566 Operand *Src0 = legalizeUndef(Inst->getSrc(0)); | 565 // We need to signal all the UnimplementedLoweringError errors before any |
567 Operand *Src1 = legalizeUndef(Inst->getSrc(1)); | 566 // legalization into new variables, otherwise Om1 register allocation may fail |
| 567 // when it sees variables that are defined but not used. |
568 if (Dest->getType() == IceType_i64) { | 568 if (Dest->getType() == IceType_i64) { |
569 // TODO(reed kotler): fakedef needed for now until all cases are implemented | 569 UnimplementedLoweringError(this, Inst); |
570 auto *DestLo = llvm::cast<Variable>(loOperand(Dest)); | |
571 auto *DestHi = llvm::cast<Variable>(hiOperand(Dest)); | |
572 Context.insert<InstFakeDef>(DestLo); | |
573 Context.insert<InstFakeDef>(DestHi); | |
574 UnimplementedError(Func->getContext()->getFlags()); | |
575 return; | 570 return; |
576 } | 571 } |
577 if (isVectorType(Dest->getType())) { | 572 if (isVectorType(Dest->getType())) { |
578 Context.insert<InstFakeDef>(Dest); | 573 UnimplementedLoweringError(this, Inst); |
579 UnimplementedError(Func->getContext()->getFlags()); | |
580 return; | 574 return; |
581 } | 575 } |
582 // Dest->getType() is non-i64 scalar | 576 switch (Inst->getOp()) { |
| 577 default: |
| 578 break; |
| 579 case InstArithmetic::Shl: |
| 580 case InstArithmetic::Lshr: |
| 581 case InstArithmetic::Ashr: |
| 582 case InstArithmetic::Udiv: |
| 583 case InstArithmetic::Sdiv: |
| 584 case InstArithmetic::Urem: |
| 585 case InstArithmetic::Srem: |
| 586 case InstArithmetic::Fadd: |
| 587 case InstArithmetic::Fsub: |
| 588 case InstArithmetic::Fmul: |
| 589 case InstArithmetic::Fdiv: |
| 590 case InstArithmetic::Frem: |
| 591 UnimplementedLoweringError(this, Inst); |
| 592 return; |
| 593 } |
| 594 |
| 595 // At this point Dest->getType() is non-i64 scalar |
| 596 |
583 Variable *T = makeReg(Dest->getType()); | 597 Variable *T = makeReg(Dest->getType()); |
| 598 Operand *Src0 = legalizeUndef(Inst->getSrc(0)); |
| 599 Operand *Src1 = legalizeUndef(Inst->getSrc(1)); |
584 Variable *Src0R = legalizeToReg(Src0); | 600 Variable *Src0R = legalizeToReg(Src0); |
585 Variable *Src1R = legalizeToReg(Src1); | 601 Variable *Src1R = legalizeToReg(Src1); |
| 602 |
586 switch (Inst->getOp()) { | 603 switch (Inst->getOp()) { |
587 case InstArithmetic::_num: | 604 case InstArithmetic::_num: |
588 break; | 605 break; |
589 case InstArithmetic::Add: | 606 case InstArithmetic::Add: |
590 _add(T, Src0R, Src1R); | 607 _add(T, Src0R, Src1R); |
591 _mov(Dest, T); | 608 _mov(Dest, T); |
592 return; | 609 return; |
593 case InstArithmetic::And: | 610 case InstArithmetic::And: |
594 _and(T, Src0R, Src1R); | 611 _and(T, Src0R, Src1R); |
595 _mov(Dest, T); | 612 _mov(Dest, T); |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
629 break; | 646 break; |
630 case InstArithmetic::Fsub: | 647 case InstArithmetic::Fsub: |
631 break; | 648 break; |
632 case InstArithmetic::Fmul: | 649 case InstArithmetic::Fmul: |
633 break; | 650 break; |
634 case InstArithmetic::Fdiv: | 651 case InstArithmetic::Fdiv: |
635 break; | 652 break; |
636 case InstArithmetic::Frem: | 653 case InstArithmetic::Frem: |
637 break; | 654 break; |
638 } | 655 } |
639 // TODO(reed kotler): | |
640 // fakedef and fakeuse needed for now until all cases are implemented | |
641 Context.insert<InstFakeUse>(Src0R); | |
642 Context.insert<InstFakeUse>(Src1R); | |
643 Context.insert<InstFakeDef>(Dest); | |
644 UnimplementedError(Func->getContext()->getFlags()); | |
645 } | 656 } |
646 | 657 |
647 void TargetMIPS32::lowerAssign(const InstAssign *Inst) { | 658 void TargetMIPS32::lowerAssign(const InstAssign *Inst) { |
648 Variable *Dest = Inst->getDest(); | 659 Variable *Dest = Inst->getDest(); |
649 Operand *Src0 = Inst->getSrc(0); | 660 Operand *Src0 = Inst->getSrc(0); |
650 assert(Dest->getType() == Src0->getType()); | 661 assert(Dest->getType() == Src0->getType()); |
651 if (Dest->getType() == IceType_i64) { | 662 if (Dest->getType() == IceType_i64) { |
652 Src0 = legalizeUndef(Src0); | 663 Src0 = legalizeUndef(Src0); |
653 Operand *Src0Lo = legalize(loOperand(Src0), Legal_Reg); | 664 Operand *Src0Lo = legalize(loOperand(Src0), Legal_Reg); |
654 Operand *Src0Hi = legalize(hiOperand(Src0), Legal_Reg); | 665 Operand *Src0Hi = legalize(hiOperand(Src0), Legal_Reg); |
(...skipping 13 matching lines...) Expand all Loading... |
668 // into a Variable with the same register assignment. This especially | 679 // into a Variable with the same register assignment. This especially |
669 // helps allow the use of Flex operands. | 680 // helps allow the use of Flex operands. |
670 SrcR = legalize(Src0, Legal_Reg, Dest->getRegNum()); | 681 SrcR = legalize(Src0, Legal_Reg, Dest->getRegNum()); |
671 } else { | 682 } else { |
672 // Dest could be a stack operand. Since we could potentially need | 683 // Dest could be a stack operand. Since we could potentially need |
673 // to do a Store (and store can only have Register operands), | 684 // to do a Store (and store can only have Register operands), |
674 // legalize this to a register. | 685 // legalize this to a register. |
675 SrcR = legalize(Src0, Legal_Reg); | 686 SrcR = legalize(Src0, Legal_Reg); |
676 } | 687 } |
677 if (isVectorType(Dest->getType())) { | 688 if (isVectorType(Dest->getType())) { |
678 UnimplementedError(Func->getContext()->getFlags()); | 689 UnimplementedLoweringError(this, Inst); |
679 } else { | 690 } else { |
680 _mov(Dest, SrcR); | 691 _mov(Dest, SrcR); |
681 } | 692 } |
682 } | 693 } |
683 } | 694 } |
684 | 695 |
685 void TargetMIPS32::lowerBr(const InstBr *Inst) { | 696 void TargetMIPS32::lowerBr(const InstBr *Inst) { |
686 (void)Inst; | 697 UnimplementedLoweringError(this, Inst); |
687 UnimplementedError(Func->getContext()->getFlags()); | |
688 } | 698 } |
689 | 699 |
690 void TargetMIPS32::lowerCall(const InstCall *Inst) { | 700 void TargetMIPS32::lowerCall(const InstCall *Inst) { |
691 (void)Inst; | 701 UnimplementedLoweringError(this, Inst); |
692 UnimplementedError(Func->getContext()->getFlags()); | |
693 } | 702 } |
694 | 703 |
695 void TargetMIPS32::lowerCast(const InstCast *Inst) { | 704 void TargetMIPS32::lowerCast(const InstCast *Inst) { |
696 InstCast::OpKind CastKind = Inst->getCastKind(); | 705 InstCast::OpKind CastKind = Inst->getCastKind(); |
697 switch (CastKind) { | 706 switch (CastKind) { |
698 default: | 707 default: |
699 Func->setError("Cast type not supported"); | 708 Func->setError("Cast type not supported"); |
700 return; | 709 return; |
701 case InstCast::Sext: { | 710 case InstCast::Sext: { |
702 UnimplementedError(Func->getContext()->getFlags()); | 711 UnimplementedLoweringError(this, Inst); |
703 break; | 712 break; |
704 } | 713 } |
705 case InstCast::Zext: { | 714 case InstCast::Zext: { |
706 UnimplementedError(Func->getContext()->getFlags()); | 715 UnimplementedLoweringError(this, Inst); |
707 break; | 716 break; |
708 } | 717 } |
709 case InstCast::Trunc: { | 718 case InstCast::Trunc: { |
710 UnimplementedError(Func->getContext()->getFlags()); | 719 UnimplementedLoweringError(this, Inst); |
711 break; | 720 break; |
712 } | 721 } |
713 case InstCast::Fptrunc: | 722 case InstCast::Fptrunc: |
714 UnimplementedError(Func->getContext()->getFlags()); | 723 UnimplementedLoweringError(this, Inst); |
715 break; | 724 break; |
716 case InstCast::Fpext: { | 725 case InstCast::Fpext: { |
717 UnimplementedError(Func->getContext()->getFlags()); | 726 UnimplementedLoweringError(this, Inst); |
718 break; | 727 break; |
719 } | 728 } |
720 case InstCast::Fptosi: | 729 case InstCast::Fptosi: |
721 UnimplementedError(Func->getContext()->getFlags()); | 730 UnimplementedLoweringError(this, Inst); |
722 break; | 731 break; |
723 case InstCast::Fptoui: | 732 case InstCast::Fptoui: |
724 UnimplementedError(Func->getContext()->getFlags()); | 733 UnimplementedLoweringError(this, Inst); |
725 break; | 734 break; |
726 case InstCast::Sitofp: | 735 case InstCast::Sitofp: |
727 UnimplementedError(Func->getContext()->getFlags()); | 736 UnimplementedLoweringError(this, Inst); |
728 break; | 737 break; |
729 case InstCast::Uitofp: { | 738 case InstCast::Uitofp: { |
730 UnimplementedError(Func->getContext()->getFlags()); | 739 UnimplementedLoweringError(this, Inst); |
731 break; | 740 break; |
732 } | 741 } |
733 case InstCast::Bitcast: { | 742 case InstCast::Bitcast: { |
734 UnimplementedError(Func->getContext()->getFlags()); | 743 UnimplementedLoweringError(this, Inst); |
735 break; | 744 break; |
736 } | 745 } |
737 } | 746 } |
738 } | 747 } |
739 | 748 |
740 void TargetMIPS32::lowerExtractElement(const InstExtractElement *Inst) { | 749 void TargetMIPS32::lowerExtractElement(const InstExtractElement *Inst) { |
741 (void)Inst; | 750 UnimplementedLoweringError(this, Inst); |
742 UnimplementedError(Func->getContext()->getFlags()); | |
743 } | 751 } |
744 | 752 |
745 void TargetMIPS32::lowerFcmp(const InstFcmp *Inst) { | 753 void TargetMIPS32::lowerFcmp(const InstFcmp *Inst) { |
746 (void)Inst; | 754 UnimplementedLoweringError(this, Inst); |
747 UnimplementedError(Func->getContext()->getFlags()); | |
748 } | 755 } |
749 | 756 |
750 void TargetMIPS32::lowerIcmp(const InstIcmp *Inst) { | 757 void TargetMIPS32::lowerIcmp(const InstIcmp *Inst) { |
751 (void)Inst; | 758 UnimplementedLoweringError(this, Inst); |
752 UnimplementedError(Func->getContext()->getFlags()); | |
753 } | 759 } |
754 | 760 |
755 void TargetMIPS32::lowerInsertElement(const InstInsertElement *Inst) { | 761 void TargetMIPS32::lowerInsertElement(const InstInsertElement *Inst) { |
756 (void)Inst; | 762 UnimplementedLoweringError(this, Inst); |
757 UnimplementedError(Func->getContext()->getFlags()); | |
758 } | 763 } |
759 | 764 |
760 void TargetMIPS32::lowerIntrinsicCall(const InstIntrinsicCall *Instr) { | 765 void TargetMIPS32::lowerIntrinsicCall(const InstIntrinsicCall *Instr) { |
761 switch (Instr->getIntrinsicInfo().ID) { | 766 switch (Instr->getIntrinsicInfo().ID) { |
762 case Intrinsics::AtomicCmpxchg: { | 767 case Intrinsics::AtomicCmpxchg: { |
763 UnimplementedError(Func->getContext()->getFlags()); | 768 UnimplementedLoweringError(this, Instr); |
764 return; | 769 return; |
765 } | 770 } |
766 case Intrinsics::AtomicFence: | 771 case Intrinsics::AtomicFence: |
767 UnimplementedError(Func->getContext()->getFlags()); | 772 UnimplementedLoweringError(this, Instr); |
768 return; | 773 return; |
769 case Intrinsics::AtomicFenceAll: | 774 case Intrinsics::AtomicFenceAll: |
770 // NOTE: FenceAll should prevent any load/store from being moved across the | 775 // NOTE: FenceAll should prevent any load/store from being moved across the |
771 // fence (both atomic and non-atomic). The InstMIPS32Mfence instruction is | 776 // fence (both atomic and non-atomic). The InstMIPS32Mfence instruction is |
772 // currently marked coarsely as "HasSideEffects". | 777 // currently marked coarsely as "HasSideEffects". |
773 UnimplementedError(Func->getContext()->getFlags()); | 778 UnimplementedLoweringError(this, Instr); |
774 return; | 779 return; |
775 case Intrinsics::AtomicIsLockFree: { | 780 case Intrinsics::AtomicIsLockFree: { |
776 UnimplementedError(Func->getContext()->getFlags()); | 781 UnimplementedLoweringError(this, Instr); |
777 return; | 782 return; |
778 } | 783 } |
779 case Intrinsics::AtomicLoad: { | 784 case Intrinsics::AtomicLoad: { |
780 UnimplementedError(Func->getContext()->getFlags()); | 785 UnimplementedLoweringError(this, Instr); |
781 return; | 786 return; |
782 } | 787 } |
783 case Intrinsics::AtomicRMW: | 788 case Intrinsics::AtomicRMW: |
784 UnimplementedError(Func->getContext()->getFlags()); | 789 UnimplementedLoweringError(this, Instr); |
785 return; | 790 return; |
786 case Intrinsics::AtomicStore: { | 791 case Intrinsics::AtomicStore: { |
787 UnimplementedError(Func->getContext()->getFlags()); | 792 UnimplementedLoweringError(this, Instr); |
788 return; | 793 return; |
789 } | 794 } |
790 case Intrinsics::Bswap: { | 795 case Intrinsics::Bswap: { |
791 UnimplementedError(Func->getContext()->getFlags()); | 796 UnimplementedLoweringError(this, Instr); |
792 return; | 797 return; |
793 } | 798 } |
794 case Intrinsics::Ctpop: { | 799 case Intrinsics::Ctpop: { |
795 UnimplementedError(Func->getContext()->getFlags()); | 800 UnimplementedLoweringError(this, Instr); |
796 return; | 801 return; |
797 } | 802 } |
798 case Intrinsics::Ctlz: { | 803 case Intrinsics::Ctlz: { |
799 UnimplementedError(Func->getContext()->getFlags()); | 804 UnimplementedLoweringError(this, Instr); |
800 return; | 805 return; |
801 } | 806 } |
802 case Intrinsics::Cttz: { | 807 case Intrinsics::Cttz: { |
803 UnimplementedError(Func->getContext()->getFlags()); | 808 UnimplementedLoweringError(this, Instr); |
804 return; | 809 return; |
805 } | 810 } |
806 case Intrinsics::Fabs: { | 811 case Intrinsics::Fabs: { |
807 UnimplementedError(Func->getContext()->getFlags()); | 812 UnimplementedLoweringError(this, Instr); |
808 return; | 813 return; |
809 } | 814 } |
810 case Intrinsics::Longjmp: { | 815 case Intrinsics::Longjmp: { |
811 InstCall *Call = makeHelperCall(H_call_longjmp, nullptr, 2); | 816 InstCall *Call = makeHelperCall(H_call_longjmp, nullptr, 2); |
812 Call->addArg(Instr->getArg(0)); | 817 Call->addArg(Instr->getArg(0)); |
813 Call->addArg(Instr->getArg(1)); | 818 Call->addArg(Instr->getArg(1)); |
814 lowerCall(Call); | 819 lowerCall(Call); |
815 return; | 820 return; |
816 } | 821 } |
817 case Intrinsics::Memcpy: { | 822 case Intrinsics::Memcpy: { |
(...skipping 23 matching lines...) Expand all Loading... |
841 lowerCast(InstCast::create(Func, InstCast::Zext, ValExt, ValOp)); | 846 lowerCast(InstCast::create(Func, InstCast::Zext, ValExt, ValOp)); |
842 InstCall *Call = makeHelperCall(H_call_memset, nullptr, 3); | 847 InstCall *Call = makeHelperCall(H_call_memset, nullptr, 3); |
843 Call->addArg(Instr->getArg(0)); | 848 Call->addArg(Instr->getArg(0)); |
844 Call->addArg(ValExt); | 849 Call->addArg(ValExt); |
845 Call->addArg(Instr->getArg(2)); | 850 Call->addArg(Instr->getArg(2)); |
846 lowerCall(Call); | 851 lowerCall(Call); |
847 return; | 852 return; |
848 } | 853 } |
849 case Intrinsics::NaClReadTP: { | 854 case Intrinsics::NaClReadTP: { |
850 if (Ctx->getFlags().getUseSandboxing()) { | 855 if (Ctx->getFlags().getUseSandboxing()) { |
851 UnimplementedError(Func->getContext()->getFlags()); | 856 UnimplementedLoweringError(this, Instr); |
852 } else { | 857 } else { |
853 InstCall *Call = makeHelperCall(H_call_read_tp, Instr->getDest(), 0); | 858 InstCall *Call = makeHelperCall(H_call_read_tp, Instr->getDest(), 0); |
854 lowerCall(Call); | 859 lowerCall(Call); |
855 } | 860 } |
856 return; | 861 return; |
857 } | 862 } |
858 case Intrinsics::Setjmp: { | 863 case Intrinsics::Setjmp: { |
859 InstCall *Call = makeHelperCall(H_call_setjmp, Instr->getDest(), 1); | 864 InstCall *Call = makeHelperCall(H_call_setjmp, Instr->getDest(), 1); |
860 Call->addArg(Instr->getArg(0)); | 865 Call->addArg(Instr->getArg(0)); |
861 lowerCall(Call); | 866 lowerCall(Call); |
862 return; | 867 return; |
863 } | 868 } |
864 case Intrinsics::Sqrt: { | 869 case Intrinsics::Sqrt: { |
865 UnimplementedError(Func->getContext()->getFlags()); | 870 UnimplementedLoweringError(this, Instr); |
866 return; | 871 return; |
867 } | 872 } |
868 case Intrinsics::Stacksave: { | 873 case Intrinsics::Stacksave: { |
869 UnimplementedError(Func->getContext()->getFlags()); | 874 UnimplementedLoweringError(this, Instr); |
870 return; | 875 return; |
871 } | 876 } |
872 case Intrinsics::Stackrestore: { | 877 case Intrinsics::Stackrestore: { |
873 UnimplementedError(Func->getContext()->getFlags()); | 878 UnimplementedLoweringError(this, Instr); |
874 return; | 879 return; |
875 } | 880 } |
876 case Intrinsics::Trap: | 881 case Intrinsics::Trap: |
877 UnimplementedError(Func->getContext()->getFlags()); | 882 UnimplementedLoweringError(this, Instr); |
878 return; | 883 return; |
879 case Intrinsics::UnknownIntrinsic: | 884 case Intrinsics::UnknownIntrinsic: |
880 Func->setError("Should not be lowering UnknownIntrinsic"); | 885 Func->setError("Should not be lowering UnknownIntrinsic"); |
881 return; | 886 return; |
882 } | 887 } |
883 return; | 888 return; |
884 } | 889 } |
885 | 890 |
886 void TargetMIPS32::lowerLoad(const InstLoad *Inst) { | 891 void TargetMIPS32::lowerLoad(const InstLoad *Inst) { |
887 (void)Inst; | 892 UnimplementedLoweringError(this, Inst); |
888 UnimplementedError(Func->getContext()->getFlags()); | |
889 } | 893 } |
890 | 894 |
891 void TargetMIPS32::doAddressOptLoad() { | 895 void TargetMIPS32::doAddressOptLoad() { |
892 UnimplementedError(Func->getContext()->getFlags()); | 896 UnimplementedError(Func->getContext()->getFlags()); |
893 } | 897 } |
894 | 898 |
895 void TargetMIPS32::randomlyInsertNop(float Probability, | 899 void TargetMIPS32::randomlyInsertNop(float Probability, |
896 RandomNumberGenerator &RNG) { | 900 RandomNumberGenerator &RNG) { |
897 RandomNumberGeneratorWrapper RNGW(RNG); | 901 RandomNumberGeneratorWrapper RNGW(RNG); |
898 if (RNGW.getTrueWithProbability(Probability)) { | 902 if (RNGW.getTrueWithProbability(Probability)) { |
(...skipping 23 matching lines...) Expand all Loading... |
922 case IceType_i64: { | 926 case IceType_i64: { |
923 Src0 = legalizeUndef(Src0); | 927 Src0 = legalizeUndef(Src0); |
924 Variable *R0 = legalizeToReg(loOperand(Src0), RegMIPS32::Reg_V0); | 928 Variable *R0 = legalizeToReg(loOperand(Src0), RegMIPS32::Reg_V0); |
925 Variable *R1 = legalizeToReg(hiOperand(Src0), RegMIPS32::Reg_V1); | 929 Variable *R1 = legalizeToReg(hiOperand(Src0), RegMIPS32::Reg_V1); |
926 Reg = R0; | 930 Reg = R0; |
927 Context.insert<InstFakeUse>(R1); | 931 Context.insert<InstFakeUse>(R1); |
928 break; | 932 break; |
929 } | 933 } |
930 | 934 |
931 default: | 935 default: |
932 UnimplementedError(Func->getContext()->getFlags()); | 936 UnimplementedLoweringError(this, Inst); |
933 } | 937 } |
934 } | 938 } |
935 _ret(getPhysicalRegister(RegMIPS32::Reg_RA), Reg); | 939 _ret(getPhysicalRegister(RegMIPS32::Reg_RA), Reg); |
936 } | 940 } |
937 | 941 |
938 void TargetMIPS32::lowerSelect(const InstSelect *Inst) { | 942 void TargetMIPS32::lowerSelect(const InstSelect *Inst) { |
939 (void)Inst; | 943 UnimplementedLoweringError(this, Inst); |
940 UnimplementedError(Func->getContext()->getFlags()); | |
941 } | 944 } |
942 | 945 |
943 void TargetMIPS32::lowerStore(const InstStore *Inst) { | 946 void TargetMIPS32::lowerStore(const InstStore *Inst) { |
944 (void)Inst; | 947 UnimplementedLoweringError(this, Inst); |
945 UnimplementedError(Func->getContext()->getFlags()); | |
946 } | 948 } |
947 | 949 |
948 void TargetMIPS32::doAddressOptStore() { | 950 void TargetMIPS32::doAddressOptStore() { |
949 UnimplementedError(Func->getContext()->getFlags()); | 951 UnimplementedError(Func->getContext()->getFlags()); |
950 } | 952 } |
951 | 953 |
952 void TargetMIPS32::lowerSwitch(const InstSwitch *Inst) { | 954 void TargetMIPS32::lowerSwitch(const InstSwitch *Inst) { |
953 (void)Inst; | 955 UnimplementedLoweringError(this, Inst); |
954 UnimplementedError(Func->getContext()->getFlags()); | |
955 } | 956 } |
956 | 957 |
957 void TargetMIPS32::lowerUnreachable(const InstUnreachable * /*Inst*/) { | 958 void TargetMIPS32::lowerUnreachable(const InstUnreachable *Inst) { |
958 UnimplementedError(Func->getContext()->getFlags()); | 959 UnimplementedLoweringError(this, Inst); |
959 } | 960 } |
960 | 961 |
961 // Turn an i64 Phi instruction into a pair of i32 Phi instructions, to preserve | 962 // Turn an i64 Phi instruction into a pair of i32 Phi instructions, to preserve |
962 // integrity of liveness analysis. Undef values are also turned into zeroes, | 963 // integrity of liveness analysis. Undef values are also turned into zeroes, |
963 // since loOperand() and hiOperand() don't expect Undef input. | 964 // since loOperand() and hiOperand() don't expect Undef input. |
964 void TargetMIPS32::prelowerPhis() { | 965 void TargetMIPS32::prelowerPhis() { |
965 PhiLowering::prelowerPhis32Bit<TargetMIPS32>(this, Context.getNode(), Func); | 966 PhiLowering::prelowerPhis32Bit<TargetMIPS32>(this, Context.getNode(), Func); |
966 } | 967 } |
967 | 968 |
968 void TargetMIPS32::postLower() { | 969 void TargetMIPS32::postLower() { |
(...skipping 146 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1115 Str << "\t.set\t" | 1116 Str << "\t.set\t" |
1116 << "nomips16\n"; | 1117 << "nomips16\n"; |
1117 } | 1118 } |
1118 | 1119 |
1119 llvm::SmallBitVector TargetMIPS32::TypeToRegisterSet[IceType_NUM]; | 1120 llvm::SmallBitVector TargetMIPS32::TypeToRegisterSet[IceType_NUM]; |
1120 llvm::SmallBitVector TargetMIPS32::RegisterAliases[RegMIPS32::Reg_NUM]; | 1121 llvm::SmallBitVector TargetMIPS32::RegisterAliases[RegMIPS32::Reg_NUM]; |
1121 llvm::SmallBitVector TargetMIPS32::ScratchRegs; | 1122 llvm::SmallBitVector TargetMIPS32::ScratchRegs; |
1122 | 1123 |
1123 } // end of namespace MIPS32 | 1124 } // end of namespace MIPS32 |
1124 } // end of namespace Ice | 1125 } // end of namespace Ice |
OLD | NEW |