| OLD | NEW | 
|---|---|
| 1 //===- subzero/src/IceInstX86BaseImpl.h - Generic X86 instructions -*- C++ -*=// | 1 //===- subzero/src/IceInstX86BaseImpl.h - Generic X86 instructions -*- C++ -*=// | 
| 2 // | 2 // | 
| 3 //                        The Subzero Code Generator | 3 //                        The Subzero Code Generator | 
| 4 // | 4 // | 
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source | 
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. | 
| 7 // | 7 // | 
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// | 
| 9 /// | 9 /// | 
| 10 /// \file | 10 /// \file | 
| (...skipping 385 matching lines...) | |
| 396   Str << ", "; | 396   Str << ", "; | 
| 397   getData()->dump(Func); | 397   getData()->dump(Func); | 
| 398   Str << ", beacon="; | 398   Str << ", beacon="; | 
| 399   getBeacon()->dump(Func); | 399   getBeacon()->dump(Func); | 
| 400 } | 400 } | 
| 401 | 401 | 
| 402 template <typename TraitsType> | 402 template <typename TraitsType> | 
| 403 void InstImpl<TraitsType>::InstX86GetIP::emit(const Cfg *Func) const { | 403 void InstImpl<TraitsType>::InstX86GetIP::emit(const Cfg *Func) const { | 
| 404   if (!BuildDefs::dump()) | 404   if (!BuildDefs::dump()) | 
| 405     return; | 405     return; | 
|  | 406   const auto *Dest = this->getDest(); | 
|  | 407   assert(Dest->hasReg()); | 
| 406   Ostream &Str = Func->getContext()->getStrEmit(); | 408   Ostream &Str = Func->getContext()->getStrEmit(); | 
| 407   assert(this->getDest()->hasReg()); |  | 
| 408   Str << "\t" | 409   Str << "\t" | 
| 409          "addl\t$_GLOBAL_OFFSET_TABLE_, "; | 410          "call" | 
| 410   this->getDest()->emit(Func); | 411          "\t"; | 
|  | 412   auto *Target = static_cast<TargetLowering *>(Func->getTarget()); | 
|  | 413   Target->emitWithoutPrefix(Target->createGetIPForRegister(Dest)); | 
| 411 } | 414 } | 
| 412 | 415 | 
| 413 template <typename TraitsType> | 416 template <typename TraitsType> | 
| 414 void InstImpl<TraitsType>::InstX86GetIP::emitIAS(const Cfg *Func) const { | 417 void InstImpl<TraitsType>::InstX86GetIP::emitIAS(const Cfg *Func) const { | 
| 415   if (Func->getContext()->getFlags().getOutFileType() == FT_Iasm) { | 418   const auto *Dest = this->getDest(); | 
| 416     // TODO(stichnot): Find a workaround for llvm-mc's inability to handle |  | 
| 417     // something like ".long _GLOBAL_OFFSET_TABLE_ + ." .  One possibility is to |  | 
| 418     // just use hybrid iasm output for this add instruction. |  | 
| 419     llvm::report_fatal_error( |  | 
| 420         "Iasm support for _GLOBAL_OFFSET_TABLE_ not implemented"); |  | 
| 421   } |  | 
| 422   Assembler *Asm = Func->getAssembler<Assembler>(); | 419   Assembler *Asm = Func->getAssembler<Assembler>(); | 
| 423   assert(this->getDest()->hasReg()); | 420   assert(Dest->hasReg()); | 
| 424   GPRRegister Reg = Traits::getEncodedGPR(this->getDest()->getRegNum()); | 421   Asm->call(static_cast<TargetLowering *>(Func->getTarget()) | 
| 425   Constant *GlobalOffsetTable = | 422                 ->createGetIPForRegister(Dest)); | 
| 426       Func->getContext()->getConstantExternSym("_GLOBAL_OFFSET_TABLE_"); |  | 
| 427   AssemblerFixup *Fixup = Asm->createFixup(Traits::FK_GotPC, GlobalOffsetTable); |  | 
| 428   intptr_t OrigPos = Asm->getBufferSize(); |  | 
| 429   constexpr int32_t TempDisp = 0; |  | 
| 430   constexpr int32_t ImmediateWidth = 4; |  | 
| 431   // Emit the add instruction once, in a preliminary fashion, to find its total |  | 
| 432   // size.  TODO(stichnot): IceType_i32 should really be something that |  | 
| 433   // represents the target's pointer type. |  | 
| 434   Asm->add(IceType_i32, Reg, AssemblerImmediate(TempDisp, Fixup)); |  | 
| 435   const int32_t Disp = Asm->getBufferSize() - OrigPos - ImmediateWidth; |  | 
| 436   // Now roll back and emit the add instruction again, this time with the |  | 
| 437   // correct displacement. |  | 
| 438   Asm->setBufferSize(OrigPos); |  | 
| 439   Asm->add(IceType_i32, Reg, AssemblerImmediate(Disp, Fixup)); |  | 
| 440 } | 423 } | 
| 441 | 424 | 
| 442 template <typename TraitsType> | 425 template <typename TraitsType> | 
| 443 void InstImpl<TraitsType>::InstX86GetIP::dump(const Cfg *Func) const { | 426 void InstImpl<TraitsType>::InstX86GetIP::dump(const Cfg *Func) const { | 
| 444   if (!BuildDefs::dump()) | 427   if (!BuildDefs::dump()) | 
| 445     return; | 428     return; | 
| 446   Ostream &Str = Func->getContext()->getStrDump(); | 429   Ostream &Str = Func->getContext()->getStrDump(); | 
| 447   this->getDest()->dump(Func); | 430   this->getDest()->dump(Func); | 
| 448   Str << " = call getIP"; | 431   Str << " = call getIP"; | 
| 449 } | 432 } | 
| (...skipping 138 matching lines...) | |
| 588       // encode, but it isn't a valid sandboxed instruction, and there | 571       // encode, but it isn't a valid sandboxed instruction, and there | 
| 589       // shouldn't be a register allocation issue to jump through a scratch | 572       // shouldn't be a register allocation issue to jump through a scratch | 
| 590       // register, so we don't really need to bother implementing it. | 573       // register, so we don't really need to bother implementing it. | 
| 591       llvm::report_fatal_error("Assembler can't jmp to memory operand"); | 574       llvm::report_fatal_error("Assembler can't jmp to memory operand"); | 
| 592     } | 575     } | 
| 593   } else if (const auto *Mem = llvm::dyn_cast<X86OperandMem>(Target)) { | 576   } else if (const auto *Mem = llvm::dyn_cast<X86OperandMem>(Target)) { | 
| 594     (void)Mem; | 577     (void)Mem; | 
| 595     assert(Mem->getSegmentRegister() == X86OperandMem::DefaultSegment); | 578     assert(Mem->getSegmentRegister() == X86OperandMem::DefaultSegment); | 
| 596     llvm::report_fatal_error("Assembler can't jmp to memory operand"); | 579     llvm::report_fatal_error("Assembler can't jmp to memory operand"); | 
| 597   } else if (const auto *CR = llvm::dyn_cast<ConstantRelocatable>(Target)) { | 580   } else if (const auto *CR = llvm::dyn_cast<ConstantRelocatable>(Target)) { | 
| 598     assert(CR->getOffset() == 0 && "We only support jumping to a function"); |  | 
| 599     Asm->jmp(CR); | 581     Asm->jmp(CR); | 
| 600   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(Target)) { | 582   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(Target)) { | 
| 601     // NaCl trampoline calls refer to an address within the sandbox directly. | 583     // NaCl trampoline calls refer to an address within the sandbox directly. | 
| 602     // This is usually only needed for non-IRT builds and otherwise not very | 584     // This is usually only needed for non-IRT builds and otherwise not very | 
| 603     // portable or stable. Usually this is only done for "calls" and not jumps. | 585     // portable or stable. Usually this is only done for "calls" and not jumps. | 
| 604     Asm->jmp(AssemblerImmediate(Imm->getValue())); | 586     Asm->jmp(AssemblerImmediate(Imm->getValue())); | 
| 605   } else { | 587   } else { | 
| 606     llvm::report_fatal_error("Unexpected operand type"); | 588     llvm::report_fatal_error("Unexpected operand type"); | 
| 607   } | 589   } | 
| 608 } | 590 } | 
| (...skipping 37 matching lines...) | |
| 646   if (const auto *Var = llvm::dyn_cast<Variable>(CallTarget)) { | 628   if (const auto *Var = llvm::dyn_cast<Variable>(CallTarget)) { | 
| 647     if (Var->hasReg()) { | 629     if (Var->hasReg()) { | 
| 648       Asm->call(Traits::getEncodedGPR(Var->getRegNum())); | 630       Asm->call(Traits::getEncodedGPR(Var->getRegNum())); | 
| 649     } else { | 631     } else { | 
| 650       Asm->call(Target->stackVarToAsmOperand(Var)); | 632       Asm->call(Target->stackVarToAsmOperand(Var)); | 
| 651     } | 633     } | 
| 652   } else if (const auto *Mem = llvm::dyn_cast<X86OperandMem>(CallTarget)) { | 634   } else if (const auto *Mem = llvm::dyn_cast<X86OperandMem>(CallTarget)) { | 
| 653     assert(Mem->getSegmentRegister() == X86OperandMem::DefaultSegment); | 635     assert(Mem->getSegmentRegister() == X86OperandMem::DefaultSegment); | 
| 654     Asm->call(Mem->toAsmAddress(Asm, Target)); | 636     Asm->call(Mem->toAsmAddress(Asm, Target)); | 
| 655   } else if (const auto *CR = llvm::dyn_cast<ConstantRelocatable>(CallTarget)) { | 637   } else if (const auto *CR = llvm::dyn_cast<ConstantRelocatable>(CallTarget)) { | 
| 656     assert(CR->getOffset() == 0 && "We only support calling a function"); |  | 
| 657     Asm->call(CR); | 638     Asm->call(CR); | 
| 658   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(CallTarget)) { | 639   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(CallTarget)) { | 
| 659     Asm->call(AssemblerImmediate(Imm->getValue())); | 640     Asm->call(AssemblerImmediate(Imm->getValue())); | 
| 660   } else { | 641   } else { | 
| 661     llvm_unreachable("Unexpected operand type"); | 642     llvm_unreachable("Unexpected operand type"); | 
| 662   } | 643   } | 
| 663 } | 644 } | 
| 664 | 645 | 
| 665 template <typename TraitsType> | 646 template <typename TraitsType> | 
| 666 void InstImpl<TraitsType>::InstX86Call::dump(const Cfg *Func) const { | 647 void InstImpl<TraitsType>::InstX86Call::dump(const Cfg *Func) const { | 
| (...skipping 74 matching lines...) | |
| 741       Address SrcStackAddr = Target->stackVarToAsmOperand(SrcVar); | 722       Address SrcStackAddr = Target->stackVarToAsmOperand(SrcVar); | 
| 742       (Asm->*(Emitter.GPRAddr))(Ty, VarReg, SrcStackAddr); | 723       (Asm->*(Emitter.GPRAddr))(Ty, VarReg, SrcStackAddr); | 
| 743     } | 724     } | 
| 744   } else if (const auto *Mem = llvm::dyn_cast<X86OperandMem>(Src)) { | 725   } else if (const auto *Mem = llvm::dyn_cast<X86OperandMem>(Src)) { | 
| 745     Mem->emitSegmentOverride(Asm); | 726     Mem->emitSegmentOverride(Asm); | 
| 746     (Asm->*(Emitter.GPRAddr))(Ty, VarReg, | 727     (Asm->*(Emitter.GPRAddr))(Ty, VarReg, | 
| 747                               Mem->toAsmAddress(Asm, Target, IsLea)); | 728                               Mem->toAsmAddress(Asm, Target, IsLea)); | 
| 748   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(Src)) { | 729   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(Src)) { | 
| 749     (Asm->*(Emitter.GPRImm))(Ty, VarReg, AssemblerImmediate(Imm->getValue())); | 730     (Asm->*(Emitter.GPRImm))(Ty, VarReg, AssemblerImmediate(Imm->getValue())); | 
| 750   } else if (const auto *Reloc = llvm::dyn_cast<ConstantRelocatable>(Src)) { | 731   } else if (const auto *Reloc = llvm::dyn_cast<ConstantRelocatable>(Src)) { | 
| 751     AssemblerFixup *Fixup = | 732     const auto FixupKind = Reloc->getName() == GlobalOffsetTable | 
| 752         Asm->createFixup(Traits::TargetLowering::getAbsFixup(), Reloc); | 733                                ? Traits::FK_GotPC | 
| 753     (Asm->*(Emitter.GPRImm))(Ty, VarReg, | 734                                : Traits::TargetLowering::getAbsFixup(); | 
| 754                              AssemblerImmediate(Reloc->getOffset(), Fixup)); | 735     AssemblerFixup *Fixup = Asm->createFixup(FixupKind, Reloc); | 
|  | 736     (Asm->*(Emitter.GPRImm))(Ty, VarReg, AssemblerImmediate(Fixup)); | 
| 755   } else if (const auto *Split = llvm::dyn_cast<VariableSplit>(Src)) { | 737   } else if (const auto *Split = llvm::dyn_cast<VariableSplit>(Src)) { | 
| 756     (Asm->*(Emitter.GPRAddr))(Ty, VarReg, Split->toAsmAddress(Func)); | 738     (Asm->*(Emitter.GPRAddr))(Ty, VarReg, Split->toAsmAddress(Func)); | 
| 757   } else { | 739   } else { | 
| 758     llvm_unreachable("Unexpected operand type"); | 740     llvm_unreachable("Unexpected operand type"); | 
| 759   } | 741   } | 
| 760 } | 742 } | 
| 761 | 743 | 
| 762 template <typename TraitsType> | 744 template <typename TraitsType> | 
| 763 void InstImpl<TraitsType>::emitIASAddrOpTyGPR(const Cfg *Func, Type Ty, | 745 void InstImpl<TraitsType>::emitIASAddrOpTyGPR(const Cfg *Func, Type Ty, | 
| 764                                               const Address &Addr, | 746                                               const Address &Addr, | 
| 765                                               const Operand *Src, | 747                                               const Operand *Src, | 
| 766                                               const GPREmitterAddrOp &Emitter) { | 748                                               const GPREmitterAddrOp &Emitter) { | 
| 767   Assembler *Asm = Func->getAssembler<Assembler>(); | 749   Assembler *Asm = Func->getAssembler<Assembler>(); | 
| 768   // Src can only be Reg or AssemblerImmediate. | 750   // Src can only be Reg or AssemblerImmediate. | 
| 769   if (const auto *SrcVar = llvm::dyn_cast<Variable>(Src)) { | 751   if (const auto *SrcVar = llvm::dyn_cast<Variable>(Src)) { | 
| 770     assert(SrcVar->hasReg()); | 752     assert(SrcVar->hasReg()); | 
| 771     GPRRegister SrcReg = Traits::getEncodedGPR(SrcVar->getRegNum()); | 753     GPRRegister SrcReg = Traits::getEncodedGPR(SrcVar->getRegNum()); | 
| 772     (Asm->*(Emitter.AddrGPR))(Ty, Addr, SrcReg); | 754     (Asm->*(Emitter.AddrGPR))(Ty, Addr, SrcReg); | 
| 773   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(Src)) { | 755   } else if (const auto *Imm = llvm::dyn_cast<ConstantInteger32>(Src)) { | 
| 774     (Asm->*(Emitter.AddrImm))(Ty, Addr, AssemblerImmediate(Imm->getValue())); | 756     (Asm->*(Emitter.AddrImm))(Ty, Addr, AssemblerImmediate(Imm->getValue())); | 
| 775   } else if (const auto *Reloc = llvm::dyn_cast<ConstantRelocatable>(Src)) { | 757   } else if (const auto *Reloc = llvm::dyn_cast<ConstantRelocatable>(Src)) { | 
| 776     AssemblerFixup *Fixup = | 758     const auto FixupKind = Reloc->getName() == GlobalOffsetTable | 
| 777         Asm->createFixup(Traits::TargetLowering::getAbsFixup(), Reloc); | 759                                ? Traits::FK_GotPC | 
| 778     (Asm->*(Emitter.AddrImm))(Ty, Addr, | 760                                : Traits::TargetLowering::getAbsFixup(); | 
| 779                               AssemblerImmediate(Reloc->getOffset(), Fixup)); | 761     AssemblerFixup *Fixup = Asm->createFixup(FixupKind, Reloc); | 
|  | 762     (Asm->*(Emitter.AddrImm))(Ty, Addr, AssemblerImmediate(Fixup)); | 
| 780   } else { | 763   } else { | 
| 781     llvm_unreachable("Unexpected operand type"); | 764     llvm_unreachable("Unexpected operand type"); | 
| 782   } | 765   } | 
| 783 } | 766 } | 
| 784 | 767 | 
| 785 template <typename TraitsType> | 768 template <typename TraitsType> | 
| 786 void InstImpl<TraitsType>::emitIASAsAddrOpTyGPR( | 769 void InstImpl<TraitsType>::emitIASAsAddrOpTyGPR( | 
| 787     const Cfg *Func, Type Ty, const Operand *Op0, const Operand *Op1, | 770     const Cfg *Func, Type Ty, const Operand *Op0, const Operand *Op1, | 
| 788     const GPREmitterAddrOp &Emitter) { | 771     const GPREmitterAddrOp &Emitter) { | 
| 789   auto *Target = InstX86Base::getTarget(Func); | 772   auto *Target = InstX86Base::getTarget(Func); | 
| (...skipping 2116 matching lines...) | |
| 2906     return; | 2889     return; | 
| 2907   Ostream &Str = Func->getContext()->getStrDump(); | 2890   Ostream &Str = Func->getContext()->getStrDump(); | 
| 2908   Str << "IACA_END"; | 2891   Str << "IACA_END"; | 
| 2909 } | 2892 } | 
| 2910 | 2893 | 
| 2911 } // end of namespace X86NAMESPACE | 2894 } // end of namespace X86NAMESPACE | 
| 2912 | 2895 | 
| 2913 } // end of namespace Ice | 2896 } // end of namespace Ice | 
| 2914 | 2897 | 
| 2915 #endif // SUBZERO_SRC_ICEINSTX86BASEIMPL_H | 2898 #endif // SUBZERO_SRC_ICEINSTX86BASEIMPL_H | 
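For context, the NEW side changes the `InstX86GetIP` lowering: instead of emitting the `addl $_GLOBAL_OFFSET_TABLE_` directly (with the displacement-fixup re-emission dance in `emitIAS`), it emits a `call` to a per-register get-IP routine created by `TargetLowering::createGetIPForRegister`, and the `_GLOBAL_OFFSET_TABLE_` relocatable is later consumed through an `FK_GotPC` fixup in the generic GPR emitters. A minimal sketch of the resulting i386 PIC pattern is below; the thunk name and the chosen register are hypothetical, and the real sequence is produced by Subzero's emitters shown in the diff, not by this snippet.

```cpp
// Illustrative sketch only (not Subzero code): prints the two-instruction
// i386 PIC pattern the new lowering corresponds to, for an assumed
// destination register and a hypothetical get-IP thunk name.
#include <cstdio>

int main() {
  const char *Reg = "ebx";  // assumed destination register
  // Step 1: GetIP now emits a call to a per-register thunk that leaves the
  // return address (the current instruction pointer) in Reg.
  std::printf("\tcall\t__Sz_getIP_%s\n", Reg);  // thunk name is hypothetical
  // Step 2: the GOT base is formed where the _GLOBAL_OFFSET_TABLE_
  // relocatable is used, via a PC-relative (FK_GotPC-style) fixup, instead
  // of the old inline add emitted by GetIP itself.
  std::printf("\taddl\t$_GLOBAL_OFFSET_TABLE_, %%%s\n", Reg);
  return 0;
}
```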