| OLD | NEW |
| 1 //===- subzero/src/IceTargetLoweringX86Base.h - x86 lowering ----*- C++ -*-===// | 1 //===- subzero/src/IceTargetLoweringX86Base.h - x86 lowering ----*- C++ -*-===// |
| 2 // | 2 // |
| 3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
| 4 // | 4 // |
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
| 7 // | 7 // |
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
| 9 /// | 9 /// |
| 10 /// \file | 10 /// \file |
| (...skipping 66 matching lines...) |
| 77 using InstX86Label = typename Traits::Insts::Label; | 77 using InstX86Label = typename Traits::Insts::Label; |
| 78 | 78 |
| 79 ~TargetX86Base() override = default; | 79 ~TargetX86Base() override = default; |
| 80 | 80 |
| 81 static void staticInit(const ClFlags &Flags); | 81 static void staticInit(const ClFlags &Flags); |
| 82 static TargetX86Base *create(Cfg *Func) { return new TargetX86Base(Func); } | 82 static TargetX86Base *create(Cfg *Func) { return new TargetX86Base(Func); } |
| 83 | 83 |
| 84 static FixupKind getPcRelFixup() { return PcRelFixup; } | 84 static FixupKind getPcRelFixup() { return PcRelFixup; } |
| 85 static FixupKind getAbsFixup() { return AbsFixup; } | 85 static FixupKind getAbsFixup() { return AbsFixup; } |
| 86 | 86 |
| 87 bool needSandboxing() const { return NeedSandboxing; } |
| 88 |
| 87 void translateOm1() override; | 89 void translateOm1() override; |
| 88 void translateO2() override; | 90 void translateO2() override; |
| 89 void doLoadOpt(); | 91 void doLoadOpt(); |
| 90 bool doBranchOpt(Inst *I, const CfgNode *NextNode) override; | 92 bool doBranchOpt(Inst *I, const CfgNode *NextNode) override; |
| 91 | 93 |
| 92 SizeT getNumRegisters() const override { | 94 SizeT getNumRegisters() const override { |
| 93 return Traits::RegisterSet::Reg_NUM; | 95 return Traits::RegisterSet::Reg_NUM; |
| 94 } | 96 } |
| 95 Variable *getPhysicalRegister(SizeT RegNum, Type Ty = IceType_void) override; | 97 Variable *getPhysicalRegister(SizeT RegNum, Type Ty = IceType_void) override; |
| 96 IceString getRegName(SizeT RegNum, Type Ty) const override; | 98 IceString getRegName(SizeT RegNum, Type Ty) const override; |
| (...skipping 80 matching lines...) |
| 177 | 179 |
| 178 void finishArgumentLowering(Variable *Arg, Variable *FramePtr, | 180 void finishArgumentLowering(Variable *Arg, Variable *FramePtr, |
| 179 size_t BasicFrameOffset, size_t StackAdjBytes, | 181 size_t BasicFrameOffset, size_t StackAdjBytes, |
| 180 size_t &InArgsSizeBytes); | 182 size_t &InArgsSizeBytes); |
| 181 X86Address stackVarToAsmOperand(const Variable *Var) const; | 183 X86Address stackVarToAsmOperand(const Variable *Var) const; |
| 182 | 184 |
| 183 InstructionSetEnum getInstructionSet() const { return InstructionSet; } | 185 InstructionSetEnum getInstructionSet() const { return InstructionSet; } |
| 184 Operand *legalizeUndef(Operand *From, int32_t RegNum = Variable::NoRegister); | 186 Operand *legalizeUndef(Operand *From, int32_t RegNum = Variable::NoRegister); |
| 185 | 187 |
| 186 protected: | 188 protected: |
| 189 const bool NeedSandboxing; |
| 190 |
| 187 explicit TargetX86Base(Cfg *Func); | 191 explicit TargetX86Base(Cfg *Func); |
| 188 | 192 |
| 189 void postLower() override; | 193 void postLower() override; |
| 190 | 194 |
| 195 void initSandbox() { |
| 196 dispatchToConcrete(&Traits::ConcreteTarget::initSandbox); |
| 197 } |
| 198 |
| 191 void lowerAlloca(const InstAlloca *Inst) override; | 199 void lowerAlloca(const InstAlloca *Inst) override; |
| 192 void lowerArithmetic(const InstArithmetic *Inst) override; | 200 void lowerArithmetic(const InstArithmetic *Inst) override; |
| 193 void lowerAssign(const InstAssign *Inst) override; | 201 void lowerAssign(const InstAssign *Inst) override; |
| 194 void lowerBr(const InstBr *Inst) override; | 202 void lowerBr(const InstBr *Inst) override; |
| 195 void lowerCast(const InstCast *Inst) override; | 203 void lowerCast(const InstCast *Inst) override; |
| 196 void lowerExtractElement(const InstExtractElement *Inst) override; | 204 void lowerExtractElement(const InstExtractElement *Inst) override; |
| 197 void lowerFcmp(const InstFcmp *Inst) override; | 205 void lowerFcmp(const InstFcmp *Inst) override; |
| 198 void lowerIcmp(const InstIcmp *Inst) override; | 206 void lowerIcmp(const InstIcmp *Inst) override; |
| 199 | 207 |
| 200 void lowerIntrinsicCall(const InstIntrinsicCall *Inst) override; | 208 void lowerIntrinsicCall(const InstIntrinsicCall *Inst) override; |
| (...skipping 34 matching lines...) |
| 235 /// Copy memory of given type from Src to Dest using OffsetAmt on both. | 243 /// Copy memory of given type from Src to Dest using OffsetAmt on both. |
| 236 void copyMemory(Type Ty, Variable *Dest, Variable *Src, int32_t OffsetAmt); | 244 void copyMemory(Type Ty, Variable *Dest, Variable *Src, int32_t OffsetAmt); |
| 237 /// Replace some calls to memcpy with inline instructions. | 245 /// Replace some calls to memcpy with inline instructions. |
| 238 void lowerMemcpy(Operand *Dest, Operand *Src, Operand *Count); | 246 void lowerMemcpy(Operand *Dest, Operand *Src, Operand *Count); |
| 239 /// Replace some calls to memmove with inline instructions. | 247 /// Replace some calls to memmove with inline instructions. |
| 240 void lowerMemmove(Operand *Dest, Operand *Src, Operand *Count); | 248 void lowerMemmove(Operand *Dest, Operand *Src, Operand *Count); |
| 241 /// Replace some calls to memset with inline instructions. | 249 /// Replace some calls to memset with inline instructions. |
| 242 void lowerMemset(Operand *Dest, Operand *Val, Operand *Count); | 250 void lowerMemset(Operand *Dest, Operand *Val, Operand *Count); |
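As a rough illustration of what "inline instructions" means here (a standalone sketch, not the Subzero lowering itself, and the helper name `inlineMemset8` is hypothetical): when Count is a small compile-time constant, the library call can be replaced by a short straight-line sequence of fixed-width stores.

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>

// Standalone sketch of the idea behind lowerMemset (not Subzero code): for a
// small known Count, replace the call with word-sized stores of a value that
// has the byte replicated into every lane.
static void inlineMemset8(uint8_t *Dest, uint8_t Val) {
  const uint32_t Word = Val * 0x01010101u; // replicate the byte into 32 bits
  std::memcpy(Dest + 0, &Word, 4);         // first 4-byte store
  std::memcpy(Dest + 4, &Word, 4);         // second 4-byte store
}

int main() {
  uint8_t Buf[8];
  inlineMemset8(Buf, 0xAB);
  for (uint8_t B : Buf)
    assert(B == 0xAB);
}
```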
| 243 | 251 |
| 244 /// Lower an indirect jump adding sandboxing when needed. | 252 /// Lower an indirect jump adding sandboxing when needed. |
| 245 void lowerIndirectJump(Variable *JumpTarget); | 253 void lowerIndirectJump(Variable *JumpTarget) { |
| 254 // Without std::move below, the compiler deduces that the argument to |
| 255 // lowerIndirectJump is a Variable *&, not a Variable *. |
| 256 dispatchToConcrete(&Traits::ConcreteTarget::lowerIndirectJump, |
| 257 std::move(JumpTarget)); |
| 258 } |
| 246 | 259 |
| 247 /// Check that the comparison is in [Min,Max]. The flags register will be modified | 260 /// Check that the comparison is in [Min,Max]. The flags register will be modified |
| 248 /// with: | 261 /// with: |
| 249 /// - below or equal, set if in range | 262 /// - below or equal, set if in range |
| 250 /// - above, set if not in range | 263 /// - above, set if not in range |
| 251 /// The index into the range is returned. | 264 /// The index into the range is returned. |
| 252 Operand *lowerCmpRange(Operand *Comparison, uint64_t Min, uint64_t Max); | 265 Operand *lowerCmpRange(Operand *Comparison, uint64_t Min, uint64_t Max); |
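The single flags-setting comparison described above is the classic unsigned range-check trick: subtract Min and compare the difference, as an unsigned value, against Max - Min. A standalone illustration of the arithmetic (not the emitted instruction sequence itself):

```cpp
#include <cassert>
#include <cstdint>

// Standalone illustration (not Subzero code): (Value - Min) <= (Max - Min),
// evaluated unsigned, tests both bounds with one compare. If Value is below
// Min the subtraction wraps to a huge value, so the compare reports "above",
// which is exactly the flag behavior documented for lowerCmpRange.
static bool inRange(uint64_t Value, uint64_t Min, uint64_t Max) {
  assert(Min <= Max);
  return (Value - Min) <= (Max - Min);
}

int main() {
  assert(inRange(5, 3, 9));
  assert(!inRange(2, 3, 9));  // wraps below Min, reads as "above"
  assert(!inRange(10, 3, 9)); // past Max
}
```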
| 253 /// Lowering of a cluster of switch cases. If the case is not matched, control | 266 /// Lowering of a cluster of switch cases. If the case is not matched, control |
| 254 /// will pass to the default label provided. If the default label is nullptr, | 267 /// will pass to the default label provided. If the default label is nullptr, |
| 255 /// then control will fall through to the next instruction. DoneCmp should be | 268 /// then control will fall through to the next instruction. DoneCmp should be |
| (...skipping 83 matching lines...) |
| 339 | 352 |
| 340 /// Return a memory operand corresponding to a stack allocated Variable. | 353 /// Return a memory operand corresponding to a stack allocated Variable. |
| 341 X86OperandMem *getMemoryOperandForStackSlot(Type Ty, Variable *Slot, | 354 X86OperandMem *getMemoryOperandForStackSlot(Type Ty, Variable *Slot, |
| 342 uint32_t Offset = 0); | 355 uint32_t Offset = 0); |
| 343 | 356 |
| 344 void | 357 void |
| 345 makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation, | 358 makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation, |
| 346 const llvm::SmallBitVector &ExcludeRegisters, | 359 const llvm::SmallBitVector &ExcludeRegisters, |
| 347 uint64_t Salt) const override; | 360 uint64_t Salt) const override; |
| 348 | 361 |
| 362 /// AutoMemorySandboxer emits a bundle-lock/bundle-unlock pair if the |
| 363 /// instruction's operand is a memory reference. This is only needed for |
| 364 /// the x86-64 NaCl sandbox. |
| 365 template <InstBundleLock::Option BundleLockOpt = InstBundleLock::Opt_None> |
| 366 class AutoMemorySandboxer { |
| 367 AutoMemorySandboxer() = delete; |
| 368 AutoMemorySandboxer(const AutoMemorySandboxer &) = delete; |
| 369 AutoMemorySandboxer &operator=(const AutoMemorySandboxer &) = delete; |
| 370 |
| 371 private: |
| 372 typename Traits::TargetLowering *Target; |
| 373 |
| 374 template <typename T, typename... Tail> |
| 375 X86OperandMem **findMemoryReference(T **First, Tail... Others) { |
| 376 if (llvm::isa<X86OperandMem>(*First)) { |
| 377 return reinterpret_cast<X86OperandMem **>(First); |
| 378 } |
| 379 return findMemoryReference(Others...); |
| 380 } |
| 381 |
| 382 X86OperandMem **findMemoryReference() { return nullptr; } |
| 383 |
| 384 public: |
| 385 X86OperandMem **const MemOperand; |
| 386 |
| 387 template <typename... T> |
| 388 AutoMemorySandboxer(typename Traits::TargetLowering *Target, T... Args) |
| 389 : Target(Target), |
| 390 MemOperand( |
| 391 (!Traits::Is64Bit || !Target->Ctx->getFlags().getUseSandboxing()) |
| 392 ? nullptr |
| 393 : findMemoryReference(Args...)) { |
| 394 if (MemOperand != nullptr) { |
| 395 Target->_bundle_lock(BundleLockOpt); |
| 396 *MemOperand = Target->_sandbox_mem_reference(*MemOperand); |
| 397 } |
| 398 } |
| 399 |
| 400 ~AutoMemorySandboxer() { |
| 401 if (MemOperand != nullptr) { |
| 402 Target->_bundle_unlock(); |
| 403 } |
| 404 } |
| 405 }; |
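In RAII terms, construction emits the bundle_lock and rewrites the memory operand in place through the stored pointer-to-pointer, and destruction emits the matching bundle_unlock after the instruction has been inserted between the two. A toy, self-contained analog of the scoping pattern (not the Subzero class; the printed instructions are only illustrative):

```cpp
#include <cstdio>

// Toy analog of AutoMemorySandboxer (not Subzero code): the guard brackets an
// instruction with bundle_lock/bundle_unlock only when a memory operand is
// actually present.
class ScopedBundle {
  bool Active;

public:
  explicit ScopedBundle(bool HasMemOperand) : Active(HasMemOperand) {
    if (Active)
      std::puts("  .bundle_lock");
  }
  ~ScopedBundle() {
    if (Active)
      std::puts("  .bundle_unlock");
  }
};

int main() {
  {
    ScopedBundle _(true); // instruction with a (sandboxed) memory operand
    std::puts("  add eax, [r15 + rdi]");
  }
  {
    ScopedBundle _(false); // register-only instruction: no bundling emitted
    std::puts("  add eax, ebx");
  }
}
```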
| 406 |
| 349 /// The following are helpers that insert lowered x86 instructions with | 407 /// The following are helpers that insert lowered x86 instructions with |
| 350 /// minimal syntactic overhead, so that the lowering code can look as close to | 408 /// minimal syntactic overhead, so that the lowering code can look as close to |
| 351 /// assembly as practical. | 409 /// assembly as practical. |
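For a feel of what "close to assembly" means in practice, here is a self-contained toy in the same spirit (hypothetical names, not the Subzero API): the underscore helpers hide the Context.insert<> plumbing, so a lowering routine reads almost like the instruction stream it produces.

```cpp
#include <cstdio>
#include <string>
#include <vector>

// Toy illustration of the helper style (not the Subzero API): each underscore
// helper appends one "instruction", so lowering code reads like assembly.
class ToyLowering {
  std::vector<std::string> Insts;

  void _mov(const char *Dest, const char *Src) {
    Insts.push_back(std::string("mov ") + Dest + ", " + Src);
  }
  void _add(const char *Dest, const char *Src) {
    Insts.push_back(std::string("add ") + Dest + ", " + Src);
  }

public:
  void lowerAddExample() {
    _mov("eax", "[src0]"); // load the first operand
    _add("eax", "[src1]"); // add the second
    _mov("[dest]", "eax"); // store the result
  }
  void dump() const {
    for (const std::string &I : Insts)
      std::printf("%s\n", I.c_str());
  }
};

int main() {
  ToyLowering T;
  T.lowerAddExample();
  T.dump();
}
```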
| 352 void _adc(Variable *Dest, Operand *Src0) { | 410 void _adc(Variable *Dest, Operand *Src0) { |
| 411 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 353 Context.insert<typename Traits::Insts::Adc>(Dest, Src0); | 412 Context.insert<typename Traits::Insts::Adc>(Dest, Src0); |
| 354 } | 413 } |
| 355 void _adc_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 414 void _adc_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 415 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 356 Context.insert<typename Traits::Insts::AdcRMW>(DestSrc0, Src1); | 416 Context.insert<typename Traits::Insts::AdcRMW>(DestSrc0, Src1); |
| 357 } | 417 } |
| 358 void _add(Variable *Dest, Operand *Src0) { | 418 void _add(Variable *Dest, Operand *Src0) { |
| 419 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 359 Context.insert<typename Traits::Insts::Add>(Dest, Src0); | 420 Context.insert<typename Traits::Insts::Add>(Dest, Src0); |
| 360 } | 421 } |
| 361 void _add_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 422 void _add_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 423 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 362 Context.insert<typename Traits::Insts::AddRMW>(DestSrc0, Src1); | 424 Context.insert<typename Traits::Insts::AddRMW>(DestSrc0, Src1); |
| 363 } | 425 } |
| 364 void _addps(Variable *Dest, Operand *Src0) { | 426 void _addps(Variable *Dest, Operand *Src0) { |
| 427 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 365 Context.insert<typename Traits::Insts::Addps>(Dest, Src0); | 428 Context.insert<typename Traits::Insts::Addps>(Dest, Src0); |
| 366 } | 429 } |
| 367 void _addss(Variable *Dest, Operand *Src0) { | 430 void _addss(Variable *Dest, Operand *Src0) { |
| 431 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 368 Context.insert<typename Traits::Insts::Addss>(Dest, Src0); | 432 Context.insert<typename Traits::Insts::Addss>(Dest, Src0); |
| 369 } | 433 } |
| 434 void _add_sp(Operand *Adjustment) { |
| 435 dispatchToConcrete(&Traits::ConcreteTarget::_add_sp, std::move(Adjustment)); |
| 436 } |
| 370 void _and(Variable *Dest, Operand *Src0) { | 437 void _and(Variable *Dest, Operand *Src0) { |
| 438 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 371 Context.insert<typename Traits::Insts::And>(Dest, Src0); | 439 Context.insert<typename Traits::Insts::And>(Dest, Src0); |
| 372 } | 440 } |
| 373 void _andnps(Variable *Dest, Operand *Src0) { | 441 void _andnps(Variable *Dest, Operand *Src0) { |
| 442 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 374 Context.insert<typename Traits::Insts::Andnps>(Dest, Src0); | 443 Context.insert<typename Traits::Insts::Andnps>(Dest, Src0); |
| 375 } | 444 } |
| 376 void _andps(Variable *Dest, Operand *Src0) { | 445 void _andps(Variable *Dest, Operand *Src0) { |
| 446 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 377 Context.insert<typename Traits::Insts::Andps>(Dest, Src0); | 447 Context.insert<typename Traits::Insts::Andps>(Dest, Src0); |
| 378 } | 448 } |
| 379 void _and_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 449 void _and_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 450 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 380 Context.insert<typename Traits::Insts::AndRMW>(DestSrc0, Src1); | 451 Context.insert<typename Traits::Insts::AndRMW>(DestSrc0, Src1); |
| 381 } | 452 } |
| 382 void _blendvps(Variable *Dest, Operand *Src0, Operand *Src1) { | 453 void _blendvps(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 454 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 383 Context.insert<typename Traits::Insts::Blendvps>(Dest, Src0, Src1); | 455 Context.insert<typename Traits::Insts::Blendvps>(Dest, Src0, Src1); |
| 384 } | 456 } |
| 385 void _br(BrCond Condition, CfgNode *TargetTrue, CfgNode *TargetFalse) { | 457 void _br(BrCond Condition, CfgNode *TargetTrue, CfgNode *TargetFalse) { |
| 386 Context.insert<InstX86Br>(TargetTrue, TargetFalse, Condition, | 458 Context.insert<InstX86Br>(TargetTrue, TargetFalse, Condition, |
| 387 InstX86Br::Far); | 459 InstX86Br::Far); |
| 388 } | 460 } |
| 389 void _br(CfgNode *Target) { | 461 void _br(CfgNode *Target) { |
| 390 Context.insert<InstX86Br>(Target, InstX86Br::Far); | 462 Context.insert<InstX86Br>(Target, InstX86Br::Far); |
| 391 } | 463 } |
| 392 void _br(BrCond Condition, CfgNode *Target) { | 464 void _br(BrCond Condition, CfgNode *Target) { |
| 393 Context.insert<InstX86Br>(Target, Condition, InstX86Br::Far); | 465 Context.insert<InstX86Br>(Target, Condition, InstX86Br::Far); |
| 394 } | 466 } |
| 395 void _br(BrCond Condition, InstX86Label *Label, | 467 void _br(BrCond Condition, InstX86Label *Label, |
| 396 typename InstX86Br::Mode Kind = InstX86Br::Near) { | 468 typename InstX86Br::Mode Kind = InstX86Br::Near) { |
| 397 Context.insert<InstX86Br>(Label, Condition, Kind); | 469 Context.insert<InstX86Br>(Label, Condition, Kind); |
| 398 } | 470 } |
| 399 void _bsf(Variable *Dest, Operand *Src0) { | 471 void _bsf(Variable *Dest, Operand *Src0) { |
| 472 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 400 Context.insert<typename Traits::Insts::Bsf>(Dest, Src0); | 473 Context.insert<typename Traits::Insts::Bsf>(Dest, Src0); |
| 401 } | 474 } |
| 402 void _bsr(Variable *Dest, Operand *Src0) { | 475 void _bsr(Variable *Dest, Operand *Src0) { |
| 476 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 403 Context.insert<typename Traits::Insts::Bsr>(Dest, Src0); | 477 Context.insert<typename Traits::Insts::Bsr>(Dest, Src0); |
| 404 } | 478 } |
| 405 void _bswap(Variable *SrcDest) { | 479 void _bswap(Variable *SrcDest) { |
| 480 AutoMemorySandboxer<> _(this, &SrcDest); |
| 406 Context.insert<typename Traits::Insts::Bswap>(SrcDest); | 481 Context.insert<typename Traits::Insts::Bswap>(SrcDest); |
| 407 } | 482 } |
| 408 void _cbwdq(Variable *Dest, Operand *Src0) { | 483 void _cbwdq(Variable *Dest, Operand *Src0) { |
| 484 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 409 Context.insert<typename Traits::Insts::Cbwdq>(Dest, Src0); | 485 Context.insert<typename Traits::Insts::Cbwdq>(Dest, Src0); |
| 410 } | 486 } |
| 411 void _cmov(Variable *Dest, Operand *Src0, BrCond Condition) { | 487 void _cmov(Variable *Dest, Operand *Src0, BrCond Condition) { |
| 488 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 412 Context.insert<typename Traits::Insts::Cmov>(Dest, Src0, Condition); | 489 Context.insert<typename Traits::Insts::Cmov>(Dest, Src0, Condition); |
| 413 } | 490 } |
| 414 void _cmp(Operand *Src0, Operand *Src1) { | 491 void _cmp(Operand *Src0, Operand *Src1) { |
| 492 AutoMemorySandboxer<> _(this, &Src0, &Src1); |
| 415 Context.insert<typename Traits::Insts::Icmp>(Src0, Src1); | 493 Context.insert<typename Traits::Insts::Icmp>(Src0, Src1); |
| 416 } | 494 } |
| 417 void _cmpps(Variable *Dest, Operand *Src0, CmppsCond Condition) { | 495 void _cmpps(Variable *Dest, Operand *Src0, CmppsCond Condition) { |
| 496 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 418 Context.insert<typename Traits::Insts::Cmpps>(Dest, Src0, Condition); | 497 Context.insert<typename Traits::Insts::Cmpps>(Dest, Src0, Condition); |
| 419 } | 498 } |
| 420 void _cmpxchg(Operand *DestOrAddr, Variable *Eax, Variable *Desired, | 499 void _cmpxchg(Operand *DestOrAddr, Variable *Eax, Variable *Desired, |
| 421 bool Locked) { | 500 bool Locked) { |
| 501 AutoMemorySandboxer<> _(this, &DestOrAddr); |
| 422 Context.insert<typename Traits::Insts::Cmpxchg>(DestOrAddr, Eax, Desired, | 502 Context.insert<typename Traits::Insts::Cmpxchg>(DestOrAddr, Eax, Desired, |
| 423 Locked); | 503 Locked); |
| 424 // Mark eax as possibly modified by cmpxchg. | 504 // Mark eax as possibly modified by cmpxchg. |
| 425 Context.insert<InstFakeDef>(Eax, llvm::dyn_cast<Variable>(DestOrAddr)); | 505 Context.insert<InstFakeDef>(Eax, llvm::dyn_cast<Variable>(DestOrAddr)); |
| 426 _set_dest_redefined(); | 506 _set_dest_redefined(); |
| 427 Context.insert<InstFakeUse>(Eax); | 507 Context.insert<InstFakeUse>(Eax); |
| 428 } | 508 } |
| 429 void _cmpxchg8b(X86OperandMem *Addr, Variable *Edx, Variable *Eax, | 509 void _cmpxchg8b(X86OperandMem *Addr, Variable *Edx, Variable *Eax, |
| 430 Variable *Ecx, Variable *Ebx, bool Locked) { | 510 Variable *Ecx, Variable *Ebx, bool Locked) { |
| 511 AutoMemorySandboxer<> _(this, &Addr); |
| 431 Context.insert<typename Traits::Insts::Cmpxchg8b>(Addr, Edx, Eax, Ecx, Ebx, | 512 Context.insert<typename Traits::Insts::Cmpxchg8b>(Addr, Edx, Eax, Ecx, Ebx, |
| 432 Locked); | 513 Locked); |
| 433 // Mark edx and eax as possibly modified by cmpxchg8b. | 514 // Mark edx and eax as possibly modified by cmpxchg8b. |
| 434 Context.insert<InstFakeDef>(Edx); | 515 Context.insert<InstFakeDef>(Edx); |
| 435 _set_dest_redefined(); | 516 _set_dest_redefined(); |
| 436 Context.insert<InstFakeUse>(Edx); | 517 Context.insert<InstFakeUse>(Edx); |
| 437 Context.insert<InstFakeDef>(Eax); | 518 Context.insert<InstFakeDef>(Eax); |
| 438 _set_dest_redefined(); | 519 _set_dest_redefined(); |
| 439 Context.insert<InstFakeUse>(Eax); | 520 Context.insert<InstFakeUse>(Eax); |
| 440 } | 521 } |
| 441 void _cvt(Variable *Dest, Operand *Src0, | 522 void _cvt(Variable *Dest, Operand *Src0, |
| 442 typename Traits::Insts::Cvt::CvtVariant Variant) { | 523 typename Traits::Insts::Cvt::CvtVariant Variant) { |
| 524 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 443 Context.insert<typename Traits::Insts::Cvt>(Dest, Src0, Variant); | 525 Context.insert<typename Traits::Insts::Cvt>(Dest, Src0, Variant); |
| 444 } | 526 } |
| 445 void _div(Variable *Dest, Operand *Src0, Operand *Src1) { | 527 void _div(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 528 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 446 Context.insert<typename Traits::Insts::Div>(Dest, Src0, Src1); | 529 Context.insert<typename Traits::Insts::Div>(Dest, Src0, Src1); |
| 447 } | 530 } |
| 448 void _divps(Variable *Dest, Operand *Src0) { | 531 void _divps(Variable *Dest, Operand *Src0) { |
| 532 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 449 Context.insert<typename Traits::Insts::Divps>(Dest, Src0); | 533 Context.insert<typename Traits::Insts::Divps>(Dest, Src0); |
| 450 } | 534 } |
| 451 void _divss(Variable *Dest, Operand *Src0) { | 535 void _divss(Variable *Dest, Operand *Src0) { |
| 536 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 452 Context.insert<typename Traits::Insts::Divss>(Dest, Src0); | 537 Context.insert<typename Traits::Insts::Divss>(Dest, Src0); |
| 453 } | 538 } |
| 454 template <typename T = Traits> | 539 template <typename T = Traits> |
| 455 typename std::enable_if<T::UsesX87, void>::type _fld(Operand *Src0) { | 540 typename std::enable_if<T::UsesX87, void>::type _fld(Operand *Src0) { |
| 541 AutoMemorySandboxer<> _(this, &Src0); |
| 456 Context.insert<typename Traits::Insts::template Fld<>>(Src0); | 542 Context.insert<typename Traits::Insts::template Fld<>>(Src0); |
| 457 } | 543 } |
| 458 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 | 544 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 |
| 459 // does not invoke this method, and remove it. | 545 // does not invoke this method, and remove it. |
| 460 template <typename T = Traits> | 546 template <typename T = Traits> |
| 461 typename std::enable_if<!T::UsesX87, void>::type _fld(Operand *) { | 547 typename std::enable_if<!T::UsesX87, void>::type _fld(Operand *) { |
| 462 llvm::report_fatal_error("fld is not available in x86-64"); | 548 llvm::report_fatal_error("fld is not available in x86-64"); |
| 463 } | 549 } |
| 464 template <typename T = Traits> | 550 template <typename T = Traits> |
| 465 typename std::enable_if<T::UsesX87, void>::type _fstp(Variable *Dest) { | 551 typename std::enable_if<T::UsesX87, void>::type _fstp(Variable *Dest) { |
| 552 AutoMemorySandboxer<> _(this, &Dest); |
| 466 Context.insert<typename Traits::Insts::template Fstp<>>(Dest); | 553 Context.insert<typename Traits::Insts::template Fstp<>>(Dest); |
| 467 } | 554 } |
| 468 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 | 555 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 |
| 469 // does not invoke this method, and remove it. | 556 // does not invoke this method, and remove it. |
| 470 template <typename T = Traits> | 557 template <typename T = Traits> |
| 471 typename std::enable_if<!T::UsesX87, void>::type _fstp(Variable *) { | 558 typename std::enable_if<!T::UsesX87, void>::type _fstp(Variable *) { |
| 472 llvm::report_fatal_error("fstp is not available in x86-64"); | 559 llvm::report_fatal_error("fstp is not available in x86-64"); |
| 473 } | 560 } |
| 474 void _idiv(Variable *Dest, Operand *Src0, Operand *Src1) { | 561 void _idiv(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 562 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 475 Context.insert<typename Traits::Insts::Idiv>(Dest, Src0, Src1); | 563 Context.insert<typename Traits::Insts::Idiv>(Dest, Src0, Src1); |
| 476 } | 564 } |
| 477 void _imul(Variable *Dest, Operand *Src0) { | 565 void _imul(Variable *Dest, Operand *Src0) { |
| 566 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 478 Context.insert<typename Traits::Insts::Imul>(Dest, Src0); | 567 Context.insert<typename Traits::Insts::Imul>(Dest, Src0); |
| 479 } | 568 } |
| 480 void _imul_imm(Variable *Dest, Operand *Src0, Constant *Imm) { | 569 void _imul_imm(Variable *Dest, Operand *Src0, Constant *Imm) { |
| 570 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 481 Context.insert<typename Traits::Insts::ImulImm>(Dest, Src0, Imm); | 571 Context.insert<typename Traits::Insts::ImulImm>(Dest, Src0, Imm); |
| 482 } | 572 } |
| 483 void _insertps(Variable *Dest, Operand *Src0, Operand *Src1) { | 573 void _insertps(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 574 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 484 Context.insert<typename Traits::Insts::Insertps>(Dest, Src0, Src1); | 575 Context.insert<typename Traits::Insts::Insertps>(Dest, Src0, Src1); |
| 485 } | 576 } |
| 486 void _jmp(Operand *Target) { | 577 void _jmp(Operand *Target) { |
| 578 AutoMemorySandboxer<> _(this, &Target); |
| 487 Context.insert<typename Traits::Insts::Jmp>(Target); | 579 Context.insert<typename Traits::Insts::Jmp>(Target); |
| 488 } | 580 } |
| 489 void _lea(Variable *Dest, Operand *Src0) { | 581 void _lea(Variable *Dest, Operand *Src0) { |
| 490 Context.insert<typename Traits::Insts::Lea>(Dest, Src0); | 582 Context.insert<typename Traits::Insts::Lea>(Dest, Src0); |
| 491 } | 583 } |
| 492 void _mfence() { Context.insert<typename Traits::Insts::Mfence>(); } | 584 void _mfence() { Context.insert<typename Traits::Insts::Mfence>(); } |
| 493 /// Moves can be used to redefine registers, creating "partial kills" for | 585 /// Moves can be used to redefine registers, creating "partial kills" for |
| 494 /// liveness. Mark where moves are used in this way. | 586 /// liveness. Mark where moves are used in this way. |
| 495 void _redefined(Inst *MovInst, bool IsRedefinition = true) { | 587 void _redefined(Inst *MovInst, bool IsRedefinition = true) { |
| 496 if (IsRedefinition) | 588 if (IsRedefinition) |
| 497 MovInst->setDestRedefined(); | 589 MovInst->setDestRedefined(); |
| 498 } | 590 } |
| 499 /// If Dest=nullptr is passed in, then a new variable is created, marked as | 591 /// If Dest=nullptr is passed in, then a new variable is created, marked as |
| 500 /// infinite register allocation weight, and returned through the in/out Dest | 592 /// infinite register allocation weight, and returned through the in/out Dest |
| 501 /// argument. | 593 /// argument. |
| 502 typename Traits::Insts::Mov *_mov(Variable *&Dest, Operand *Src0, | 594 typename Traits::Insts::Mov *_mov(Variable *&Dest, Operand *Src0, |
| 503 int32_t RegNum = Variable::NoRegister) { | 595 int32_t RegNum = Variable::NoRegister) { |
| 504 if (Dest == nullptr) | 596 if (Dest == nullptr) |
| 505 Dest = makeReg(Src0->getType(), RegNum); | 597 Dest = makeReg(Src0->getType(), RegNum); |
| 598 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 506 return Context.insert<typename Traits::Insts::Mov>(Dest, Src0); | 599 return Context.insert<typename Traits::Insts::Mov>(Dest, Src0); |
| 507 } | 600 } |
| 601 void _mov_sp(Operand *NewValue) { |
| 602 dispatchToConcrete(&Traits::ConcreteTarget::_mov_sp, std::move(NewValue)); |
| 603 } |
| 508 typename Traits::Insts::Movp *_movp(Variable *Dest, Operand *Src0) { | 604 typename Traits::Insts::Movp *_movp(Variable *Dest, Operand *Src0) { |
| 605 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 509 return Context.insert<typename Traits::Insts::Movp>(Dest, Src0); | 606 return Context.insert<typename Traits::Insts::Movp>(Dest, Src0); |
| 510 } | 607 } |
| 511 void _movd(Variable *Dest, Operand *Src0) { | 608 void _movd(Variable *Dest, Operand *Src0) { |
| 609 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 512 Context.insert<typename Traits::Insts::Movd>(Dest, Src0); | 610 Context.insert<typename Traits::Insts::Movd>(Dest, Src0); |
| 513 } | 611 } |
| 514 void _movq(Variable *Dest, Operand *Src0) { | 612 void _movq(Variable *Dest, Operand *Src0) { |
| 613 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 515 Context.insert<typename Traits::Insts::Movq>(Dest, Src0); | 614 Context.insert<typename Traits::Insts::Movq>(Dest, Src0); |
| 516 } | 615 } |
| 517 void _movss(Variable *Dest, Variable *Src0) { | 616 void _movss(Variable *Dest, Variable *Src0) { |
| 518 Context.insert<typename Traits::Insts::MovssRegs>(Dest, Src0); | 617 Context.insert<typename Traits::Insts::MovssRegs>(Dest, Src0); |
| 519 } | 618 } |
| 520 void _movsx(Variable *Dest, Operand *Src0) { | 619 void _movsx(Variable *Dest, Operand *Src0) { |
| 620 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 521 Context.insert<typename Traits::Insts::Movsx>(Dest, Src0); | 621 Context.insert<typename Traits::Insts::Movsx>(Dest, Src0); |
| 522 } | 622 } |
| 523 void _movzx(Variable *Dest, Operand *Src0) { | 623 typename Traits::Insts::Movzx *_movzx(Variable *Dest, Operand *Src0) { |
| 524 Context.insert<typename Traits::Insts::Movzx>(Dest, Src0); | 624 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 625 return Context.insert<typename Traits::Insts::Movzx>(Dest, Src0); |
| 525 } | 626 } |
| 526 void _maxss(Variable *Dest, Operand *Src0) { | 627 void _maxss(Variable *Dest, Operand *Src0) { |
| 628 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 527 Context.insert<typename Traits::Insts::Maxss>(Dest, Src0); | 629 Context.insert<typename Traits::Insts::Maxss>(Dest, Src0); |
| 528 } | 630 } |
| 529 void _minss(Variable *Dest, Operand *Src0) { | 631 void _minss(Variable *Dest, Operand *Src0) { |
| 632 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 530 Context.insert<typename Traits::Insts::Minss>(Dest, Src0); | 633 Context.insert<typename Traits::Insts::Minss>(Dest, Src0); |
| 531 } | 634 } |
| 532 void _mul(Variable *Dest, Variable *Src0, Operand *Src1) { | 635 void _mul(Variable *Dest, Variable *Src0, Operand *Src1) { |
| 636 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 533 Context.insert<typename Traits::Insts::Mul>(Dest, Src0, Src1); | 637 Context.insert<typename Traits::Insts::Mul>(Dest, Src0, Src1); |
| 534 } | 638 } |
| 535 void _mulps(Variable *Dest, Operand *Src0) { | 639 void _mulps(Variable *Dest, Operand *Src0) { |
| 640 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 536 Context.insert<typename Traits::Insts::Mulps>(Dest, Src0); | 641 Context.insert<typename Traits::Insts::Mulps>(Dest, Src0); |
| 537 } | 642 } |
| 538 void _mulss(Variable *Dest, Operand *Src0) { | 643 void _mulss(Variable *Dest, Operand *Src0) { |
| 644 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 539 Context.insert<typename Traits::Insts::Mulss>(Dest, Src0); | 645 Context.insert<typename Traits::Insts::Mulss>(Dest, Src0); |
| 540 } | 646 } |
| 541 void _neg(Variable *SrcDest) { | 647 void _neg(Variable *SrcDest) { |
| 648 AutoMemorySandboxer<> _(this, &SrcDest); |
| 542 Context.insert<typename Traits::Insts::Neg>(SrcDest); | 649 Context.insert<typename Traits::Insts::Neg>(SrcDest); |
| 543 } | 650 } |
| 544 void _nop(SizeT Variant) { | 651 void _nop(SizeT Variant) { |
| 545 Context.insert<typename Traits::Insts::Nop>(Variant); | 652 Context.insert<typename Traits::Insts::Nop>(Variant); |
| 546 } | 653 } |
| 547 void _or(Variable *Dest, Operand *Src0) { | 654 void _or(Variable *Dest, Operand *Src0) { |
| 655 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 548 Context.insert<typename Traits::Insts::Or>(Dest, Src0); | 656 Context.insert<typename Traits::Insts::Or>(Dest, Src0); |
| 549 } | 657 } |
| 550 void _orps(Variable *Dest, Operand *Src0) { | 658 void _orps(Variable *Dest, Operand *Src0) { |
| 659 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 551 Context.insert<typename Traits::Insts::Orps>(Dest, Src0); | 660 Context.insert<typename Traits::Insts::Orps>(Dest, Src0); |
| 552 } | 661 } |
| 553 void _or_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 662 void _or_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 663 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 554 Context.insert<typename Traits::Insts::OrRMW>(DestSrc0, Src1); | 664 Context.insert<typename Traits::Insts::OrRMW>(DestSrc0, Src1); |
| 555 } | 665 } |
| 556 void _padd(Variable *Dest, Operand *Src0) { | 666 void _padd(Variable *Dest, Operand *Src0) { |
| 667 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 557 Context.insert<typename Traits::Insts::Padd>(Dest, Src0); | 668 Context.insert<typename Traits::Insts::Padd>(Dest, Src0); |
| 558 } | 669 } |
| 559 void _pand(Variable *Dest, Operand *Src0) { | 670 void _pand(Variable *Dest, Operand *Src0) { |
| 671 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 560 Context.insert<typename Traits::Insts::Pand>(Dest, Src0); | 672 Context.insert<typename Traits::Insts::Pand>(Dest, Src0); |
| 561 } | 673 } |
| 562 void _pandn(Variable *Dest, Operand *Src0) { | 674 void _pandn(Variable *Dest, Operand *Src0) { |
| 675 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 563 Context.insert<typename Traits::Insts::Pandn>(Dest, Src0); | 676 Context.insert<typename Traits::Insts::Pandn>(Dest, Src0); |
| 564 } | 677 } |
| 565 void _pblendvb(Variable *Dest, Operand *Src0, Operand *Src1) { | 678 void _pblendvb(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 679 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 566 Context.insert<typename Traits::Insts::Pblendvb>(Dest, Src0, Src1); | 680 Context.insert<typename Traits::Insts::Pblendvb>(Dest, Src0, Src1); |
| 567 } | 681 } |
| 568 void _pcmpeq(Variable *Dest, Operand *Src0, | 682 void _pcmpeq(Variable *Dest, Operand *Src0, |
| 569 Type ArithmeticTypeOverride = IceType_void) { | 683 Type ArithmeticTypeOverride = IceType_void) { |
| 684 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 570 Context.insert<typename Traits::Insts::Pcmpeq>(Dest, Src0, | 685 Context.insert<typename Traits::Insts::Pcmpeq>(Dest, Src0, |
| 571 ArithmeticTypeOverride); | 686 ArithmeticTypeOverride); |
| 572 } | 687 } |
| 573 void _pcmpgt(Variable *Dest, Operand *Src0) { | 688 void _pcmpgt(Variable *Dest, Operand *Src0) { |
| 689 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 574 Context.insert<typename Traits::Insts::Pcmpgt>(Dest, Src0); | 690 Context.insert<typename Traits::Insts::Pcmpgt>(Dest, Src0); |
| 575 } | 691 } |
| 576 void _pextr(Variable *Dest, Operand *Src0, Operand *Src1) { | 692 void _pextr(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 693 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 577 Context.insert<typename Traits::Insts::Pextr>(Dest, Src0, Src1); | 694 Context.insert<typename Traits::Insts::Pextr>(Dest, Src0, Src1); |
| 578 } | 695 } |
| 579 void _pinsr(Variable *Dest, Operand *Src0, Operand *Src1) { | 696 void _pinsr(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 697 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 580 Context.insert<typename Traits::Insts::Pinsr>(Dest, Src0, Src1); | 698 Context.insert<typename Traits::Insts::Pinsr>(Dest, Src0, Src1); |
| 581 } | 699 } |
| 582 void _pmull(Variable *Dest, Operand *Src0) { | 700 void _pmull(Variable *Dest, Operand *Src0) { |
| 701 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 583 Context.insert<typename Traits::Insts::Pmull>(Dest, Src0); | 702 Context.insert<typename Traits::Insts::Pmull>(Dest, Src0); |
| 584 } | 703 } |
| 585 void _pmuludq(Variable *Dest, Operand *Src0) { | 704 void _pmuludq(Variable *Dest, Operand *Src0) { |
| 705 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 586 Context.insert<typename Traits::Insts::Pmuludq>(Dest, Src0); | 706 Context.insert<typename Traits::Insts::Pmuludq>(Dest, Src0); |
| 587 } | 707 } |
| 588 void _pop(Variable *Dest) { | 708 void _pop(Variable *Dest) { |
| 589 Context.insert<typename Traits::Insts::Pop>(Dest); | 709 Context.insert<typename Traits::Insts::Pop>(Dest); |
| 590 } | 710 } |
| 591 void _por(Variable *Dest, Operand *Src0) { | 711 void _por(Variable *Dest, Operand *Src0) { |
| 712 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 592 Context.insert<typename Traits::Insts::Por>(Dest, Src0); | 713 Context.insert<typename Traits::Insts::Por>(Dest, Src0); |
| 593 } | 714 } |
| 594 void _pshufd(Variable *Dest, Operand *Src0, Operand *Src1) { | 715 void _pshufd(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 716 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 595 Context.insert<typename Traits::Insts::Pshufd>(Dest, Src0, Src1); | 717 Context.insert<typename Traits::Insts::Pshufd>(Dest, Src0, Src1); |
| 596 } | 718 } |
| 597 void _psll(Variable *Dest, Operand *Src0) { | 719 void _psll(Variable *Dest, Operand *Src0) { |
| 720 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 598 Context.insert<typename Traits::Insts::Psll>(Dest, Src0); | 721 Context.insert<typename Traits::Insts::Psll>(Dest, Src0); |
| 599 } | 722 } |
| 600 void _psra(Variable *Dest, Operand *Src0) { | 723 void _psra(Variable *Dest, Operand *Src0) { |
| 724 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 601 Context.insert<typename Traits::Insts::Psra>(Dest, Src0); | 725 Context.insert<typename Traits::Insts::Psra>(Dest, Src0); |
| 602 } | 726 } |
| 603 void _psrl(Variable *Dest, Operand *Src0) { | 727 void _psrl(Variable *Dest, Operand *Src0) { |
| 728 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 604 Context.insert<typename Traits::Insts::Psrl>(Dest, Src0); | 729 Context.insert<typename Traits::Insts::Psrl>(Dest, Src0); |
| 605 } | 730 } |
| 606 void _psub(Variable *Dest, Operand *Src0) { | 731 void _psub(Variable *Dest, Operand *Src0) { |
| 732 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 607 Context.insert<typename Traits::Insts::Psub>(Dest, Src0); | 733 Context.insert<typename Traits::Insts::Psub>(Dest, Src0); |
| 608 } | 734 } |
| 609 void _push(Variable *Src0) { | 735 void _push(Operand *Src0) { |
| 610 Context.insert<typename Traits::Insts::Push>(Src0); | 736 Context.insert<typename Traits::Insts::Push>(Src0); |
| 611 } | 737 } |
| 612 void _pxor(Variable *Dest, Operand *Src0) { | 738 void _pxor(Variable *Dest, Operand *Src0) { |
| 739 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 613 Context.insert<typename Traits::Insts::Pxor>(Dest, Src0); | 740 Context.insert<typename Traits::Insts::Pxor>(Dest, Src0); |
| 614 } | 741 } |
| 615 void _ret(Variable *Src0 = nullptr) { | 742 void _ret(Variable *Src0 = nullptr) { |
| 616 Context.insert<typename Traits::Insts::Ret>(Src0); | 743 Context.insert<typename Traits::Insts::Ret>(Src0); |
| 617 } | 744 } |
| 618 void _rol(Variable *Dest, Operand *Src0) { | 745 void _rol(Variable *Dest, Operand *Src0) { |
| 746 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 619 Context.insert<typename Traits::Insts::Rol>(Dest, Src0); | 747 Context.insert<typename Traits::Insts::Rol>(Dest, Src0); |
| 620 } | 748 } |
| 749 X86OperandMem *_sandbox_mem_reference(X86OperandMem *Mem) { |
| 750 return dispatchToConcrete(&Traits::ConcreteTarget::_sandbox_mem_reference, |
| 751 std::move(Mem)); |
| 752 } |
| 621 void _sar(Variable *Dest, Operand *Src0) { | 753 void _sar(Variable *Dest, Operand *Src0) { |
| 754 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 622 Context.insert<typename Traits::Insts::Sar>(Dest, Src0); | 755 Context.insert<typename Traits::Insts::Sar>(Dest, Src0); |
| 623 } | 756 } |
| 624 void _sbb(Variable *Dest, Operand *Src0) { | 757 void _sbb(Variable *Dest, Operand *Src0) { |
| 758 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 625 Context.insert<typename Traits::Insts::Sbb>(Dest, Src0); | 759 Context.insert<typename Traits::Insts::Sbb>(Dest, Src0); |
| 626 } | 760 } |
| 627 void _sbb_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 761 void _sbb_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 762 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 628 Context.insert<typename Traits::Insts::SbbRMW>(DestSrc0, Src1); | 763 Context.insert<typename Traits::Insts::SbbRMW>(DestSrc0, Src1); |
| 629 } | 764 } |
| 630 void _setcc(Variable *Dest, BrCond Condition) { | 765 void _setcc(Variable *Dest, BrCond Condition) { |
| 631 Context.insert<typename Traits::Insts::Setcc>(Dest, Condition); | 766 Context.insert<typename Traits::Insts::Setcc>(Dest, Condition); |
| 632 } | 767 } |
| 633 void _shl(Variable *Dest, Operand *Src0) { | 768 void _shl(Variable *Dest, Operand *Src0) { |
| 769 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 634 Context.insert<typename Traits::Insts::Shl>(Dest, Src0); | 770 Context.insert<typename Traits::Insts::Shl>(Dest, Src0); |
| 635 } | 771 } |
| 636 void _shld(Variable *Dest, Variable *Src0, Operand *Src1) { | 772 void _shld(Variable *Dest, Variable *Src0, Operand *Src1) { |
| 773 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 637 Context.insert<typename Traits::Insts::Shld>(Dest, Src0, Src1); | 774 Context.insert<typename Traits::Insts::Shld>(Dest, Src0, Src1); |
| 638 } | 775 } |
| 639 void _shr(Variable *Dest, Operand *Src0) { | 776 void _shr(Variable *Dest, Operand *Src0) { |
| 777 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 640 Context.insert<typename Traits::Insts::Shr>(Dest, Src0); | 778 Context.insert<typename Traits::Insts::Shr>(Dest, Src0); |
| 641 } | 779 } |
| 642 void _shrd(Variable *Dest, Variable *Src0, Operand *Src1) { | 780 void _shrd(Variable *Dest, Variable *Src0, Operand *Src1) { |
| 781 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 643 Context.insert<typename Traits::Insts::Shrd>(Dest, Src0, Src1); | 782 Context.insert<typename Traits::Insts::Shrd>(Dest, Src0, Src1); |
| 644 } | 783 } |
| 645 void _shufps(Variable *Dest, Operand *Src0, Operand *Src1) { | 784 void _shufps(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 785 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
| 646 Context.insert<typename Traits::Insts::Shufps>(Dest, Src0, Src1); | 786 Context.insert<typename Traits::Insts::Shufps>(Dest, Src0, Src1); |
| 647 } | 787 } |
| 648 void _sqrtss(Variable *Dest, Operand *Src0) { | 788 void _sqrtss(Variable *Dest, Operand *Src0) { |
| 789 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 649 Context.insert<typename Traits::Insts::Sqrtss>(Dest, Src0); | 790 Context.insert<typename Traits::Insts::Sqrtss>(Dest, Src0); |
| 650 } | 791 } |
| 651 void _store(Operand *Value, X86Operand *Mem) { | 792 void _store(Operand *Value, X86Operand *Mem) { |
| 793 AutoMemorySandboxer<> _(this, &Value, &Mem); |
| 652 Context.insert<typename Traits::Insts::Store>(Value, Mem); | 794 Context.insert<typename Traits::Insts::Store>(Value, Mem); |
| 653 } | 795 } |
| 654 void _storep(Variable *Value, X86OperandMem *Mem) { | 796 void _storep(Variable *Value, X86OperandMem *Mem) { |
| 797 AutoMemorySandboxer<> _(this, &Value, &Mem); |
| 655 Context.insert<typename Traits::Insts::StoreP>(Value, Mem); | 798 Context.insert<typename Traits::Insts::StoreP>(Value, Mem); |
| 656 } | 799 } |
| 657 void _storeq(Variable *Value, X86OperandMem *Mem) { | 800 void _storeq(Variable *Value, X86OperandMem *Mem) { |
| 801 AutoMemorySandboxer<> _(this, &Value, &Mem); |
| 658 Context.insert<typename Traits::Insts::StoreQ>(Value, Mem); | 802 Context.insert<typename Traits::Insts::StoreQ>(Value, Mem); |
| 659 } | 803 } |
| 660 void _sub(Variable *Dest, Operand *Src0) { | 804 void _sub(Variable *Dest, Operand *Src0) { |
| 805 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 661 Context.insert<typename Traits::Insts::Sub>(Dest, Src0); | 806 Context.insert<typename Traits::Insts::Sub>(Dest, Src0); |
| 662 } | 807 } |
| 663 void _sub_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 808 void _sub_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 809 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 664 Context.insert<typename Traits::Insts::SubRMW>(DestSrc0, Src1); | 810 Context.insert<typename Traits::Insts::SubRMW>(DestSrc0, Src1); |
| 665 } | 811 } |
| 812 void _sub_sp(Operand *Adjustment) { |
| 813 dispatchToConcrete(&Traits::ConcreteTarget::_sub_sp, std::move(Adjustment)); |
| 814 } |
| 666 void _subps(Variable *Dest, Operand *Src0) { | 815 void _subps(Variable *Dest, Operand *Src0) { |
| 816 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 667 Context.insert<typename Traits::Insts::Subps>(Dest, Src0); | 817 Context.insert<typename Traits::Insts::Subps>(Dest, Src0); |
| 668 } | 818 } |
| 669 void _subss(Variable *Dest, Operand *Src0) { | 819 void _subss(Variable *Dest, Operand *Src0) { |
| 820 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 670 Context.insert<typename Traits::Insts::Subss>(Dest, Src0); | 821 Context.insert<typename Traits::Insts::Subss>(Dest, Src0); |
| 671 } | 822 } |
| 672 void _test(Operand *Src0, Operand *Src1) { | 823 void _test(Operand *Src0, Operand *Src1) { |
| 824 AutoMemorySandboxer<> _(this, &Src0, &Src1); |
| 673 Context.insert<typename Traits::Insts::Test>(Src0, Src1); | 825 Context.insert<typename Traits::Insts::Test>(Src0, Src1); |
| 674 } | 826 } |
| 675 void _ucomiss(Operand *Src0, Operand *Src1) { | 827 void _ucomiss(Operand *Src0, Operand *Src1) { |
| 828 AutoMemorySandboxer<> _(this, &Src0, &Src1); |
| 676 Context.insert<typename Traits::Insts::Ucomiss>(Src0, Src1); | 829 Context.insert<typename Traits::Insts::Ucomiss>(Src0, Src1); |
| 677 } | 830 } |
| 678 void _ud2() { Context.insert<typename Traits::Insts::UD2>(); } | 831 void _ud2() { Context.insert<typename Traits::Insts::UD2>(); } |
| 679 void _xadd(Operand *Dest, Variable *Src, bool Locked) { | 832 void _xadd(Operand *Dest, Variable *Src, bool Locked) { |
| 833 AutoMemorySandboxer<> _(this, &Dest, &Src); |
| 680 Context.insert<typename Traits::Insts::Xadd>(Dest, Src, Locked); | 834 Context.insert<typename Traits::Insts::Xadd>(Dest, Src, Locked); |
| 681 // The xadd exchanges Dest and Src (modifying Src). Model that update with | 835 // The xadd exchanges Dest and Src (modifying Src). Model that update with |
| 682 // a FakeDef followed by a FakeUse. | 836 // a FakeDef followed by a FakeUse. |
| 683 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); | 837 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); |
| 684 _set_dest_redefined(); | 838 _set_dest_redefined(); |
| 685 Context.insert<InstFakeUse>(Src); | 839 Context.insert<InstFakeUse>(Src); |
| 686 } | 840 } |
| 687 void _xchg(Operand *Dest, Variable *Src) { | 841 void _xchg(Operand *Dest, Variable *Src) { |
| 842 AutoMemorySandboxer<> _(this, &Dest, &Src); |
| 688 Context.insert<typename Traits::Insts::Xchg>(Dest, Src); | 843 Context.insert<typename Traits::Insts::Xchg>(Dest, Src); |
| 689 // The xchg modifies Dest and Src -- model that update with a | 844 // The xchg modifies Dest and Src -- model that update with a |
| 690 // FakeDef/FakeUse. | 845 // FakeDef/FakeUse. |
| 691 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); | 846 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); |
| 692 _set_dest_redefined(); | 847 _set_dest_redefined(); |
| 693 Context.insert<InstFakeUse>(Src); | 848 Context.insert<InstFakeUse>(Src); |
| 694 } | 849 } |
| 695 void _xor(Variable *Dest, Operand *Src0) { | 850 void _xor(Variable *Dest, Operand *Src0) { |
| 851 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 696 Context.insert<typename Traits::Insts::Xor>(Dest, Src0); | 852 Context.insert<typename Traits::Insts::Xor>(Dest, Src0); |
| 697 } | 853 } |
| 698 void _xorps(Variable *Dest, Operand *Src0) { | 854 void _xorps(Variable *Dest, Operand *Src0) { |
| 855 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 699 Context.insert<typename Traits::Insts::Xorps>(Dest, Src0); | 856 Context.insert<typename Traits::Insts::Xorps>(Dest, Src0); |
| 700 } | 857 } |
| 701 void _xor_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 858 void _xor_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 859 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
| 702 Context.insert<typename Traits::Insts::XorRMW>(DestSrc0, Src1); | 860 Context.insert<typename Traits::Insts::XorRMW>(DestSrc0, Src1); |
| 703 } | 861 } |
| 704 | 862 |
| 705 void _iaca_start() { | 863 void _iaca_start() { |
| 706 if (!BuildDefs::minimal()) | 864 if (!BuildDefs::minimal()) |
| 707 Context.insert<typename Traits::Insts::IacaStart>(); | 865 Context.insert<typename Traits::Insts::IacaStart>(); |
| 708 } | 866 } |
| 709 void _iaca_end() { | 867 void _iaca_end() { |
| 710 if (!BuildDefs::minimal()) | 868 if (!BuildDefs::minimal()) |
| 711 Context.insert<typename Traits::Insts::IacaEnd>(); | 869 Context.insert<typename Traits::Insts::IacaEnd>(); |
| (...skipping 129 matching lines...) |
| 841 | 999 |
| 842 static FixupKind PcRelFixup; | 1000 static FixupKind PcRelFixup; |
| 843 static FixupKind AbsFixup; | 1001 static FixupKind AbsFixup; |
| 844 }; | 1002 }; |
| 845 } // end of namespace X86NAMESPACE | 1003 } // end of namespace X86NAMESPACE |
| 846 } // end of namespace Ice | 1004 } // end of namespace Ice |
| 847 | 1005 |
| 848 #include "IceTargetLoweringX86BaseImpl.h" | 1006 #include "IceTargetLoweringX86BaseImpl.h" |
| 849 | 1007 |
| 850 #endif // SUBZERO_SRC_ICETARGETLOWERINGX86BASE_H | 1008 #endif // SUBZERO_SRC_ICETARGETLOWERINGX86BASE_H |