OLD | NEW |
1 //===- subzero/src/IceTargetLoweringX86Base.h - x86 lowering ----*- C++ -*-===// | 1 //===- subzero/src/IceTargetLoweringX86Base.h - x86 lowering ----*- C++ -*-===// |
2 // | 2 // |
3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
4 // | 4 // |
5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
7 // | 7 // |
8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
9 /// | 9 /// |
10 /// \file | 10 /// \file |
(...skipping 66 matching lines...) |
77 using InstX86Label = typename Traits::Insts::Label; | 77 using InstX86Label = typename Traits::Insts::Label; |
78 | 78 |
79 ~TargetX86Base() override = default; | 79 ~TargetX86Base() override = default; |
80 | 80 |
81 static void staticInit(const ClFlags &Flags); | 81 static void staticInit(const ClFlags &Flags); |
82 static TargetX86Base *create(Cfg *Func) { return new TargetX86Base(Func); } | 82 static TargetX86Base *create(Cfg *Func) { return new TargetX86Base(Func); } |
83 | 83 |
84 static FixupKind getPcRelFixup() { return PcRelFixup; } | 84 static FixupKind getPcRelFixup() { return PcRelFixup; } |
85 static FixupKind getAbsFixup() { return AbsFixup; } | 85 static FixupKind getAbsFixup() { return AbsFixup; } |
86 | 86 |
| 87 bool needSandboxing() const { return NeedSandboxing; } |
| 88 |
87 void translateOm1() override; | 89 void translateOm1() override; |
88 void translateO2() override; | 90 void translateO2() override; |
89 void doLoadOpt(); | 91 void doLoadOpt(); |
90 bool doBranchOpt(Inst *I, const CfgNode *NextNode) override; | 92 bool doBranchOpt(Inst *I, const CfgNode *NextNode) override; |
91 | 93 |
92 SizeT getNumRegisters() const override { | 94 SizeT getNumRegisters() const override { |
93 return Traits::RegisterSet::Reg_NUM; | 95 return Traits::RegisterSet::Reg_NUM; |
94 } | 96 } |
95 Variable *getPhysicalRegister(SizeT RegNum, Type Ty = IceType_void) override; | 97 Variable *getPhysicalRegister(SizeT RegNum, Type Ty = IceType_void) override; |
96 IceString getRegName(SizeT RegNum, Type Ty) const override; | 98 IceString getRegName(SizeT RegNum, Type Ty) const override; |
(...skipping 80 matching lines...) |
177 | 179 |
178 void finishArgumentLowering(Variable *Arg, Variable *FramePtr, | 180 void finishArgumentLowering(Variable *Arg, Variable *FramePtr, |
179 size_t BasicFrameOffset, size_t StackAdjBytes, | 181 size_t BasicFrameOffset, size_t StackAdjBytes, |
180 size_t &InArgsSizeBytes); | 182 size_t &InArgsSizeBytes); |
181 X86Address stackVarToAsmOperand(const Variable *Var) const; | 183 X86Address stackVarToAsmOperand(const Variable *Var) const; |
182 | 184 |
183 InstructionSetEnum getInstructionSet() const { return InstructionSet; } | 185 InstructionSetEnum getInstructionSet() const { return InstructionSet; } |
184 Operand *legalizeUndef(Operand *From, int32_t RegNum = Variable::NoRegister); | 186 Operand *legalizeUndef(Operand *From, int32_t RegNum = Variable::NoRegister); |
185 | 187 |
186 protected: | 188 protected: |
| 189 const bool NeedSandboxing; |
| 190 |
187 explicit TargetX86Base(Cfg *Func); | 191 explicit TargetX86Base(Cfg *Func); |
188 | 192 |
189 void postLower() override; | 193 void postLower() override; |
190 | 194 |
| 195 void initSandbox() { |
| 196 dispatchToConcrete(&Traits::ConcreteTarget::initSandbox); |
| 197 } |
| 198 |
191 void lowerAlloca(const InstAlloca *Inst) override; | 199 void lowerAlloca(const InstAlloca *Inst) override; |
192 void lowerArithmetic(const InstArithmetic *Inst) override; | 200 void lowerArithmetic(const InstArithmetic *Inst) override; |
193 void lowerAssign(const InstAssign *Inst) override; | 201 void lowerAssign(const InstAssign *Inst) override; |
194 void lowerBr(const InstBr *Inst) override; | 202 void lowerBr(const InstBr *Inst) override; |
195 void lowerCast(const InstCast *Inst) override; | 203 void lowerCast(const InstCast *Inst) override; |
196 void lowerExtractElement(const InstExtractElement *Inst) override; | 204 void lowerExtractElement(const InstExtractElement *Inst) override; |
197 void lowerFcmp(const InstFcmp *Inst) override; | 205 void lowerFcmp(const InstFcmp *Inst) override; |
198 void lowerIcmp(const InstIcmp *Inst) override; | 206 void lowerIcmp(const InstIcmp *Inst) override; |
199 | 207 |
200 void lowerIntrinsicCall(const InstIntrinsicCall *Inst) override; | 208 void lowerIntrinsicCall(const InstIntrinsicCall *Inst) override; |
(...skipping 34 matching lines...) |
235 /// Copy memory of given type from Src to Dest using OffsetAmt on both. | 243 /// Copy memory of given type from Src to Dest using OffsetAmt on both. |
236 void copyMemory(Type Ty, Variable *Dest, Variable *Src, int32_t OffsetAmt); | 244 void copyMemory(Type Ty, Variable *Dest, Variable *Src, int32_t OffsetAmt); |
237 /// Replace some calls to memcpy with inline instructions. | 245 /// Replace some calls to memcpy with inline instructions. |
238 void lowerMemcpy(Operand *Dest, Operand *Src, Operand *Count); | 246 void lowerMemcpy(Operand *Dest, Operand *Src, Operand *Count); |
239 /// Replace some calls to memmove with inline instructions. | 247 /// Replace some calls to memmove with inline instructions. |
240 void lowerMemmove(Operand *Dest, Operand *Src, Operand *Count); | 248 void lowerMemmove(Operand *Dest, Operand *Src, Operand *Count); |
241 /// Replace some calls to memset with inline instructions. | 249 /// Replace some calls to memset with inline instructions. |
242 void lowerMemset(Operand *Dest, Operand *Val, Operand *Count); | 250 void lowerMemset(Operand *Dest, Operand *Val, Operand *Count); |
243 | 251 |
244 /// Lower an indirect jump adding sandboxing when needed. | 252 /// Lower an indirect jump adding sandboxing when needed. |
245 void lowerIndirectJump(Variable *JumpTarget); | 253 void lowerIndirectJump(Variable *JumpTarget) { |
| 254 // Without std::move below, the compiler deduces that the argument to |
 | 255 // lowerIndirectJump is a Variable *&, not a Variable *. |
| 256 dispatchToConcrete(&Traits::ConcreteTarget::lowerIndirectJump, |
| 257 std::move(JumpTarget)); |
| 258 } |
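
The std::move in lowerIndirectJump (and in the _add_sp/_mov_sp/_sub_sp helpers further down) works around a template-argument-deduction conflict: assuming dispatchToConcrete deduces its parameter pack both from the member-function pointer and from the forwarded arguments, passing the named pointer as an lvalue would deduce Variable *& from the argument but Variable * from the member pointer. A minimal, self-contained sketch of that conflict, using hypothetical Dispatcher/Concrete types rather than Subzero's actual API:

    #include <utility>

    struct Concrete {
      void lowerIndirectJump(int *Target) { (void)Target; }
    };

    struct Dispatcher {
      Concrete Impl;
      // Args is deduced twice: from the member-function pointer and from the
      // forwarded arguments; the two deductions must agree.
      template <typename... Args>
      void dispatchToConcrete(void (Concrete::*Method)(Args...),
                              Args &&... DispatchArgs) {
        (Impl.*Method)(std::forward<Args>(DispatchArgs)...);
      }
    };

    int main() {
      Dispatcher D;
      int X = 0;
      int *Target = &X;
      // D.dispatchToConcrete(&Concrete::lowerIndirectJump, Target);
      //   would not compile: the lvalue Target deduces Args as int *&, while
      //   the member pointer deduces Args as int *.
      D.dispatchToConcrete(&Concrete::lowerIndirectJump, std::move(Target)); // OK
      return 0;
    }
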
246 | 259 |
247 /// Check the comparison is in [Min,Max]. The flags register will be modified | 260 /// Check the comparison is in [Min,Max]. The flags register will be modified |
248 /// with: | 261 /// with: |
249 /// - below equal, if in range | 262 /// - below equal, if in range |
250 /// - above, set if not in range | 263 /// - above, set if not in range |
251 /// The index into the range is returned. | 264 /// The index into the range is returned. |
252 Operand *lowerCmpRange(Operand *Comparison, uint64_t Min, uint64_t Max); | 265 Operand *lowerCmpRange(Operand *Comparison, uint64_t Min, uint64_t Max); |
253 /// Lowering of a cluster of switch cases. If the case is not matched control | 266 /// Lowering of a cluster of switch cases. If the case is not matched control |
254 /// will pass to the default label provided. If the default label is nullptr | 267 /// will pass to the default label provided. If the default label is nullptr |
255 /// then control will fall through to the next instruction. DoneCmp should be | 268 /// then control will fall through to the next instruction. DoneCmp should be |
(...skipping 83 matching lines...) |
339 | 352 |
340 /// Return a memory operand corresponding to a stack allocated Variable. | 353 /// Return a memory operand corresponding to a stack allocated Variable. |
341 X86OperandMem *getMemoryOperandForStackSlot(Type Ty, Variable *Slot, | 354 X86OperandMem *getMemoryOperandForStackSlot(Type Ty, Variable *Slot, |
342 uint32_t Offset = 0); | 355 uint32_t Offset = 0); |
343 | 356 |
344 void | 357 void |
345 makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation, | 358 makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation, |
346 const llvm::SmallBitVector &ExcludeRegisters, | 359 const llvm::SmallBitVector &ExcludeRegisters, |
347 uint64_t Salt) const override; | 360 uint64_t Salt) const override; |
348 | 361 |
| 362 /// AutoMemorySandboxer emits a bundle-lock/bundle-unlock pair if the |
| 363 /// instruction's operand is a memory reference. This is only needed for |
 | 364 /// the x86-64 NaCl sandbox. |
| 365 template <InstBundleLock::Option BundleLockOpt = InstBundleLock::Opt_None> |
| 366 class AutoMemorySandboxer { |
| 367 AutoMemorySandboxer() = delete; |
| 368 AutoMemorySandboxer(const AutoMemorySandboxer &) = delete; |
| 369 AutoMemorySandboxer &operator=(const AutoMemorySandboxer &) = delete; |
| 370 |
| 371 private: |
| 372 typename Traits::TargetLowering *Target; |
| 373 |
| 374 template <typename T, typename... Tail> |
| 375 X86OperandMem **findMemoryReference(T **First, Tail... Others) { |
| 376 if (llvm::isa<X86OperandMem>(*First)) { |
| 377 return reinterpret_cast<X86OperandMem **>(First); |
| 378 } |
| 379 return findMemoryReference(Others...); |
| 380 } |
| 381 |
| 382 X86OperandMem **findMemoryReference() { return nullptr; } |
| 383 |
| 384 public: |
| 385 X86OperandMem **const MemOperand; |
| 386 |
| 387 template <typename... T> |
| 388 AutoMemorySandboxer(typename Traits::TargetLowering *Target, T... Args) |
| 389 : Target(Target), |
| 390 MemOperand( |
| 391 (!Traits::Is64Bit || !Target->Ctx->getFlags().getUseSandboxing()) |
| 392 ? nullptr |
| 393 : findMemoryReference(Args...)) { |
| 394 if (MemOperand != nullptr) { |
| 395 Target->_bundle_lock(BundleLockOpt); |
| 396 *MemOperand = Target->_sandbox_mem_reference(*MemOperand); |
| 397 } |
| 398 } |
| 399 |
| 400 ~AutoMemorySandboxer() { |
| 401 if (MemOperand != nullptr) { |
| 402 Target->_bundle_unlock(); |
| 403 } |
| 404 } |
| 405 }; |
| 406 |
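
A minimal, self-contained sketch of the RAII pattern AutoMemorySandboxer implements above, using hypothetical FakeTarget/Operand types rather than Subzero's: the constructor scans a variadic pack of operand slots for the first memory operand and, only if one is found, emits the bundle lock and rewrites the reference; the destructor emits the matching bundle unlock. The emitter helpers below use it the same way, e.g. AutoMemorySandboxer<> _(this, &Dest, &Src0); at the top of each _xxx() helper.

    #include <cstdio>

    struct Operand { bool IsMem; };

    struct FakeTarget {
      void bundleLock() { std::puts("bundle_lock"); }
      void bundleUnlock() { std::puts("bundle_unlock"); }
      Operand *sandbox(Operand *Mem) { std::puts("sandbox mem reference"); return Mem; }
    };

    class ScopedMemSandboxer {
      FakeTarget *Target;
      Operand **MemOperand;

      // Return the first slot that holds a memory operand, or nullptr if none.
      static Operand **find() { return nullptr; }
      template <typename... Tail>
      static Operand **find(Operand **First, Tail... Others) {
        return (*First)->IsMem ? First : find(Others...);
      }

    public:
      template <typename... T>
      ScopedMemSandboxer(FakeTarget *Target, T... Slots)
          : Target(Target), MemOperand(find(Slots...)) {
        if (MemOperand != nullptr) {
          Target->bundleLock();
          *MemOperand = Target->sandbox(*MemOperand);
        }
      }
      ~ScopedMemSandboxer() {
        if (MemOperand != nullptr)
          Target->bundleUnlock();
      }
    };

    int main() {
      FakeTarget T;
      Operand Reg = {false}, Mem = {true};
      Operand *Dest = &Reg, *Src = &Mem;
      {
        ScopedMemSandboxer _(&T, &Dest, &Src); // finds Src, emits bundle_lock
        std::puts("emit instruction");
      } // bundle_unlock emitted here
      return 0;
    }
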
349 /// The following are helpers that insert lowered x86 instructions with | 407 /// The following are helpers that insert lowered x86 instructions with |
350 /// minimal syntactic overhead, so that the lowering code can look as close to | 408 /// minimal syntactic overhead, so that the lowering code can look as close to |
351 /// assembly as practical. | 409 /// assembly as practical. |
352 void _adc(Variable *Dest, Operand *Src0) { | 410 void _adc(Variable *Dest, Operand *Src0) { |
| 411 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
353 Context.insert<typename Traits::Insts::Adc>(Dest, Src0); | 412 Context.insert<typename Traits::Insts::Adc>(Dest, Src0); |
354 } | 413 } |
355 void _adc_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 414 void _adc_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 415 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
356 Context.insert<typename Traits::Insts::AdcRMW>(DestSrc0, Src1); | 416 Context.insert<typename Traits::Insts::AdcRMW>(DestSrc0, Src1); |
357 } | 417 } |
358 void _add(Variable *Dest, Operand *Src0) { | 418 void _add(Variable *Dest, Operand *Src0) { |
| 419 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
359 Context.insert<typename Traits::Insts::Add>(Dest, Src0); | 420 Context.insert<typename Traits::Insts::Add>(Dest, Src0); |
360 } | 421 } |
361 void _add_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 422 void _add_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 423 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
362 Context.insert<typename Traits::Insts::AddRMW>(DestSrc0, Src1); | 424 Context.insert<typename Traits::Insts::AddRMW>(DestSrc0, Src1); |
363 } | 425 } |
364 void _addps(Variable *Dest, Operand *Src0) { | 426 void _addps(Variable *Dest, Operand *Src0) { |
| 427 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
365 Context.insert<typename Traits::Insts::Addps>(Dest, Src0); | 428 Context.insert<typename Traits::Insts::Addps>(Dest, Src0); |
366 } | 429 } |
367 void _addss(Variable *Dest, Operand *Src0) { | 430 void _addss(Variable *Dest, Operand *Src0) { |
| 431 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
368 Context.insert<typename Traits::Insts::Addss>(Dest, Src0); | 432 Context.insert<typename Traits::Insts::Addss>(Dest, Src0); |
369 } | 433 } |
| 434 void _add_sp(Operand *Adjustment) { |
| 435 dispatchToConcrete(&Traits::ConcreteTarget::_add_sp, std::move(Adjustment)); |
| 436 } |
370 void _and(Variable *Dest, Operand *Src0) { | 437 void _and(Variable *Dest, Operand *Src0) { |
| 438 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
371 Context.insert<typename Traits::Insts::And>(Dest, Src0); | 439 Context.insert<typename Traits::Insts::And>(Dest, Src0); |
372 } | 440 } |
373 void _andnps(Variable *Dest, Operand *Src0) { | 441 void _andnps(Variable *Dest, Operand *Src0) { |
| 442 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
374 Context.insert<typename Traits::Insts::Andnps>(Dest, Src0); | 443 Context.insert<typename Traits::Insts::Andnps>(Dest, Src0); |
375 } | 444 } |
376 void _andps(Variable *Dest, Operand *Src0) { | 445 void _andps(Variable *Dest, Operand *Src0) { |
| 446 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
377 Context.insert<typename Traits::Insts::Andps>(Dest, Src0); | 447 Context.insert<typename Traits::Insts::Andps>(Dest, Src0); |
378 } | 448 } |
379 void _and_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 449 void _and_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 450 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
380 Context.insert<typename Traits::Insts::AndRMW>(DestSrc0, Src1); | 451 Context.insert<typename Traits::Insts::AndRMW>(DestSrc0, Src1); |
381 } | 452 } |
382 void _blendvps(Variable *Dest, Operand *Src0, Operand *Src1) { | 453 void _blendvps(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 454 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
383 Context.insert<typename Traits::Insts::Blendvps>(Dest, Src0, Src1); | 455 Context.insert<typename Traits::Insts::Blendvps>(Dest, Src0, Src1); |
384 } | 456 } |
385 void _br(BrCond Condition, CfgNode *TargetTrue, CfgNode *TargetFalse) { | 457 void _br(BrCond Condition, CfgNode *TargetTrue, CfgNode *TargetFalse) { |
386 Context.insert<InstX86Br>(TargetTrue, TargetFalse, Condition, | 458 Context.insert<InstX86Br>(TargetTrue, TargetFalse, Condition, |
387 InstX86Br::Far); | 459 InstX86Br::Far); |
388 } | 460 } |
389 void _br(CfgNode *Target) { | 461 void _br(CfgNode *Target) { |
390 Context.insert<InstX86Br>(Target, InstX86Br::Far); | 462 Context.insert<InstX86Br>(Target, InstX86Br::Far); |
391 } | 463 } |
392 void _br(BrCond Condition, CfgNode *Target) { | 464 void _br(BrCond Condition, CfgNode *Target) { |
393 Context.insert<InstX86Br>(Target, Condition, InstX86Br::Far); | 465 Context.insert<InstX86Br>(Target, Condition, InstX86Br::Far); |
394 } | 466 } |
395 void _br(BrCond Condition, InstX86Label *Label, | 467 void _br(BrCond Condition, InstX86Label *Label, |
396 typename InstX86Br::Mode Kind = InstX86Br::Near) { | 468 typename InstX86Br::Mode Kind = InstX86Br::Near) { |
397 Context.insert<InstX86Br>(Label, Condition, Kind); | 469 Context.insert<InstX86Br>(Label, Condition, Kind); |
398 } | 470 } |
399 void _bsf(Variable *Dest, Operand *Src0) { | 471 void _bsf(Variable *Dest, Operand *Src0) { |
| 472 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
400 Context.insert<typename Traits::Insts::Bsf>(Dest, Src0); | 473 Context.insert<typename Traits::Insts::Bsf>(Dest, Src0); |
401 } | 474 } |
402 void _bsr(Variable *Dest, Operand *Src0) { | 475 void _bsr(Variable *Dest, Operand *Src0) { |
| 476 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
403 Context.insert<typename Traits::Insts::Bsr>(Dest, Src0); | 477 Context.insert<typename Traits::Insts::Bsr>(Dest, Src0); |
404 } | 478 } |
405 void _bswap(Variable *SrcDest) { | 479 void _bswap(Variable *SrcDest) { |
| 480 AutoMemorySandboxer<> _(this, &SrcDest); |
406 Context.insert<typename Traits::Insts::Bswap>(SrcDest); | 481 Context.insert<typename Traits::Insts::Bswap>(SrcDest); |
407 } | 482 } |
408 void _cbwdq(Variable *Dest, Operand *Src0) { | 483 void _cbwdq(Variable *Dest, Operand *Src0) { |
| 484 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
409 Context.insert<typename Traits::Insts::Cbwdq>(Dest, Src0); | 485 Context.insert<typename Traits::Insts::Cbwdq>(Dest, Src0); |
410 } | 486 } |
411 void _cmov(Variable *Dest, Operand *Src0, BrCond Condition) { | 487 void _cmov(Variable *Dest, Operand *Src0, BrCond Condition) { |
| 488 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
412 Context.insert<typename Traits::Insts::Cmov>(Dest, Src0, Condition); | 489 Context.insert<typename Traits::Insts::Cmov>(Dest, Src0, Condition); |
413 } | 490 } |
414 void _cmp(Operand *Src0, Operand *Src1) { | 491 void _cmp(Operand *Src0, Operand *Src1) { |
| 492 AutoMemorySandboxer<> _(this, &Src0, &Src1); |
415 Context.insert<typename Traits::Insts::Icmp>(Src0, Src1); | 493 Context.insert<typename Traits::Insts::Icmp>(Src0, Src1); |
416 } | 494 } |
417 void _cmpps(Variable *Dest, Operand *Src0, CmppsCond Condition) { | 495 void _cmpps(Variable *Dest, Operand *Src0, CmppsCond Condition) { |
| 496 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
418 Context.insert<typename Traits::Insts::Cmpps>(Dest, Src0, Condition); | 497 Context.insert<typename Traits::Insts::Cmpps>(Dest, Src0, Condition); |
419 } | 498 } |
420 void _cmpxchg(Operand *DestOrAddr, Variable *Eax, Variable *Desired, | 499 void _cmpxchg(Operand *DestOrAddr, Variable *Eax, Variable *Desired, |
421 bool Locked) { | 500 bool Locked) { |
| 501 AutoMemorySandboxer<> _(this, &DestOrAddr); |
422 Context.insert<typename Traits::Insts::Cmpxchg>(DestOrAddr, Eax, Desired, | 502 Context.insert<typename Traits::Insts::Cmpxchg>(DestOrAddr, Eax, Desired, |
423 Locked); | 503 Locked); |
424 // Mark eax as possibly modified by cmpxchg. | 504 // Mark eax as possibly modified by cmpxchg. |
425 Context.insert<InstFakeDef>(Eax, llvm::dyn_cast<Variable>(DestOrAddr)); | 505 Context.insert<InstFakeDef>(Eax, llvm::dyn_cast<Variable>(DestOrAddr)); |
426 _set_dest_redefined(); | 506 _set_dest_redefined(); |
427 Context.insert<InstFakeUse>(Eax); | 507 Context.insert<InstFakeUse>(Eax); |
428 } | 508 } |
429 void _cmpxchg8b(X86OperandMem *Addr, Variable *Edx, Variable *Eax, | 509 void _cmpxchg8b(X86OperandMem *Addr, Variable *Edx, Variable *Eax, |
430 Variable *Ecx, Variable *Ebx, bool Locked) { | 510 Variable *Ecx, Variable *Ebx, bool Locked) { |
| 511 AutoMemorySandboxer<> _(this, &Addr); |
431 Context.insert<typename Traits::Insts::Cmpxchg8b>(Addr, Edx, Eax, Ecx, Ebx, | 512 Context.insert<typename Traits::Insts::Cmpxchg8b>(Addr, Edx, Eax, Ecx, Ebx, |
432 Locked); | 513 Locked); |
433 // Mark edx, and eax as possibly modified by cmpxchg8b. | 514 // Mark edx, and eax as possibly modified by cmpxchg8b. |
434 Context.insert<InstFakeDef>(Edx); | 515 Context.insert<InstFakeDef>(Edx); |
435 _set_dest_redefined(); | 516 _set_dest_redefined(); |
436 Context.insert<InstFakeUse>(Edx); | 517 Context.insert<InstFakeUse>(Edx); |
437 Context.insert<InstFakeDef>(Eax); | 518 Context.insert<InstFakeDef>(Eax); |
438 _set_dest_redefined(); | 519 _set_dest_redefined(); |
439 Context.insert<InstFakeUse>(Eax); | 520 Context.insert<InstFakeUse>(Eax); |
440 } | 521 } |
441 void _cvt(Variable *Dest, Operand *Src0, | 522 void _cvt(Variable *Dest, Operand *Src0, |
442 typename Traits::Insts::Cvt::CvtVariant Variant) { | 523 typename Traits::Insts::Cvt::CvtVariant Variant) { |
| 524 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
443 Context.insert<typename Traits::Insts::Cvt>(Dest, Src0, Variant); | 525 Context.insert<typename Traits::Insts::Cvt>(Dest, Src0, Variant); |
444 } | 526 } |
445 void _div(Variable *Dest, Operand *Src0, Operand *Src1) { | 527 void _div(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 528 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
446 Context.insert<typename Traits::Insts::Div>(Dest, Src0, Src1); | 529 Context.insert<typename Traits::Insts::Div>(Dest, Src0, Src1); |
447 } | 530 } |
448 void _divps(Variable *Dest, Operand *Src0) { | 531 void _divps(Variable *Dest, Operand *Src0) { |
| 532 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
449 Context.insert<typename Traits::Insts::Divps>(Dest, Src0); | 533 Context.insert<typename Traits::Insts::Divps>(Dest, Src0); |
450 } | 534 } |
451 void _divss(Variable *Dest, Operand *Src0) { | 535 void _divss(Variable *Dest, Operand *Src0) { |
| 536 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
452 Context.insert<typename Traits::Insts::Divss>(Dest, Src0); | 537 Context.insert<typename Traits::Insts::Divss>(Dest, Src0); |
453 } | 538 } |
454 template <typename T = Traits> | 539 template <typename T = Traits> |
455 typename std::enable_if<T::UsesX87, void>::type _fld(Operand *Src0) { | 540 typename std::enable_if<T::UsesX87, void>::type _fld(Operand *Src0) { |
| 541 AutoMemorySandboxer<> _(this, &Src0); |
456 Context.insert<typename Traits::Insts::template Fld<>>(Src0); | 542 Context.insert<typename Traits::Insts::template Fld<>>(Src0); |
457 } | 543 } |
458 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 | 544 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 |
459 // does not invoke this method, and remove it. | 545 // does not invoke this method, and remove it. |
460 template <typename T = Traits> | 546 template <typename T = Traits> |
461 typename std::enable_if<!T::UsesX87, void>::type _fld(Operand *) { | 547 typename std::enable_if<!T::UsesX87, void>::type _fld(Operand *) { |
462 llvm::report_fatal_error("fld is not available in x86-64"); | 548 llvm::report_fatal_error("fld is not available in x86-64"); |
463 } | 549 } |
464 template <typename T = Traits> | 550 template <typename T = Traits> |
465 typename std::enable_if<T::UsesX87, void>::type _fstp(Variable *Dest) { | 551 typename std::enable_if<T::UsesX87, void>::type _fstp(Variable *Dest) { |
| 552 AutoMemorySandboxer<> _(this, &Dest); |
466 Context.insert<typename Traits::Insts::template Fstp<>>(Dest); | 553 Context.insert<typename Traits::Insts::template Fstp<>>(Dest); |
467 } | 554 } |
468 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 | 555 // TODO(jpp): when implementing the X8664 calling convention, make sure x8664 |
469 // does not invoke this method, and remove it. | 556 // does not invoke this method, and remove it. |
470 template <typename T = Traits> | 557 template <typename T = Traits> |
471 typename std::enable_if<!T::UsesX87, void>::type _fstp(Variable *) { | 558 typename std::enable_if<!T::UsesX87, void>::type _fstp(Variable *) { |
472 llvm::report_fatal_error("fstp is not available in x86-64"); | 559 llvm::report_fatal_error("fstp is not available in x86-64"); |
473 } | 560 } |
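
The _fld/_fstp pair above relies on enable_if-based SFINAE keyed on a Traits flag, so the x87 helpers exist only when the target actually uses x87 and collapse to a fatal-error stub on x86-64. A minimal, self-contained sketch of that dispatch, with hypothetical Traits32/Traits64 flags rather than Subzero's traits classes:

    #include <cstdio>
    #include <cstdlib>
    #include <type_traits>

    struct Traits32 { static constexpr bool UsesX87 = true; };
    struct Traits64 { static constexpr bool UsesX87 = false; };

    template <typename Traits> struct Lowering {
      // Selected when the target uses x87.
      template <typename T = Traits>
      typename std::enable_if<T::UsesX87, void>::type _fld() {
        std::puts("emit fld");
      }
      // Selected otherwise; mirrors the report_fatal_error stub above.
      template <typename T = Traits>
      typename std::enable_if<!T::UsesX87, void>::type _fld() {
        std::puts("fld is not available in x86-64");
        std::abort();
      }
    };

    int main() {
      Lowering<Traits32>()._fld(); // emits fld
      Lowering<Traits64>()._fld(); // prints the error and aborts
      return 0;
    }
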
474 void _idiv(Variable *Dest, Operand *Src0, Operand *Src1) { | 561 void _idiv(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 562 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
475 Context.insert<typename Traits::Insts::Idiv>(Dest, Src0, Src1); | 563 Context.insert<typename Traits::Insts::Idiv>(Dest, Src0, Src1); |
476 } | 564 } |
477 void _imul(Variable *Dest, Operand *Src0) { | 565 void _imul(Variable *Dest, Operand *Src0) { |
| 566 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
478 Context.insert<typename Traits::Insts::Imul>(Dest, Src0); | 567 Context.insert<typename Traits::Insts::Imul>(Dest, Src0); |
479 } | 568 } |
480 void _imul_imm(Variable *Dest, Operand *Src0, Constant *Imm) { | 569 void _imul_imm(Variable *Dest, Operand *Src0, Constant *Imm) { |
| 570 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
481 Context.insert<typename Traits::Insts::ImulImm>(Dest, Src0, Imm); | 571 Context.insert<typename Traits::Insts::ImulImm>(Dest, Src0, Imm); |
482 } | 572 } |
483 void _insertps(Variable *Dest, Operand *Src0, Operand *Src1) { | 573 void _insertps(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 574 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
484 Context.insert<typename Traits::Insts::Insertps>(Dest, Src0, Src1); | 575 Context.insert<typename Traits::Insts::Insertps>(Dest, Src0, Src1); |
485 } | 576 } |
486 void _jmp(Operand *Target) { | 577 void _jmp(Operand *Target) { |
| 578 AutoMemorySandboxer<> _(this, &Target); |
487 Context.insert<typename Traits::Insts::Jmp>(Target); | 579 Context.insert<typename Traits::Insts::Jmp>(Target); |
488 } | 580 } |
489 void _lea(Variable *Dest, Operand *Src0) { | 581 void _lea(Variable *Dest, Operand *Src0) { |
490 Context.insert<typename Traits::Insts::Lea>(Dest, Src0); | 582 Context.insert<typename Traits::Insts::Lea>(Dest, Src0); |
491 } | 583 } |
492 void _mfence() { Context.insert<typename Traits::Insts::Mfence>(); } | 584 void _mfence() { Context.insert<typename Traits::Insts::Mfence>(); } |
493 /// Moves can be used to redefine registers, creating "partial kills" for | 585 /// Moves can be used to redefine registers, creating "partial kills" for |
494 /// liveness. Mark where moves are used in this way. | 586 /// liveness. Mark where moves are used in this way. |
495 void _redefined(Inst *MovInst, bool IsRedefinition = true) { | 587 void _redefined(Inst *MovInst, bool IsRedefinition = true) { |
496 if (IsRedefinition) | 588 if (IsRedefinition) |
497 MovInst->setDestRedefined(); | 589 MovInst->setDestRedefined(); |
498 } | 590 } |
499 /// If Dest=nullptr is passed in, then a new variable is created, marked as | 591 /// If Dest=nullptr is passed in, then a new variable is created, marked as |
500 /// infinite register allocation weight, and returned through the in/out Dest | 592 /// infinite register allocation weight, and returned through the in/out Dest |
501 /// argument. | 593 /// argument. |
502 typename Traits::Insts::Mov *_mov(Variable *&Dest, Operand *Src0, | 594 typename Traits::Insts::Mov *_mov(Variable *&Dest, Operand *Src0, |
503 int32_t RegNum = Variable::NoRegister) { | 595 int32_t RegNum = Variable::NoRegister) { |
504 if (Dest == nullptr) | 596 if (Dest == nullptr) |
505 Dest = makeReg(Src0->getType(), RegNum); | 597 Dest = makeReg(Src0->getType(), RegNum); |
| 598 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
506 return Context.insert<typename Traits::Insts::Mov>(Dest, Src0); | 599 return Context.insert<typename Traits::Insts::Mov>(Dest, Src0); |
507 } | 600 } |
| 601 void _mov_sp(Operand *NewValue) { |
| 602 dispatchToConcrete(&Traits::ConcreteTarget::_mov_sp, std::move(NewValue)); |
| 603 } |
508 typename Traits::Insts::Movp *_movp(Variable *Dest, Operand *Src0) { | 604 typename Traits::Insts::Movp *_movp(Variable *Dest, Operand *Src0) { |
| 605 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
509 return Context.insert<typename Traits::Insts::Movp>(Dest, Src0); | 606 return Context.insert<typename Traits::Insts::Movp>(Dest, Src0); |
510 } | 607 } |
511 void _movd(Variable *Dest, Operand *Src0) { | 608 void _movd(Variable *Dest, Operand *Src0) { |
| 609 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
512 Context.insert<typename Traits::Insts::Movd>(Dest, Src0); | 610 Context.insert<typename Traits::Insts::Movd>(Dest, Src0); |
513 } | 611 } |
514 void _movq(Variable *Dest, Operand *Src0) { | 612 void _movq(Variable *Dest, Operand *Src0) { |
| 613 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
515 Context.insert<typename Traits::Insts::Movq>(Dest, Src0); | 614 Context.insert<typename Traits::Insts::Movq>(Dest, Src0); |
516 } | 615 } |
517 void _movss(Variable *Dest, Variable *Src0) { | 616 void _movss(Variable *Dest, Variable *Src0) { |
518 Context.insert<typename Traits::Insts::MovssRegs>(Dest, Src0); | 617 Context.insert<typename Traits::Insts::MovssRegs>(Dest, Src0); |
519 } | 618 } |
520 void _movsx(Variable *Dest, Operand *Src0) { | 619 void _movsx(Variable *Dest, Operand *Src0) { |
| 620 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
521 Context.insert<typename Traits::Insts::Movsx>(Dest, Src0); | 621 Context.insert<typename Traits::Insts::Movsx>(Dest, Src0); |
522 } | 622 } |
523 void _movzx(Variable *Dest, Operand *Src0) { | 623 typename Traits::Insts::Movzx *_movzx(Variable *Dest, Operand *Src0) { |
524 Context.insert<typename Traits::Insts::Movzx>(Dest, Src0); | 624 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
| 625 return Context.insert<typename Traits::Insts::Movzx>(Dest, Src0); |
525 } | 626 } |
526 void _maxss(Variable *Dest, Operand *Src0) { | 627 void _maxss(Variable *Dest, Operand *Src0) { |
| 628 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
527 Context.insert<typename Traits::Insts::Maxss>(Dest, Src0); | 629 Context.insert<typename Traits::Insts::Maxss>(Dest, Src0); |
528 } | 630 } |
529 void _minss(Variable *Dest, Operand *Src0) { | 631 void _minss(Variable *Dest, Operand *Src0) { |
| 632 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
530 Context.insert<typename Traits::Insts::Minss>(Dest, Src0); | 633 Context.insert<typename Traits::Insts::Minss>(Dest, Src0); |
531 } | 634 } |
532 void _mul(Variable *Dest, Variable *Src0, Operand *Src1) { | 635 void _mul(Variable *Dest, Variable *Src0, Operand *Src1) { |
| 636 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
533 Context.insert<typename Traits::Insts::Mul>(Dest, Src0, Src1); | 637 Context.insert<typename Traits::Insts::Mul>(Dest, Src0, Src1); |
534 } | 638 } |
535 void _mulps(Variable *Dest, Operand *Src0) { | 639 void _mulps(Variable *Dest, Operand *Src0) { |
| 640 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
536 Context.insert<typename Traits::Insts::Mulps>(Dest, Src0); | 641 Context.insert<typename Traits::Insts::Mulps>(Dest, Src0); |
537 } | 642 } |
538 void _mulss(Variable *Dest, Operand *Src0) { | 643 void _mulss(Variable *Dest, Operand *Src0) { |
| 644 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
539 Context.insert<typename Traits::Insts::Mulss>(Dest, Src0); | 645 Context.insert<typename Traits::Insts::Mulss>(Dest, Src0); |
540 } | 646 } |
541 void _neg(Variable *SrcDest) { | 647 void _neg(Variable *SrcDest) { |
| 648 AutoMemorySandboxer<> _(this, &SrcDest); |
542 Context.insert<typename Traits::Insts::Neg>(SrcDest); | 649 Context.insert<typename Traits::Insts::Neg>(SrcDest); |
543 } | 650 } |
544 void _nop(SizeT Variant) { | 651 void _nop(SizeT Variant) { |
545 Context.insert<typename Traits::Insts::Nop>(Variant); | 652 Context.insert<typename Traits::Insts::Nop>(Variant); |
546 } | 653 } |
547 void _or(Variable *Dest, Operand *Src0) { | 654 void _or(Variable *Dest, Operand *Src0) { |
| 655 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
548 Context.insert<typename Traits::Insts::Or>(Dest, Src0); | 656 Context.insert<typename Traits::Insts::Or>(Dest, Src0); |
549 } | 657 } |
550 void _orps(Variable *Dest, Operand *Src0) { | 658 void _orps(Variable *Dest, Operand *Src0) { |
| 659 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
551 Context.insert<typename Traits::Insts::Orps>(Dest, Src0); | 660 Context.insert<typename Traits::Insts::Orps>(Dest, Src0); |
552 } | 661 } |
553 void _or_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 662 void _or_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 663 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
554 Context.insert<typename Traits::Insts::OrRMW>(DestSrc0, Src1); | 664 Context.insert<typename Traits::Insts::OrRMW>(DestSrc0, Src1); |
555 } | 665 } |
556 void _padd(Variable *Dest, Operand *Src0) { | 666 void _padd(Variable *Dest, Operand *Src0) { |
| 667 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
557 Context.insert<typename Traits::Insts::Padd>(Dest, Src0); | 668 Context.insert<typename Traits::Insts::Padd>(Dest, Src0); |
558 } | 669 } |
559 void _pand(Variable *Dest, Operand *Src0) { | 670 void _pand(Variable *Dest, Operand *Src0) { |
| 671 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
560 Context.insert<typename Traits::Insts::Pand>(Dest, Src0); | 672 Context.insert<typename Traits::Insts::Pand>(Dest, Src0); |
561 } | 673 } |
562 void _pandn(Variable *Dest, Operand *Src0) { | 674 void _pandn(Variable *Dest, Operand *Src0) { |
| 675 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
563 Context.insert<typename Traits::Insts::Pandn>(Dest, Src0); | 676 Context.insert<typename Traits::Insts::Pandn>(Dest, Src0); |
564 } | 677 } |
565 void _pblendvb(Variable *Dest, Operand *Src0, Operand *Src1) { | 678 void _pblendvb(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 679 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
566 Context.insert<typename Traits::Insts::Pblendvb>(Dest, Src0, Src1); | 680 Context.insert<typename Traits::Insts::Pblendvb>(Dest, Src0, Src1); |
567 } | 681 } |
568 void _pcmpeq(Variable *Dest, Operand *Src0) { | 682 void _pcmpeq(Variable *Dest, Operand *Src0) { |
| 683 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
569 Context.insert<typename Traits::Insts::Pcmpeq>(Dest, Src0); | 684 Context.insert<typename Traits::Insts::Pcmpeq>(Dest, Src0); |
570 } | 685 } |
571 void _pcmpgt(Variable *Dest, Operand *Src0) { | 686 void _pcmpgt(Variable *Dest, Operand *Src0) { |
| 687 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
572 Context.insert<typename Traits::Insts::Pcmpgt>(Dest, Src0); | 688 Context.insert<typename Traits::Insts::Pcmpgt>(Dest, Src0); |
573 } | 689 } |
574 void _pextr(Variable *Dest, Operand *Src0, Operand *Src1) { | 690 void _pextr(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 691 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
575 Context.insert<typename Traits::Insts::Pextr>(Dest, Src0, Src1); | 692 Context.insert<typename Traits::Insts::Pextr>(Dest, Src0, Src1); |
576 } | 693 } |
577 void _pinsr(Variable *Dest, Operand *Src0, Operand *Src1) { | 694 void _pinsr(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 695 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
578 Context.insert<typename Traits::Insts::Pinsr>(Dest, Src0, Src1); | 696 Context.insert<typename Traits::Insts::Pinsr>(Dest, Src0, Src1); |
579 } | 697 } |
580 void _pmull(Variable *Dest, Operand *Src0) { | 698 void _pmull(Variable *Dest, Operand *Src0) { |
| 699 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
581 Context.insert<typename Traits::Insts::Pmull>(Dest, Src0); | 700 Context.insert<typename Traits::Insts::Pmull>(Dest, Src0); |
582 } | 701 } |
583 void _pmuludq(Variable *Dest, Operand *Src0) { | 702 void _pmuludq(Variable *Dest, Operand *Src0) { |
| 703 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
584 Context.insert<typename Traits::Insts::Pmuludq>(Dest, Src0); | 704 Context.insert<typename Traits::Insts::Pmuludq>(Dest, Src0); |
585 } | 705 } |
586 void _pop(Variable *Dest) { | 706 void _pop(Variable *Dest) { |
587 Context.insert<typename Traits::Insts::Pop>(Dest); | 707 Context.insert<typename Traits::Insts::Pop>(Dest); |
588 } | 708 } |
589 void _por(Variable *Dest, Operand *Src0) { | 709 void _por(Variable *Dest, Operand *Src0) { |
| 710 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
590 Context.insert<typename Traits::Insts::Por>(Dest, Src0); | 711 Context.insert<typename Traits::Insts::Por>(Dest, Src0); |
591 } | 712 } |
592 void _pshufd(Variable *Dest, Operand *Src0, Operand *Src1) { | 713 void _pshufd(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 714 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
593 Context.insert<typename Traits::Insts::Pshufd>(Dest, Src0, Src1); | 715 Context.insert<typename Traits::Insts::Pshufd>(Dest, Src0, Src1); |
594 } | 716 } |
595 void _psll(Variable *Dest, Operand *Src0) { | 717 void _psll(Variable *Dest, Operand *Src0) { |
| 718 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
596 Context.insert<typename Traits::Insts::Psll>(Dest, Src0); | 719 Context.insert<typename Traits::Insts::Psll>(Dest, Src0); |
597 } | 720 } |
598 void _psra(Variable *Dest, Operand *Src0) { | 721 void _psra(Variable *Dest, Operand *Src0) { |
| 722 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
599 Context.insert<typename Traits::Insts::Psra>(Dest, Src0); | 723 Context.insert<typename Traits::Insts::Psra>(Dest, Src0); |
600 } | 724 } |
601 void _psrl(Variable *Dest, Operand *Src0) { | 725 void _psrl(Variable *Dest, Operand *Src0) { |
| 726 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
602 Context.insert<typename Traits::Insts::Psrl>(Dest, Src0); | 727 Context.insert<typename Traits::Insts::Psrl>(Dest, Src0); |
603 } | 728 } |
604 void _psub(Variable *Dest, Operand *Src0) { | 729 void _psub(Variable *Dest, Operand *Src0) { |
| 730 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
605 Context.insert<typename Traits::Insts::Psub>(Dest, Src0); | 731 Context.insert<typename Traits::Insts::Psub>(Dest, Src0); |
606 } | 732 } |
607 void _push(Variable *Src0) { | 733 void _push(Operand *Src0) { |
608 Context.insert<typename Traits::Insts::Push>(Src0); | 734 Context.insert<typename Traits::Insts::Push>(Src0); |
609 } | 735 } |
610 void _pxor(Variable *Dest, Operand *Src0) { | 736 void _pxor(Variable *Dest, Operand *Src0) { |
| 737 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
611 Context.insert<typename Traits::Insts::Pxor>(Dest, Src0); | 738 Context.insert<typename Traits::Insts::Pxor>(Dest, Src0); |
612 } | 739 } |
613 void _ret(Variable *Src0 = nullptr) { | 740 void _ret(Variable *Src0 = nullptr) { |
614 Context.insert<typename Traits::Insts::Ret>(Src0); | 741 Context.insert<typename Traits::Insts::Ret>(Src0); |
615 } | 742 } |
616 void _rol(Variable *Dest, Operand *Src0) { | 743 void _rol(Variable *Dest, Operand *Src0) { |
| 744 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
617 Context.insert<typename Traits::Insts::Rol>(Dest, Src0); | 745 Context.insert<typename Traits::Insts::Rol>(Dest, Src0); |
618 } | 746 } |
| 747 X86OperandMem *_sandbox_mem_reference(X86OperandMem *Mem) { |
| 748 return dispatchToConcrete(&Traits::ConcreteTarget::_sandbox_mem_reference, |
| 749 std::move(Mem)); |
| 750 } |
619 void _sar(Variable *Dest, Operand *Src0) { | 751 void _sar(Variable *Dest, Operand *Src0) { |
| 752 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
620 Context.insert<typename Traits::Insts::Sar>(Dest, Src0); | 753 Context.insert<typename Traits::Insts::Sar>(Dest, Src0); |
621 } | 754 } |
622 void _sbb(Variable *Dest, Operand *Src0) { | 755 void _sbb(Variable *Dest, Operand *Src0) { |
| 756 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
623 Context.insert<typename Traits::Insts::Sbb>(Dest, Src0); | 757 Context.insert<typename Traits::Insts::Sbb>(Dest, Src0); |
624 } | 758 } |
625 void _sbb_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 759 void _sbb_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 760 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
626 Context.insert<typename Traits::Insts::SbbRMW>(DestSrc0, Src1); | 761 Context.insert<typename Traits::Insts::SbbRMW>(DestSrc0, Src1); |
627 } | 762 } |
628 void _setcc(Variable *Dest, BrCond Condition) { | 763 void _setcc(Variable *Dest, BrCond Condition) { |
629 Context.insert<typename Traits::Insts::Setcc>(Dest, Condition); | 764 Context.insert<typename Traits::Insts::Setcc>(Dest, Condition); |
630 } | 765 } |
631 void _shl(Variable *Dest, Operand *Src0) { | 766 void _shl(Variable *Dest, Operand *Src0) { |
| 767 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
632 Context.insert<typename Traits::Insts::Shl>(Dest, Src0); | 768 Context.insert<typename Traits::Insts::Shl>(Dest, Src0); |
633 } | 769 } |
634 void _shld(Variable *Dest, Variable *Src0, Operand *Src1) { | 770 void _shld(Variable *Dest, Variable *Src0, Operand *Src1) { |
| 771 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
635 Context.insert<typename Traits::Insts::Shld>(Dest, Src0, Src1); | 772 Context.insert<typename Traits::Insts::Shld>(Dest, Src0, Src1); |
636 } | 773 } |
637 void _shr(Variable *Dest, Operand *Src0) { | 774 void _shr(Variable *Dest, Operand *Src0) { |
| 775 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
638 Context.insert<typename Traits::Insts::Shr>(Dest, Src0); | 776 Context.insert<typename Traits::Insts::Shr>(Dest, Src0); |
639 } | 777 } |
640 void _shrd(Variable *Dest, Variable *Src0, Operand *Src1) { | 778 void _shrd(Variable *Dest, Variable *Src0, Operand *Src1) { |
| 779 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
641 Context.insert<typename Traits::Insts::Shrd>(Dest, Src0, Src1); | 780 Context.insert<typename Traits::Insts::Shrd>(Dest, Src0, Src1); |
642 } | 781 } |
643 void _shufps(Variable *Dest, Operand *Src0, Operand *Src1) { | 782 void _shufps(Variable *Dest, Operand *Src0, Operand *Src1) { |
| 783 AutoMemorySandboxer<> _(this, &Dest, &Src0, &Src1); |
644 Context.insert<typename Traits::Insts::Shufps>(Dest, Src0, Src1); | 784 Context.insert<typename Traits::Insts::Shufps>(Dest, Src0, Src1); |
645 } | 785 } |
646 void _sqrtss(Variable *Dest, Operand *Src0) { | 786 void _sqrtss(Variable *Dest, Operand *Src0) { |
| 787 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
647 Context.insert<typename Traits::Insts::Sqrtss>(Dest, Src0); | 788 Context.insert<typename Traits::Insts::Sqrtss>(Dest, Src0); |
648 } | 789 } |
649 void _store(Operand *Value, X86Operand *Mem) { | 790 void _store(Operand *Value, X86Operand *Mem) { |
| 791 AutoMemorySandboxer<> _(this, &Value, &Mem); |
650 Context.insert<typename Traits::Insts::Store>(Value, Mem); | 792 Context.insert<typename Traits::Insts::Store>(Value, Mem); |
651 } | 793 } |
652 void _storep(Variable *Value, X86OperandMem *Mem) { | 794 void _storep(Variable *Value, X86OperandMem *Mem) { |
| 795 AutoMemorySandboxer<> _(this, &Value, &Mem); |
653 Context.insert<typename Traits::Insts::StoreP>(Value, Mem); | 796 Context.insert<typename Traits::Insts::StoreP>(Value, Mem); |
654 } | 797 } |
655 void _storeq(Variable *Value, X86OperandMem *Mem) { | 798 void _storeq(Variable *Value, X86OperandMem *Mem) { |
| 799 AutoMemorySandboxer<> _(this, &Value, &Mem); |
656 Context.insert<typename Traits::Insts::StoreQ>(Value, Mem); | 800 Context.insert<typename Traits::Insts::StoreQ>(Value, Mem); |
657 } | 801 } |
658 void _sub(Variable *Dest, Operand *Src0) { | 802 void _sub(Variable *Dest, Operand *Src0) { |
| 803 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
659 Context.insert<typename Traits::Insts::Sub>(Dest, Src0); | 804 Context.insert<typename Traits::Insts::Sub>(Dest, Src0); |
660 } | 805 } |
661 void _sub_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 806 void _sub_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 807 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
662 Context.insert<typename Traits::Insts::SubRMW>(DestSrc0, Src1); | 808 Context.insert<typename Traits::Insts::SubRMW>(DestSrc0, Src1); |
663 } | 809 } |
| 810 void _sub_sp(Operand *Adjustment) { |
| 811 dispatchToConcrete(&Traits::ConcreteTarget::_sub_sp, std::move(Adjustment)); |
| 812 } |
664 void _subps(Variable *Dest, Operand *Src0) { | 813 void _subps(Variable *Dest, Operand *Src0) { |
| 814 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
665 Context.insert<typename Traits::Insts::Subps>(Dest, Src0); | 815 Context.insert<typename Traits::Insts::Subps>(Dest, Src0); |
666 } | 816 } |
667 void _subss(Variable *Dest, Operand *Src0) { | 817 void _subss(Variable *Dest, Operand *Src0) { |
| 818 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
668 Context.insert<typename Traits::Insts::Subss>(Dest, Src0); | 819 Context.insert<typename Traits::Insts::Subss>(Dest, Src0); |
669 } | 820 } |
670 void _test(Operand *Src0, Operand *Src1) { | 821 void _test(Operand *Src0, Operand *Src1) { |
| 822 AutoMemorySandboxer<> _(this, &Src0, &Src1); |
671 Context.insert<typename Traits::Insts::Test>(Src0, Src1); | 823 Context.insert<typename Traits::Insts::Test>(Src0, Src1); |
672 } | 824 } |
673 void _ucomiss(Operand *Src0, Operand *Src1) { | 825 void _ucomiss(Operand *Src0, Operand *Src1) { |
| 826 AutoMemorySandboxer<> _(this, &Src0, &Src1); |
674 Context.insert<typename Traits::Insts::Ucomiss>(Src0, Src1); | 827 Context.insert<typename Traits::Insts::Ucomiss>(Src0, Src1); |
675 } | 828 } |
676 void _ud2() { Context.insert<typename Traits::Insts::UD2>(); } | 829 void _ud2() { Context.insert<typename Traits::Insts::UD2>(); } |
677 void _xadd(Operand *Dest, Variable *Src, bool Locked) { | 830 void _xadd(Operand *Dest, Variable *Src, bool Locked) { |
| 831 AutoMemorySandboxer<> _(this, &Dest, &Src); |
678 Context.insert<typename Traits::Insts::Xadd>(Dest, Src, Locked); | 832 Context.insert<typename Traits::Insts::Xadd>(Dest, Src, Locked); |
679 // The xadd exchanges Dest and Src (modifying Src). Model that update with | 833 // The xadd exchanges Dest and Src (modifying Src). Model that update with |
680 // a FakeDef followed by a FakeUse. | 834 // a FakeDef followed by a FakeUse. |
681 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); | 835 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); |
682 _set_dest_redefined(); | 836 _set_dest_redefined(); |
683 Context.insert<InstFakeUse>(Src); | 837 Context.insert<InstFakeUse>(Src); |
684 } | 838 } |
685 void _xchg(Operand *Dest, Variable *Src) { | 839 void _xchg(Operand *Dest, Variable *Src) { |
| 840 AutoMemorySandboxer<> _(this, &Dest, &Src); |
686 Context.insert<typename Traits::Insts::Xchg>(Dest, Src); | 841 Context.insert<typename Traits::Insts::Xchg>(Dest, Src); |
687 // The xchg modifies Dest and Src -- model that update with a | 842 // The xchg modifies Dest and Src -- model that update with a |
688 // FakeDef/FakeUse. | 843 // FakeDef/FakeUse. |
689 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); | 844 Context.insert<InstFakeDef>(Src, llvm::dyn_cast<Variable>(Dest)); |
690 _set_dest_redefined(); | 845 _set_dest_redefined(); |
691 Context.insert<InstFakeUse>(Src); | 846 Context.insert<InstFakeUse>(Src); |
692 } | 847 } |
693 void _xor(Variable *Dest, Operand *Src0) { | 848 void _xor(Variable *Dest, Operand *Src0) { |
| 849 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
694 Context.insert<typename Traits::Insts::Xor>(Dest, Src0); | 850 Context.insert<typename Traits::Insts::Xor>(Dest, Src0); |
695 } | 851 } |
696 void _xorps(Variable *Dest, Operand *Src0) { | 852 void _xorps(Variable *Dest, Operand *Src0) { |
| 853 AutoMemorySandboxer<> _(this, &Dest, &Src0); |
697 Context.insert<typename Traits::Insts::Xorps>(Dest, Src0); | 854 Context.insert<typename Traits::Insts::Xorps>(Dest, Src0); |
698 } | 855 } |
699 void _xor_rmw(X86OperandMem *DestSrc0, Operand *Src1) { | 856 void _xor_rmw(X86OperandMem *DestSrc0, Operand *Src1) { |
| 857 AutoMemorySandboxer<> _(this, &DestSrc0, &Src1); |
700 Context.insert<typename Traits::Insts::XorRMW>(DestSrc0, Src1); | 858 Context.insert<typename Traits::Insts::XorRMW>(DestSrc0, Src1); |
701 } | 859 } |
702 | 860 |
703 void _iaca_start() { | 861 void _iaca_start() { |
704 if (!BuildDefs::minimal()) | 862 if (!BuildDefs::minimal()) |
705 Context.insert<typename Traits::Insts::IacaStart>(); | 863 Context.insert<typename Traits::Insts::IacaStart>(); |
706 } | 864 } |
707 void _iaca_end() { | 865 void _iaca_end() { |
708 if (!BuildDefs::minimal()) | 866 if (!BuildDefs::minimal()) |
709 Context.insert<typename Traits::Insts::IacaEnd>(); | 867 Context.insert<typename Traits::Insts::IacaEnd>(); |
(...skipping 129 matching lines...) |
839 | 997 |
840 static FixupKind PcRelFixup; | 998 static FixupKind PcRelFixup; |
841 static FixupKind AbsFixup; | 999 static FixupKind AbsFixup; |
842 }; | 1000 }; |
843 } // end of namespace X86NAMESPACE | 1001 } // end of namespace X86NAMESPACE |
844 } // end of namespace Ice | 1002 } // end of namespace Ice |
845 | 1003 |
846 #include "IceTargetLoweringX86BaseImpl.h" | 1004 #include "IceTargetLoweringX86BaseImpl.h" |
847 | 1005 |
848 #endif // SUBZERO_SRC_ICETARGETLOWERINGX86BASE_H | 1006 #endif // SUBZERO_SRC_ICETARGETLOWERINGX86BASE_H |