| OLD | NEW |
| 1 //===- subzero/src/IceTargetLoweringX8664.cpp - x86-64 lowering -----------===// | 1 //===- subzero/src/IceTargetLoweringX8664.cpp - x86-64 lowering -----------===// |
| 2 // | 2 // |
| 3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
| 4 // | 4 // |
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
| 7 // | 7 // |
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
| 9 /// | 9 /// |
| 10 /// \file | 10 /// \file |
| (...skipping 147 matching lines...) |
| 158 // add Adjustment, %esp | 158 // add Adjustment, %esp |
| 159 // %rsp = fake-def %esp | 159 // %rsp = fake-def %esp |
| 160 // add %r15, %rsp | 160 // add %r15, %rsp |
| 161 // .bundle_end | 161 // .bundle_end |
| 162 // | 162 // |
| 163 // The fake-defs ensure that the | 163 // The fake-defs ensure that the |
| 164 // | 164 // |
| 165 // add Adjustment, %esp | 165 // add Adjustment, %esp |
| 166 // | 166 // |
| 167 // instruction is not DCE'd. | 167 // instruction is not DCE'd. |
| 168 _bundle_lock(); | 168 AutoBundle _(this); |
| 169 _redefined(Context.insert<InstFakeDef>(esp, rsp)); | 169 _redefined(Context.insert<InstFakeDef>(esp, rsp)); |
| 170 _add(esp, Adjustment); | 170 _add(esp, Adjustment); |
| 171 _redefined(Context.insert<InstFakeDef>(rsp, esp)); | 171 _redefined(Context.insert<InstFakeDef>(rsp, esp)); |
| 172 _add(rsp, r15); | 172 _add(rsp, r15); |
| 173 _bundle_unlock(); | |
| 174 } | 173 } |
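
For readers new to the pattern: this change replaces each explicit _bundle_lock() / _bundle_unlock() pair with a scoped AutoBundle guard, so the unlock can never be skipped on an early return. A minimal sketch of what such an RAII helper presumably looks like follows; the member names (needSandboxing()) and the Opt_None default are assumptions for illustration, not the actual Subzero declarations (only InstBundleLock::Opt_PadToEnd appears in this patch), and the real class is presumably a member or friend of the lowering class so it can reach the protected emitters.

    // Sketch only: emits .bundle_lock on construction and .bundle_unlock on
    // destruction, and does nothing when sandboxing is disabled.
    class AutoBundle {
    public:
      explicit AutoBundle(TargetX8664 *Target,
                          InstBundleLock::Option Opt = InstBundleLock::Opt_None)
          : Target(Target), NeedSandboxing(Target->needSandboxing()) {
        if (NeedSandboxing)
          Target->_bundle_lock(Opt);
      }
      ~AutoBundle() {
        if (NeedSandboxing)
          Target->_bundle_unlock();
      }

    private:
      TargetX8664 *const Target;
      const bool NeedSandboxing;
    };

If the guard does fold the sandboxing check in this way, that also explains why _mov_sp below no longer wraps the lock in if (NeedSandboxing): constructing the guard unconditionally is a no-op for non-sandboxed builds.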
| 175 | 174 |
| 176 void TargetX8664::_mov_sp(Operand *NewValue) { | 175 void TargetX8664::_mov_sp(Operand *NewValue) { |
| 177 assert(NewValue->getType() == IceType_i32); | 176 assert(NewValue->getType() == IceType_i32); |
| 178 | 177 |
| 179 Variable *esp = getPhysicalRegister(Traits::RegisterSet::Reg_esp); | 178 Variable *esp = getPhysicalRegister(Traits::RegisterSet::Reg_esp); |
| 180 Variable *rsp = | 179 Variable *rsp = |
| 181 getPhysicalRegister(Traits::RegisterSet::Reg_rsp, IceType_i64); | 180 getPhysicalRegister(Traits::RegisterSet::Reg_rsp, IceType_i64); |
| 182 | 181 |
| 183 if (NeedSandboxing) { | 182 AutoBundle _(this); |
| 184 _bundle_lock(); | |
| 185 } | |
| 186 | 183 |
| 187 _redefined(Context.insert<InstFakeDef>(esp, rsp)); | 184 _redefined(Context.insert<InstFakeDef>(esp, rsp)); |
| 188 _redefined(_mov(esp, NewValue)); | 185 _redefined(_mov(esp, NewValue)); |
| 189 _redefined(Context.insert<InstFakeDef>(rsp, esp)); | 186 _redefined(Context.insert<InstFakeDef>(rsp, esp)); |
| 190 | 187 |
| 191 if (!NeedSandboxing) { | 188 if (!NeedSandboxing) { |
| 192 return; | 189 return; |
| 193 } | 190 } |
| 194 | 191 |
| 195 Variable *r15 = | 192 Variable *r15 = |
| 196 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 193 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 197 _add(rsp, r15); | 194 _add(rsp, r15); |
| 198 _bundle_unlock(); | |
| 199 } | 195 } |
| 200 | 196 |
| 201 void TargetX8664::_push_rbp() { | 197 void TargetX8664::_push_rbp() { |
| 202 assert(NeedSandboxing); | 198 assert(NeedSandboxing); |
| 203 | 199 |
| 204 Constant *_0 = Ctx->getConstantZero(IceType_i32); | 200 Constant *_0 = Ctx->getConstantZero(IceType_i32); |
| 205 Variable *ebp = | 201 Variable *ebp = |
| 206 getPhysicalRegister(Traits::RegisterSet::Reg_ebp, IceType_i32); | 202 getPhysicalRegister(Traits::RegisterSet::Reg_ebp, IceType_i32); |
| 207 Variable *rsp = | 203 Variable *rsp = |
| 208 getPhysicalRegister(Traits::RegisterSet::Reg_rsp, IceType_i64); | 204 getPhysicalRegister(Traits::RegisterSet::Reg_rsp, IceType_i64); |
| 209 auto *TopOfStack = llvm::cast<X86OperandMem>( | 205 auto *TopOfStack = llvm::cast<X86OperandMem>( |
| 210 legalize(X86OperandMem::create(Func, IceType_i32, rsp, _0), | 206 legalize(X86OperandMem::create(Func, IceType_i32, rsp, _0), |
| 211 Legal_Reg | Legal_Mem)); | 207 Legal_Reg | Legal_Mem)); |
| 212 | 208 |
| 213 // Emits a sequence: | 209 // Emits a sequence: |
| 214 // | 210 // |
| 215 // .bundle_start | 211 // .bundle_start |
| 216 // push 0 | 212 // push 0 |
| 217 // mov %ebp, (%rsp) | 213 // mov %ebp, (%rsp) |
| 218 // .bundle_end | 214 // .bundle_end |
| 219 // | 215 // |
| 220 // to avoid leaking the upper 32 bits (i.e., the sandbox address.) | 216 // to avoid leaking the upper 32 bits (i.e., the sandbox address.) |
| 221 _bundle_lock(); | 217 AutoBundle _(this); |
| 222 _push(_0); | 218 _push(_0); |
| 223 Context.insert<typename Traits::Insts::Store>(ebp, TopOfStack); | 219 Context.insert<typename Traits::Insts::Store>(ebp, TopOfStack); |
| 224 _bundle_unlock(); | |
| 225 } | 220 } |
| 226 | 221 |
| 227 Traits::X86OperandMem *TargetX8664::_sandbox_mem_reference(X86OperandMem *Mem) { | 222 Traits::X86OperandMem *TargetX8664::_sandbox_mem_reference(X86OperandMem *Mem) { |
| 228 // In x86_64-nacl, all memory references are relative to %r15 (i.e., %rzp.) | 223 // In x86_64-nacl, all memory references are relative to %r15 (i.e., %rzp.) |
| 229 // NaCl sandboxing also requires that any register other than %rsp and | 224 // NaCl sandboxing also requires that any register other than %rsp and |
| 230 // %rbp be 'truncated' to 32 bits before memory access. | 225 // %rbp be 'truncated' to 32 bits before memory access. |
| 231 assert(NeedSandboxing); | 226 assert(NeedSandboxing); |
| 232 Variable *Base = Mem->getBase(); | 227 Variable *Base = Mem->getBase(); |
| 233 Variable *Index = Mem->getIndex(); | 228 Variable *Index = Mem->getIndex(); |
| 234 uint16_t Shift = 0; | 229 uint16_t Shift = 0; |
| (...skipping 108 matching lines...) |
| 343 | 338 |
| 344 Variable *esp = | 339 Variable *esp = |
| 345 getPhysicalRegister(Traits::RegisterSet::Reg_esp, IceType_i32); | 340 getPhysicalRegister(Traits::RegisterSet::Reg_esp, IceType_i32); |
| 346 Variable *r15 = | 341 Variable *r15 = |
| 347 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 342 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 348 | 343 |
| 349 // .bundle_start | 344 // .bundle_start |
| 350 // sub Adjustment, %esp | 345 // sub Adjustment, %esp |
| 351 // add %r15, %rsp | 346 // add %r15, %rsp |
| 352 // .bundle_end | 347 // .bundle_end |
| 353 _bundle_lock(); | 348 AutoBundle _(this); |
| 354 _redefined(Context.insert<InstFakeDef>(esp, rsp)); | 349 _redefined(Context.insert<InstFakeDef>(esp, rsp)); |
| 355 _sub(esp, Adjustment); | 350 _sub(esp, Adjustment); |
| 356 _redefined(Context.insert<InstFakeDef>(rsp, esp)); | 351 _redefined(Context.insert<InstFakeDef>(rsp, esp)); |
| 357 _add(rsp, r15); | 352 _add(rsp, r15); |
| 358 _bundle_unlock(); | |
| 359 } | 353 } |
| 360 | 354 |
| 361 void TargetX8664::initSandbox() { | 355 void TargetX8664::initSandbox() { |
| 362 assert(NeedSandboxing); | 356 assert(NeedSandboxing); |
| 363 Context.init(Func->getEntryNode()); | 357 Context.init(Func->getEntryNode()); |
| 364 Context.setInsertPoint(Context.getCur()); | 358 Context.setInsertPoint(Context.getCur()); |
| 365 Variable *r15 = | 359 Variable *r15 = |
| 366 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 360 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 367 Context.insert<InstFakeDef>(r15); | 361 Context.insert<InstFakeDef>(r15); |
| 368 Context.insert<InstFakeUse>(r15); | 362 Context.insert<InstFakeUse>(r15); |
| 369 } | 363 } |
| 370 | 364 |
| 371 void TargetX8664::lowerIndirectJump(Variable *JumpTarget) { | 365 void TargetX8664::lowerIndirectJump(Variable *JumpTarget) { |
| | 366 std::unique_ptr<AutoBundle> Bundler; |
| | 367 |
| 372 if (!NeedSandboxing) { | 368 if (!NeedSandboxing) { |
| 373 Variable *T = makeReg(IceType_i64); | 369 Variable *T = makeReg(IceType_i64); |
| 374 _movzx(T, JumpTarget); | 370 _movzx(T, JumpTarget); |
| 375 JumpTarget = T; | 371 JumpTarget = T; |
| 376 } else { | 372 } else { |
| 377 Variable *T = makeReg(IceType_i32); | 373 Variable *T = makeReg(IceType_i32); |
| 378 Variable *T64 = makeReg(IceType_i64); | 374 Variable *T64 = makeReg(IceType_i64); |
| 379 Variable *r15 = | 375 Variable *r15 = |
| 380 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 376 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 381 | 377 |
| 382 _mov(T, JumpTarget); | 378 _mov(T, JumpTarget); |
| 383 _bundle_lock(); | 379 Bundler = makeUnique<AutoBundle>(this); |
| 384 const SizeT BundleSize = | 380 const SizeT BundleSize = |
| 385 1 << Func->getAssembler<>()->getBundleAlignLog2Bytes(); | 381 1 << Func->getAssembler<>()->getBundleAlignLog2Bytes(); |
| 386 _and(T, Ctx->getConstantInt32(~(BundleSize - 1))); | 382 _and(T, Ctx->getConstantInt32(~(BundleSize - 1))); |
| 387 _movzx(T64, T); | 383 _movzx(T64, T); |
| 388 _add(T64, r15); | 384 _add(T64, r15); |
| 389 JumpTarget = T64; | 385 JumpTarget = T64; |
| 390 } | 386 } |
| 391 | 387 |
| 392 _jmp(JumpTarget); | 388 _jmp(JumpTarget); |
| 393 if (NeedSandboxing) | |
| 394 _bundle_unlock(); | |
| 395 } | 389 } |
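
A note on the masking in the sandboxed path: _and(T, ~(BundleSize - 1)) clears the low bits of the 32-bit jump target so control can only land on a bundle boundary, after which the sandbox base in %r15 is added back in. A standalone illustration of that arithmetic (hypothetical helper name; assumes BundleSize is a power of two, 32 bytes for NaCl bundles):

    #include <cstdint>

    // Align a 32-bit target down to a bundle boundary, as the _and above does.
    constexpr uint32_t bundleAlignDown(uint32_t Target, uint32_t BundleSize) {
      return Target & ~(BundleSize - 1);
    }

    static_assert(bundleAlignDown(0x1234567Fu, 32) == 0x12345660u,
                  "low five bits cleared");
    static_assert(bundleAlignDown(0x12345660u, 32) == 0x12345660u,
                  "aligned targets are unchanged");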
| 396 | 390 |
| 397 namespace { | 391 namespace { |
| 398 static inline TargetX8664::Traits::RegisterSet::AllRegisters | 392 static inline TargetX8664::Traits::RegisterSet::AllRegisters |
| 399 getRegisterForXmmArgNum(uint32_t ArgNum) { | 393 getRegisterForXmmArgNum(uint32_t ArgNum) { |
| 400 assert(ArgNum < TargetX8664::Traits::X86_MAX_XMM_ARGS); | 394 assert(ArgNum < TargetX8664::Traits::X86_MAX_XMM_ARGS); |
| 401 return static_cast<TargetX8664::Traits::RegisterSet::AllRegisters>( | 395 return static_cast<TargetX8664::Traits::RegisterSet::AllRegisters>( |
| 402 TargetX8664::Traits::RegisterSet::Reg_xmm0 + ArgNum); | 396 TargetX8664::Traits::RegisterSet::Reg_xmm0 + ArgNum); |
| 403 } | 397 } |
| 404 | 398 |
| (...skipping 187 matching lines...) |
| 592 // emitted call needs an i64 register (for textual asm.) | 586 // emitted call needs an i64 register (for textual asm.) |
| 593 Variable *T = makeReg(IceType_i64); | 587 Variable *T = makeReg(IceType_i64); |
| 594 _movzx(T, CallTargetR); | 588 _movzx(T, CallTargetR); |
| 595 CallTarget = T; | 589 CallTarget = T; |
| 596 } | 590 } |
| 597 NewCall = Context.insert<Traits::Insts::Call>(ReturnReg, CallTarget); | 591 NewCall = Context.insert<Traits::Insts::Call>(ReturnReg, CallTarget); |
| 598 } else { | 592 } else { |
| 599 ReturnAddress = InstX86Label::create(Func, this); | 593 ReturnAddress = InstX86Label::create(Func, this); |
| 600 ReturnAddress->setIsReturnLocation(true); | 594 ReturnAddress->setIsReturnLocation(true); |
| 601 constexpr bool SuppressMangling = true; | 595 constexpr bool SuppressMangling = true; |
| 602 if (CallTargetR == nullptr) { | 596 /* AutoBundle scoping */ { |
| 603 _bundle_lock(InstBundleLock::Opt_PadToEnd); | 597 std::unique_ptr<AutoBundle> Bundler; |
| 604 _push(Ctx->getConstantSym(0, ReturnAddress->getName(Func), | 598 if (CallTargetR == nullptr) { |
| 605 SuppressMangling)); | 599 Bundler = makeUnique<AutoBundle>(this, InstBundleLock::Opt_PadToEnd); |
| 606 } else { | 600 _push(Ctx->getConstantSym(0, ReturnAddress->getName(Func), |
| 607 Variable *T = makeReg(IceType_i32); | 601 SuppressMangling)); |
| 608 Variable *T64 = makeReg(IceType_i64); | 602 } else { |
| 609 Variable *r15 = | 603 Variable *T = makeReg(IceType_i32); |
| 610 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 604 Variable *T64 = makeReg(IceType_i64); |
| | 605 Variable *r15 = |
| | 606 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 611 | 607 |
| 612 _mov(T, CallTargetR); | 608 _mov(T, CallTargetR); |
| 613 _bundle_lock(InstBundleLock::Opt_PadToEnd); | 609 Bundler = makeUnique<AutoBundle>(this, InstBundleLock::Opt_PadToEnd); |
| 614 _push(Ctx->getConstantSym(0, ReturnAddress->getName(Func), | 610 _push(Ctx->getConstantSym(0, ReturnAddress->getName(Func), |
| 615 SuppressMangling)); | 611 SuppressMangling)); |
| 616 const SizeT BundleSize = | 612 const SizeT BundleSize = |
| 617 1 << Func->getAssembler<>()->getBundleAlignLog2Bytes(); | 613 1 << Func->getAssembler<>()->getBundleAlignLog2Bytes(); |
| 618 _and(T, Ctx->getConstantInt32(~(BundleSize - 1))); | 614 _and(T, Ctx->getConstantInt32(~(BundleSize - 1))); |
| 619 _movzx(T64, T); | 615 _movzx(T64, T); |
| 620 _add(T64, r15); | 616 _add(T64, r15); |
| 621 CallTarget = T64; | 617 CallTarget = T64; |
| | 618 } |
| | 619 |
| | 620 NewCall = Context.insert<Traits::Insts::Jmp>(CallTarget); |
| 622 } | 621 } |
| 623 | |
| 624 NewCall = Context.insert<Traits::Insts::Jmp>(CallTarget); | |
| 625 _bundle_unlock(); | |
| 626 if (ReturnReg != nullptr) { | 622 if (ReturnReg != nullptr) { |
| 627 Context.insert<InstFakeDef>(ReturnReg); | 623 Context.insert<InstFakeDef>(ReturnReg); |
| 628 } | 624 } |
| 629 | 625 |
| 630 Context.insert(ReturnAddress); | 626 Context.insert(ReturnAddress); |
| 631 } | 627 } |
| 632 | 628 |
| 633 // Insert a register-kill pseudo instruction. | 629 // Insert a register-kill pseudo instruction. |
| 634 Context.insert<InstFakeKill>(NewCall); | 630 Context.insert<InstFakeKill>(NewCall); |
| 635 | 631 |
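
Both lowerIndirectJump above and this call lowering defer the guard behind std::unique_ptr<AutoBundle>: the lock must not begin until the 32-bit copy of the target has been emitted inside the sandboxed branch, yet it has to stay open until the enclosing scope closes after the jump. A small standalone sketch of that idiom, using an illustrative stand-in guard rather than the real class:

    #include <cstdio>
    #include <memory>

    // BundleGuard stands in for AutoBundle; it just prints the bundle markers.
    struct BundleGuard {
      BundleGuard() { std::puts(".bundle_lock"); }
      ~BundleGuard() { std::puts(".bundle_unlock"); }
    };

    void lowerIndirectJumpSketch(bool Sandboxed) {
      std::unique_ptr<BundleGuard> Bundler; // empty: no bundle yet
      if (Sandboxed) {
        std::puts("mov  ecx, <target>");           // stays outside the bundle
        Bundler = std::make_unique<BundleGuard>(); // lock begins mid-branch
        std::puts("and  ecx, ~(BundleSize - 1)");
        std::puts("add  rcx, r15");
      }
      std::puts("jmp  *<target>");
    } // the guard, if armed, unlocks right after the jmp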
| (...skipping 215 matching lines...) |
| 851 getPhysicalRegister(Traits::RegisterSet::Reg_esp, IceType_i32); | 847 getPhysicalRegister(Traits::RegisterSet::Reg_esp, IceType_i32); |
| 852 Variable *r15 = | 848 Variable *r15 = |
| 853 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 849 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 854 | 850 |
| 855 if (!NeedSandboxing) { | 851 if (!NeedSandboxing) { |
| 856 _push(rbp); | 852 _push(rbp); |
| 857 _mov(rbp, rsp); | 853 _mov(rbp, rsp); |
| 858 } else { | 854 } else { |
| 859 _push_rbp(); | 855 _push_rbp(); |
| 860 | 856 |
| 861 _bundle_lock(); | 857 AutoBundle _(this); |
| 862 _redefined(Context.insert<InstFakeDef>(ebp, rbp)); | 858 _redefined(Context.insert<InstFakeDef>(ebp, rbp)); |
| 863 _redefined(Context.insert<InstFakeDef>(esp, rsp)); | 859 _redefined(Context.insert<InstFakeDef>(esp, rsp)); |
| 864 _mov(ebp, esp); | 860 _mov(ebp, esp); |
| 865 _redefined(Context.insert<InstFakeDef>(rsp, esp)); | 861 _redefined(Context.insert<InstFakeDef>(rsp, esp)); |
| 866 _add(rbp, r15); | 862 _add(rbp, r15); |
| 867 _bundle_unlock(); | |
| 868 } | 863 } |
| 869 // Keep ebp live for late-stage liveness analysis (e.g. asm-verbose mode). | 864 // Keep ebp live for late-stage liveness analysis (e.g. asm-verbose mode). |
| 870 Context.insert<InstFakeUse>(rbp); | 865 Context.insert<InstFakeUse>(rbp); |
| 871 } | 866 } |
| 872 | 867 |
| 873 // Align the variables area. SpillAreaPaddingBytes is the size of the region | 868 // Align the variables area. SpillAreaPaddingBytes is the size of the region |
| 874 // after the preserved registers and before the spill areas. | 869 // after the preserved registers and before the spill areas. |
| 875 // LocalsSlotsPaddingBytes is the amount of padding between the globals and | 870 // LocalsSlotsPaddingBytes is the amount of padding between the globals and |
| 876 // locals area if they are separate. | 871 // locals area if they are separate. |
| 877 assert(SpillAreaAlignmentBytes <= Traits::X86_STACK_ALIGNMENT_BYTES); | 872 assert(SpillAreaAlignmentBytes <= Traits::X86_STACK_ALIGNMENT_BYTES); |
| (...skipping 172 matching lines...) |
| 1050 | 1045 |
| 1051 Variable *r15 = | 1046 Variable *r15 = |
| 1052 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 1047 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 1053 Variable *rcx = | 1048 Variable *rcx = |
| 1054 getPhysicalRegister(Traits::RegisterSet::Reg_rcx, IceType_i64); | 1049 getPhysicalRegister(Traits::RegisterSet::Reg_rcx, IceType_i64); |
| 1055 Variable *ecx = | 1050 Variable *ecx = |
| 1056 getPhysicalRegister(Traits::RegisterSet::Reg_ecx, IceType_i32); | 1051 getPhysicalRegister(Traits::RegisterSet::Reg_ecx, IceType_i32); |
| 1057 | 1052 |
| 1058 _pop(rcx); | 1053 _pop(rcx); |
| 1059 Context.insert<InstFakeDef>(ecx, rcx); | 1054 Context.insert<InstFakeDef>(ecx, rcx); |
| 1060 _bundle_lock(); | 1055 AutoBundle _(this); |
| 1061 _mov(ebp, ecx); | 1056 _mov(ebp, ecx); |
| 1062 | 1057 |
| 1063 _redefined(Context.insert<InstFakeDef>(rbp, ebp)); | 1058 _redefined(Context.insert<InstFakeDef>(rbp, ebp)); |
| 1064 _add(rbp, r15); | 1059 _add(rbp, r15); |
| 1065 _bundle_unlock(); | |
| 1066 } | 1060 } |
| 1067 } | 1061 } |
| 1068 | 1062 |
| 1069 // Add pop instructions for preserved registers. | 1063 // Add pop instructions for preserved registers. |
| 1070 llvm::SmallBitVector CalleeSaves = | 1064 llvm::SmallBitVector CalleeSaves = |
| 1071 getRegisterSet(RegSet_CalleeSave, RegSet_None); | 1065 getRegisterSet(RegSet_CalleeSave, RegSet_None); |
| 1072 llvm::SmallBitVector Popped(CalleeSaves.size()); | 1066 llvm::SmallBitVector Popped(CalleeSaves.size()); |
| 1073 for (int32_t i = CalleeSaves.size() - 1; i >= 0; --i) { | 1067 for (int32_t i = CalleeSaves.size() - 1; i >= 0; --i) { |
| 1074 if (i == Traits::RegisterSet::Reg_rbp && IsEbpBasedFrame) | 1068 if (i == Traits::RegisterSet::Reg_rbp && IsEbpBasedFrame) |
| 1075 continue; | 1069 continue; |
| (...skipping 14 matching lines...) |
| 1090 | 1084 |
| 1091 Variable *T_rcx = makeReg(IceType_i64, Traits::RegisterSet::Reg_rcx); | 1085 Variable *T_rcx = makeReg(IceType_i64, Traits::RegisterSet::Reg_rcx); |
| 1092 Variable *T_ecx = makeReg(IceType_i32, Traits::RegisterSet::Reg_ecx); | 1086 Variable *T_ecx = makeReg(IceType_i32, Traits::RegisterSet::Reg_ecx); |
| 1093 _pop(T_rcx); | 1087 _pop(T_rcx); |
| 1094 _mov(T_ecx, T_rcx); | 1088 _mov(T_ecx, T_rcx); |
| 1095 | 1089 |
| 1096 // lowerIndirectJump(T_ecx); | 1090 // lowerIndirectJump(T_ecx); |
| 1097 Variable *r15 = | 1091 Variable *r15 = |
| 1098 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 1092 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 1099 | 1093 |
| 1100 _bundle_lock(); | 1094 /* AutoBundle scoping */ { |
| 1101 const SizeT BundleSize = 1 | 1095 AutoBundle _(this); |
| 1102 << Func->getAssembler<>()->getBundleAlignLog2Bytes(); | 1096 const SizeT BundleSize = |
| 1103 _and(T_ecx, Ctx->getConstantInt32(~(BundleSize - 1))); | 1097 1 << Func->getAssembler<>()->getBundleAlignLog2Bytes(); |
| 1104 Context.insert<InstFakeDef>(T_rcx, T_ecx); | 1098 _and(T_ecx, Ctx->getConstantInt32(~(BundleSize - 1))); |
| 1105 _add(T_rcx, r15); | 1099 Context.insert<InstFakeDef>(T_rcx, T_ecx); |
| | 1100 _add(T_rcx, r15); |
| 1106 | 1101 |
| 1107 _jmp(T_rcx); | 1102 _jmp(T_rcx); |
| 1108 _bundle_unlock(); | 1103 } |
| 1109 | 1104 |
| 1110 if (RI->getSrcSize()) { | 1105 if (RI->getSrcSize()) { |
| 1111 auto *RetValue = llvm::cast<Variable>(RI->getSrc(0)); | 1106 auto *RetValue = llvm::cast<Variable>(RI->getSrc(0)); |
| 1112 Context.insert<InstFakeUse>(RetValue); | 1107 Context.insert<InstFakeUse>(RetValue); |
| 1113 } | 1108 } |
| 1114 RI->setDeleted(); | 1109 RI->setDeleted(); |
| 1115 } | 1110 } |
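
The /* AutoBundle scoping */ braces above address the opposite problem from the unique_ptr idiom: here the unlock must land immediately after the jmp and before the fake-use bookkeeping that follows, so the guard is confined to an explicit inner scope. A standalone sketch of the shape (stand-in guard again, not the real class):

    #include <cstdio>

    struct BundleGuard {
      BundleGuard() { std::puts(".bundle_lock"); }
      ~BundleGuard() { std::puts(".bundle_unlock"); }
    };

    void lowerSandboxedRetSketch() {
      std::puts("pop  rcx");
      { // explicit scope: the bundle covers only the masked-jump sequence
        BundleGuard Guard;
        std::puts("and  ecx, ~(BundleSize - 1)");
        std::puts("add  rcx, r15");
        std::puts("jmp  *rcx");
      } // .bundle_unlock emitted here, before the trailing bookkeeping
      std::puts("<fake-use of the return value>");
    }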
| 1116 | 1111 |
| 1117 void TargetX8664::emitJumpTable(const Cfg *Func, | 1112 void TargetX8664::emitJumpTable(const Cfg *Func, |
| 1118 const InstJumpTable *JumpTable) const { | 1113 const InstJumpTable *JumpTable) const { |
| (...skipping 306 matching lines...) |
| 1425 #define X(tag, sizeLog2, align, elts, elty, str) \ | 1420 #define X(tag, sizeLog2, align, elts, elty, str) \ |
| 1426 static_assert(_table1_##tag == _table2_##tag, \ | 1421 static_assert(_table1_##tag == _table2_##tag, \ |
| 1427 "Inconsistency between ICETYPEX8664_TABLE and ICETYPE_TABLE"); | 1422 "Inconsistency between ICETYPEX8664_TABLE and ICETYPE_TABLE"); |
| 1428 ICETYPE_TABLE | 1423 ICETYPE_TABLE |
| 1429 #undef X | 1424 #undef X |
| 1430 } // end of namespace dummy3 | 1425 } // end of namespace dummy3 |
| 1431 } // end of anonymous namespace | 1426 } // end of anonymous namespace |
| 1432 | 1427 |
| 1433 } // end of namespace X8664 | 1428 } // end of namespace X8664 |
| 1434 } // end of namespace Ice | 1429 } // end of namespace Ice |