OLD | NEW |
1 //===-- X86InstrInfo.cpp - X86 Instruction Information --------------------===// | 1 //===-- X86InstrInfo.cpp - X86 Instruction Information --------------------===// |
2 // | 2 // |
3 // The LLVM Compiler Infrastructure | 3 // The LLVM Compiler Infrastructure |
4 // | 4 // |
5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
7 // | 7 // |
8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
9 // | 9 // |
10 // This file contains the X86 implementation of the TargetInstrInfo class. | 10 // This file contains the X86 implementation of the TargetInstrInfo class. |
(...skipping 264 matching lines...)
275 for (unsigned i = 0, e = array_lengthof(OpTbl2Addr); i != e; ++i) { | 275 for (unsigned i = 0, e = array_lengthof(OpTbl2Addr); i != e; ++i) { |
276 unsigned RegOp = OpTbl2Addr[i].RegOp; | 276 unsigned RegOp = OpTbl2Addr[i].RegOp; |
277 unsigned MemOp = OpTbl2Addr[i].MemOp; | 277 unsigned MemOp = OpTbl2Addr[i].MemOp; |
278 unsigned Flags = OpTbl2Addr[i].Flags; | 278 unsigned Flags = OpTbl2Addr[i].Flags; |
279 AddTableEntry(RegOp2MemOpTable2Addr, MemOp2RegOpTable, | 279 AddTableEntry(RegOp2MemOpTable2Addr, MemOp2RegOpTable, |
280 RegOp, MemOp, | 280 RegOp, MemOp, |
281 // Index 0, folded load and store, no alignment requirement. | 281 // Index 0, folded load and store, no alignment requirement. |
282 Flags | TB_INDEX_0 | TB_FOLDED_LOAD | TB_FOLDED_STORE); | 282 Flags | TB_INDEX_0 | TB_FOLDED_LOAD | TB_FOLDED_STORE); |
283 } | 283 } |
284 | 284 |
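For reference, the Flags word OR-ed together in the loop above packs several fields. A condensed paraphrase of the TB_* enum defined earlier in this file (the authoritative definition sits in the skipped region above):

    // Condensed paraphrase of the TB_* flag encoding used by the fold tables.
    enum {
      TB_INDEX_0      = 0,        // bits 0-3: index of the folded operand
      TB_INDEX_MASK   = 0xf,
      TB_NO_REVERSE   = 1 << 4,   // suppress the mem->reg (unfold) mapping
      TB_NO_FORWARD   = 1 << 5,   // suppress the reg->mem (fold) mapping
      TB_FOLDED_LOAD  = 1 << 6,   // the folded memory operand is a load
      TB_FOLDED_STORE = 1 << 7,   // the folded memory operand is a store
      TB_ALIGN_SHIFT  = 8,        // bits 8-15: required memory alignment
      TB_ALIGN_16     = 16 << TB_ALIGN_SHIFT
    };

So "Index 0, folded load and store, no alignment requirement" in the loop's comment corresponds to TB_INDEX_0 | TB_FOLDED_LOAD | TB_FOLDED_STORE.
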
| 285 // @LOCALMOD-BEGIN |
| 286 uint16_t NoForwardForNaCl = STI.isTargetNaCl() ? TB_NO_FORWARD : 0; |
| 287 // @LOCALMOD-END |
| 288 |
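Entries below that OR in NoForwardForNaCl carry TB_NO_FORWARD only when targeting NaCl, whose sandbox does not permit indirect calls or jumps through a memory operand (the target must be loaded into a register and masked). AddTableEntry honors the bit by never registering the reg->mem direction, so those register forms are never folded; the uint16_t casts in the tables keep each OR-ed value at the width of the table's Flags field. A paraphrase of AddTableEntry as it appears elsewhere in this file:

    // Paraphrased from this file's AddTableEntry: TB_NO_FORWARD omits the
    // reg->mem entry, so foldMemoryOperand() cannot rewrite, e.g., CALL32r
    // into CALL32m; TB_NO_REVERSE likewise omits the mem->reg unfold entry.
    void X86InstrInfo::AddTableEntry(RegOp2MemOpTableType &R2MTable,
                                     MemOp2RegOpTableType &M2RTable,
                                     unsigned RegOp, unsigned MemOp,
                                     unsigned Flags) {
      if ((Flags & TB_NO_FORWARD) == 0) {
        assert(!R2MTable.count(RegOp) && "Duplicate entry!");
        R2MTable[RegOp] = std::make_pair(MemOp, Flags);
      }
      if ((Flags & TB_NO_REVERSE) == 0) {
        assert(!M2RTable.count(MemOp) && "Duplicate entry!");
        M2RTable[MemOp] = std::make_pair(RegOp, Flags);
      }
    }
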
285 static const X86OpTblEntry OpTbl0[] = { | 289 static const X86OpTblEntry OpTbl0[] = { |
286 { X86::BT16ri8, X86::BT16mi8, TB_FOLDED_LOAD }, | 290 { X86::BT16ri8, X86::BT16mi8, TB_FOLDED_LOAD }, |
287 { X86::BT32ri8, X86::BT32mi8, TB_FOLDED_LOAD }, | 291 { X86::BT32ri8, X86::BT32mi8, TB_FOLDED_LOAD }, |
288 { X86::BT64ri8, X86::BT64mi8, TB_FOLDED_LOAD }, | 292 { X86::BT64ri8, X86::BT64mi8, TB_FOLDED_LOAD }, |
289 { X86::CALL32r, X86::CALL32m, TB_FOLDED_LOAD }, | 293 { X86::CALL32r, X86::CALL32m, (uint16_t)(TB_FOLDED_LOAD | NoForwardForNaCl) }, |
290 { X86::CALL64r, X86::CALL64m, TB_FOLDED_LOAD }, | 294 { X86::CALL64r, X86::CALL64m, (uint16_t)(TB_FOLDED_LOAD | NoForwardForNaCl) }, |
291 { X86::CMP16ri, X86::CMP16mi, TB_FOLDED_LOAD }, | 295 { X86::CMP16ri, X86::CMP16mi, TB_FOLDED_LOAD }, |
292 { X86::CMP16ri8, X86::CMP16mi8, TB_FOLDED_LOAD }, | 296 { X86::CMP16ri8, X86::CMP16mi8, TB_FOLDED_LOAD }, |
293 { X86::CMP16rr, X86::CMP16mr, TB_FOLDED_LOAD }, | 297 { X86::CMP16rr, X86::CMP16mr, TB_FOLDED_LOAD }, |
294 { X86::CMP32ri, X86::CMP32mi, TB_FOLDED_LOAD }, | 298 { X86::CMP32ri, X86::CMP32mi, TB_FOLDED_LOAD }, |
295 { X86::CMP32ri8, X86::CMP32mi8, TB_FOLDED_LOAD }, | 299 { X86::CMP32ri8, X86::CMP32mi8, TB_FOLDED_LOAD }, |
296 { X86::CMP32rr, X86::CMP32mr, TB_FOLDED_LOAD }, | 300 { X86::CMP32rr, X86::CMP32mr, TB_FOLDED_LOAD }, |
297 { X86::CMP64ri32, X86::CMP64mi32, TB_FOLDED_LOAD }, | 301 { X86::CMP64ri32, X86::CMP64mi32, TB_FOLDED_LOAD }, |
298 { X86::CMP64ri8, X86::CMP64mi8, TB_FOLDED_LOAD }, | 302 { X86::CMP64ri8, X86::CMP64mi8, TB_FOLDED_LOAD }, |
299 { X86::CMP64rr, X86::CMP64mr, TB_FOLDED_LOAD }, | 303 { X86::CMP64rr, X86::CMP64mr, TB_FOLDED_LOAD }, |
300 { X86::CMP8ri, X86::CMP8mi, TB_FOLDED_LOAD }, | 304 { X86::CMP8ri, X86::CMP8mi, TB_FOLDED_LOAD }, |
301 { X86::CMP8rr, X86::CMP8mr, TB_FOLDED_LOAD }, | 305 { X86::CMP8rr, X86::CMP8mr, TB_FOLDED_LOAD }, |
302 { X86::DIV16r, X86::DIV16m, TB_FOLDED_LOAD }, | 306 { X86::DIV16r, X86::DIV16m, TB_FOLDED_LOAD }, |
303 { X86::DIV32r, X86::DIV32m, TB_FOLDED_LOAD }, | 307 { X86::DIV32r, X86::DIV32m, TB_FOLDED_LOAD }, |
304 { X86::DIV64r, X86::DIV64m, TB_FOLDED_LOAD }, | 308 { X86::DIV64r, X86::DIV64m, TB_FOLDED_LOAD }, |
305 { X86::DIV8r, X86::DIV8m, TB_FOLDED_LOAD }, | 309 { X86::DIV8r, X86::DIV8m, TB_FOLDED_LOAD }, |
306 { X86::EXTRACTPSrr, X86::EXTRACTPSmr, TB_FOLDED_STORE }, | 310 { X86::EXTRACTPSrr, X86::EXTRACTPSmr, TB_FOLDED_STORE }, |
307 { X86::IDIV16r, X86::IDIV16m, TB_FOLDED_LOAD }, | 311 { X86::IDIV16r, X86::IDIV16m, TB_FOLDED_LOAD }, |
308 { X86::IDIV32r, X86::IDIV32m, TB_FOLDED_LOAD }, | 312 { X86::IDIV32r, X86::IDIV32m, TB_FOLDED_LOAD }, |
309 { X86::IDIV64r, X86::IDIV64m, TB_FOLDED_LOAD }, | 313 { X86::IDIV64r, X86::IDIV64m, TB_FOLDED_LOAD }, |
310 { X86::IDIV8r, X86::IDIV8m, TB_FOLDED_LOAD }, | 314 { X86::IDIV8r, X86::IDIV8m, TB_FOLDED_LOAD }, |
311 { X86::IMUL16r, X86::IMUL16m, TB_FOLDED_LOAD }, | 315 { X86::IMUL16r, X86::IMUL16m, TB_FOLDED_LOAD }, |
312 { X86::IMUL32r, X86::IMUL32m, TB_FOLDED_LOAD }, | 316 { X86::IMUL32r, X86::IMUL32m, TB_FOLDED_LOAD }, |
313 { X86::IMUL64r, X86::IMUL64m, TB_FOLDED_LOAD }, | 317 { X86::IMUL64r, X86::IMUL64m, TB_FOLDED_LOAD }, |
314 { X86::IMUL8r, X86::IMUL8m, TB_FOLDED_LOAD }, | 318 { X86::IMUL8r, X86::IMUL8m, TB_FOLDED_LOAD }, |
315 { X86::JMP32r, X86::JMP32m, TB_FOLDED_LOAD }, | 319 { X86::JMP32r, X86::JMP32m, (uint16_t)(TB_FOLDED_LOAD | NoForwardForNaCl) }, |
316 { X86::JMP64r, X86::JMP64m, TB_FOLDED_LOAD }, | 320 { X86::JMP64r, X86::JMP64m, (uint16_t)(TB_FOLDED_LOAD | NoForwardForNaCl) }, |
317 { X86::MOV16ri, X86::MOV16mi, TB_FOLDED_STORE }, | 321 { X86::MOV16ri, X86::MOV16mi, TB_FOLDED_STORE }, |
318 { X86::MOV16rr, X86::MOV16mr, TB_FOLDED_STORE }, | 322 { X86::MOV16rr, X86::MOV16mr, TB_FOLDED_STORE }, |
319 { X86::MOV32ri, X86::MOV32mi, TB_FOLDED_STORE }, | 323 { X86::MOV32ri, X86::MOV32mi, TB_FOLDED_STORE }, |
320 { X86::MOV32rr, X86::MOV32mr, TB_FOLDED_STORE }, | 324 { X86::MOV32rr, X86::MOV32mr, TB_FOLDED_STORE }, |
321 { X86::MOV64ri32, X86::MOV64mi32, TB_FOLDED_STORE }, | 325 { X86::MOV64ri32, X86::MOV64mi32, TB_FOLDED_STORE }, |
322 { X86::MOV64rr, X86::MOV64mr, TB_FOLDED_STORE }, | 326 { X86::MOV64rr, X86::MOV64mr, TB_FOLDED_STORE }, |
323 { X86::MOV8ri, X86::MOV8mi, TB_FOLDED_STORE }, | 327 { X86::MOV8ri, X86::MOV8mi, TB_FOLDED_STORE }, |
324 { X86::MOV8rr, X86::MOV8mr, TB_FOLDED_STORE }, | 328 { X86::MOV8rr, X86::MOV8mr, TB_FOLDED_STORE }, |
325 { X86::MOV8rr_NOREX, X86::MOV8mr_NOREX, TB_FOLDED_STORE }, | 329 { X86::MOV8rr_NOREX, X86::MOV8mr_NOREX, TB_FOLDED_STORE }, |
326 { X86::MOVAPDrr, X86::MOVAPDmr, TB_FOLDED_STORE | TB_ALIGN_16 }, | 330 { X86::MOVAPDrr, X86::MOVAPDmr, TB_FOLDED_STORE | TB_ALIGN_16 }, |
(...skipping 18 matching lines...)
345 { X86::SETGr, X86::SETGm, TB_FOLDED_STORE }, | 349 { X86::SETGr, X86::SETGm, TB_FOLDED_STORE }, |
346 { X86::SETLEr, X86::SETLEm, TB_FOLDED_STORE }, | 350 { X86::SETLEr, X86::SETLEm, TB_FOLDED_STORE }, |
347 { X86::SETLr, X86::SETLm, TB_FOLDED_STORE }, | 351 { X86::SETLr, X86::SETLm, TB_FOLDED_STORE }, |
348 { X86::SETNEr, X86::SETNEm, TB_FOLDED_STORE }, | 352 { X86::SETNEr, X86::SETNEm, TB_FOLDED_STORE }, |
349 { X86::SETNOr, X86::SETNOm, TB_FOLDED_STORE }, | 353 { X86::SETNOr, X86::SETNOm, TB_FOLDED_STORE }, |
350 { X86::SETNPr, X86::SETNPm, TB_FOLDED_STORE }, | 354 { X86::SETNPr, X86::SETNPm, TB_FOLDED_STORE }, |
351 { X86::SETNSr, X86::SETNSm, TB_FOLDED_STORE }, | 355 { X86::SETNSr, X86::SETNSm, TB_FOLDED_STORE }, |
352 { X86::SETOr, X86::SETOm, TB_FOLDED_STORE }, | 356 { X86::SETOr, X86::SETOm, TB_FOLDED_STORE }, |
353 { X86::SETPr, X86::SETPm, TB_FOLDED_STORE }, | 357 { X86::SETPr, X86::SETPm, TB_FOLDED_STORE }, |
354 { X86::SETSr, X86::SETSm, TB_FOLDED_STORE }, | 358 { X86::SETSr, X86::SETSm, TB_FOLDED_STORE }, |
355 { X86::TAILJMPr, X86::TAILJMPm, TB_FOLDED_LOAD }, | 359 { X86::TAILJMPr, X86::TAILJMPm, (uint16_t)(TB_FOLDED_LOAD | NoForwardForNaCl) }, |
356 { X86::TAILJMPr64, X86::TAILJMPm64, TB_FOLDED_LOAD }, | 360 { X86::TAILJMPr64, X86::TAILJMPm64, (uint16_t)(TB_FOLDED_LOAD | NoForwardForNaCl) }, |
357 { X86::TEST16ri, X86::TEST16mi, TB_FOLDED_LOAD }, | 361 { X86::TEST16ri, X86::TEST16mi, TB_FOLDED_LOAD }, |
358 { X86::TEST32ri, X86::TEST32mi, TB_FOLDED_LOAD }, | 362 { X86::TEST32ri, X86::TEST32mi, TB_FOLDED_LOAD }, |
359 { X86::TEST64ri32, X86::TEST64mi32, TB_FOLDED_LOAD }, | 363 { X86::TEST64ri32, X86::TEST64mi32, TB_FOLDED_LOAD }, |
360 { X86::TEST8ri, X86::TEST8mi, TB_FOLDED_LOAD }, | 364 { X86::TEST8ri, X86::TEST8mi, TB_FOLDED_LOAD }, |
361 // AVX 128-bit versions of foldable instructions | 365 // AVX 128-bit versions of foldable instructions |
362 { X86::VEXTRACTPSrr,X86::VEXTRACTPSmr, TB_FOLDED_STORE }, | 366 { X86::VEXTRACTPSrr,X86::VEXTRACTPSmr, TB_FOLDED_STORE }, |
363 { X86::VEXTRACTF128rr, X86::VEXTRACTF128mr, TB_FOLDED_STORE | TB_ALIGN_16 }, | 367 { X86::VEXTRACTF128rr, X86::VEXTRACTF128mr, TB_FOLDED_STORE | TB_ALIGN_16 }, |
364 { X86::VMOVAPDrr, X86::VMOVAPDmr, TB_FOLDED_STORE | TB_ALIGN_16 }, | 368 { X86::VMOVAPDrr, X86::VMOVAPDmr, TB_FOLDED_STORE | TB_ALIGN_16 }, |
365 { X86::VMOVAPSrr, X86::VMOVAPSmr, TB_FOLDED_STORE | TB_ALIGN_16 }, | 369 { X86::VMOVAPSrr, X86::VMOVAPSmr, TB_FOLDED_STORE | TB_ALIGN_16 }, |
366 { X86::VMOVDQArr, X86::VMOVDQAmr, TB_FOLDED_STORE | TB_ALIGN_16 }, | 370 { X86::VMOVDQArr, X86::VMOVDQAmr, TB_FOLDED_STORE | TB_ALIGN_16 }, |
(...skipping 2872 matching lines...)
3239 | 3243 |
3240 if (Opc) { | 3244 if (Opc) { |
3241 BuildMI(MBB, MI, DL, get(Opc), DestReg) | 3245 BuildMI(MBB, MI, DL, get(Opc), DestReg) |
3242 .addReg(SrcReg, getKillRegState(KillSrc)); | 3246 .addReg(SrcReg, getKillRegState(KillSrc)); |
3243 return; | 3247 return; |
3244 } | 3248 } |
3245 | 3249 |
3246 // Moving EFLAGS to / from another register requires a push and a pop. | 3250 // Moving EFLAGS to / from another register requires a push and a pop. |
3247 // Notice that we have to adjust the stack if we don't want to clobber the | 3251 // Notice that we have to adjust the stack if we don't want to clobber the |
3248 // first frame index. See X86FrameLowering.cpp - clobbersTheStack. | 3252 // first frame index. See X86FrameLowering.cpp - clobbersTheStack. |
| 3253 |
| 3254 // @LOCALMOD-BEGIN |
| 3255 // |
| 3256 // NaCl's sandbox doesn't allow PUSHF/POPF. Instead, use LAHF/SAHF, |
| 3257 // which move the bottom 8 EFLAGS bits to/from AH. |
| 3258 bool FromEFLAGS = SrcReg == X86::EFLAGS; |
| 3259 bool ToEFLAGS = DestReg == X86::EFLAGS; |
| 3260 int Reg = FromEFLAGS ? DestReg : SrcReg; |
| 3261 bool is32 = X86::GR32RegClass.contains(Reg); |
| 3262 bool is64 = X86::GR64RegClass.contains(Reg); |
| 3263 int Mov = is64 ? X86::MOV64rr : X86::MOV32rr; |
| 3264 int Push = is64 ? X86::PUSH64r : X86::PUSH32r; |
| 3265 int Pop = is64 ? X86::POP64r : X86::POP32r; |
| 3266 int AX = is64 ? X86::RAX : X86::EAX; |
| 3267 if ((FromEFLAGS || ToEFLAGS) && (is32 || is64) && Subtarget.isTargetNaCl()) { |
| 3268 BuildMI(MBB, MI, DL, get(Push)).addReg(AX); |
| 3269 if (FromEFLAGS) { |
| 3270 BuildMI(MBB, MI, DL, get(X86::LAHF)); |
| 3271 BuildMI(MBB, MI, DL, get(Mov), Reg).addReg(AX); |
| 3272 } |
| 3273 if (ToEFLAGS) { |
| 3274 BuildMI(MBB, MI, DL, get(Mov), AX).addReg(Reg, getKillRegState(KillSrc)); |
| 3275 BuildMI(MBB, MI, DL, get(X86::SAHF)); |
| 3276 } |
| 3277 BuildMI(MBB, MI, DL, get(Pop), AX); |
| 3278 return; |
| 3279 } |
| 3280 // @LOCALMOD-END |
| 3281 |
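A note on what the sequence above actually transfers: LAHF loads AH with the low status byte of EFLAGS in the fixed layout SF:ZF:0:AF:0:PF:1:CF, and SAHF writes that byte back. OF sits in EFLAGS bit 11, so unlike the PUSHF/POPF path kept below for non-NaCl targets, this pair does not round-trip the overflow flag. Bit positions for reference (the LAHF_* names are illustrative, not LLVM identifiers):

    // Layout of AH after LAHF / consumed by SAHF, per the x86 ISA.
    // The LAHF_* names are ad hoc, for illustration only.
    enum : unsigned {
      LAHF_CF = 1u << 0, // carry
      //        1u << 1     always reads as 1
      LAHF_PF = 1u << 2, // parity
      //        1u << 3     always reads as 0
      LAHF_AF = 1u << 4, // auxiliary carry
      //        1u << 5     always reads as 0
      LAHF_ZF = 1u << 6, // zero
      LAHF_SF = 1u << 7  // sign
    };
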
3249 if (SrcReg == X86::EFLAGS) { | 3282 if (SrcReg == X86::EFLAGS) { |
3250 if (X86::GR64RegClass.contains(DestReg)) { | 3283 if (X86::GR64RegClass.contains(DestReg)) { |
3251 BuildMI(MBB, MI, DL, get(X86::PUSHF64)); | 3284 BuildMI(MBB, MI, DL, get(X86::PUSHF64)); |
3252 BuildMI(MBB, MI, DL, get(X86::POP64r), DestReg); | 3285 BuildMI(MBB, MI, DL, get(X86::POP64r), DestReg); |
3253 return; | 3286 return; |
3254 } | 3287 } |
3255 if (X86::GR32RegClass.contains(DestReg)) { | 3288 if (X86::GR32RegClass.contains(DestReg)) { |
3256 BuildMI(MBB, MI, DL, get(X86::PUSHF32)); | 3289 BuildMI(MBB, MI, DL, get(X86::PUSHF32)); |
3257 BuildMI(MBB, MI, DL, get(X86::POP32r), DestReg); | 3290 BuildMI(MBB, MI, DL, get(X86::POP32r), DestReg); |
3258 return; | 3291 return; |
(...skipping 2512 matching lines...)
5771 AU.setPreservesCFG(); | 5804 AU.setPreservesCFG(); |
5772 AU.addRequired<MachineDominatorTree>(); | 5805 AU.addRequired<MachineDominatorTree>(); |
5773 MachineFunctionPass::getAnalysisUsage(AU); | 5806 MachineFunctionPass::getAnalysisUsage(AU); |
5774 } | 5807 } |
5775 }; | 5808 }; |
5776 } | 5809 } |
5777 | 5810 |
5778 char LDTLSCleanup::ID = 0; | 5811 char LDTLSCleanup::ID = 0; |
5779 FunctionPass* | 5812 FunctionPass* |
5780 llvm::createCleanupLocalDynamicTLSPass() { return new LDTLSCleanup(); } | 5813 llvm::createCleanupLocalDynamicTLSPass() { return new LDTLSCleanup(); } |