| OLD | NEW |
| 1 //===- subzero/src/IceTargetLoweringX8664.cpp - x86-64 lowering -----------===// | 1 //===- subzero/src/IceTargetLoweringX8664.cpp - x86-64 lowering -----------===// |
| 2 // | 2 // |
| 3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
| 4 // | 4 // |
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
| 7 // | 7 // |
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
| 9 /// | 9 /// |
| 10 /// \file | 10 /// \file |
| (...skipping 279 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 290 | 290 |
| 291 void TargetX8664::emitGetIP(CfgNode *Node) { | 291 void TargetX8664::emitGetIP(CfgNode *Node) { |
| 292 // No IP base register is needed on X86-64. | 292 // No IP base register is needed on X86-64. |
| 293 (void)Node; | 293 (void)Node; |
| 294 } | 294 } |
| 295 | 295 |
| 296 Traits::X86OperandMem *TargetX8664::_sandbox_mem_reference(X86OperandMem *Mem) { | 296 Traits::X86OperandMem *TargetX8664::_sandbox_mem_reference(X86OperandMem *Mem) { |
| 297 // In x86_64-nacl, all memory references are relative to %r15 (i.e., %rzp.) | 297 // In x86_64-nacl, all memory references are relative to %r15 (i.e., %rzp.) |
| 298 // NaCl sandboxing also requires that any register other than %rsp and | 298 // NaCl sandboxing also requires that any register other than %rsp and |
| 299 // %rbp be 'truncated' to 32-bit before memory access. | 299 // %rbp be 'truncated' to 32-bit before memory access. |
| 300 assert(NeedSandboxing); | 300 if (SandboxingType == ST_None) { |
| 301 return Mem; |
| 302 } |
| 303 |
| 304 if (SandboxingType == ST_Nonsfi) { |
| 305 llvm::report_fatal_error( |
| 306 "_sandbox_mem_reference not implemented for nonsfi"); |
| 307 } |
| 308 |
| 301 Variable *Base = Mem->getBase(); | 309 Variable *Base = Mem->getBase(); |
| 302 Variable *Index = Mem->getIndex(); | 310 Variable *Index = Mem->getIndex(); |
| 303 uint16_t Shift = 0; | 311 uint16_t Shift = 0; |
| 304 Variable *r15 = | 312 Variable *ZeroReg = |
| 305 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 313 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 306 Constant *Offset = Mem->getOffset(); | 314 Constant *Offset = Mem->getOffset(); |
| 307 Variable *T = nullptr; | 315 Variable *T = nullptr; |
| 308 | 316 |
| 309 if (Mem->getIsRebased()) { | 317 if (Mem->getIsRebased()) { |
| 310 // If Mem.IsRebased, then we don't need to update Mem to contain a reference | 318 // If Mem.IsRebased, then we don't need to update Mem to contain a reference |
| 311 // to %r15, but we still need to truncate Mem.Index (if any) to 32-bit. | 319 // to a valid base register (%r15, %rsp, or %rbp), but we still need to |
| 312 assert(r15 == Base); | 320 // truncate Mem.Index (if any) to 32-bit. |
| 313 T = Index; | 321 assert(ZeroReg == Base || Base->isRematerializable()); |
| 322 T = makeReg(IceType_i32); |
| 323 _mov(T, Index); |
| 314 Shift = Mem->getShift(); | 324 Shift = Mem->getShift(); |
| 315 } else if (Base != nullptr && Index != nullptr) { | 325 } else { |
| 316 // Another approach could be to emit an | 326 if (Base != nullptr) { |
| 317 // | 327 if (Base->isRematerializable()) { |
| 318 // lea Mem, %T | 328 ZeroReg = Base; |
| 319 // | 329 } else { |
| 320 // And then update Mem.Base = r15, Mem.Index = T, Mem.Shift = 0 | 330 T = Base; |
| 321 llvm::report_fatal_error("memory reference contains base and index."); | 331 } |
| 322 } else if (Base != nullptr) { | 332 } |
| 323 T = Base; | 333 |
| 324 } else if (Index != nullptr) { | 334 if (Index != nullptr) { |
| 325 T = Index; | 335 assert(!Index->isRematerializable()); |
| 326 Shift = Mem->getShift(); | 336 if (T != nullptr) { |
| 337 llvm::report_fatal_error("memory reference contains base and index."); |
| 338 } |
| 339 T = Index; |
| 340 Shift = Mem->getShift(); |
| 341 } |
| 327 } | 342 } |
| 328 | 343 |
| 329 // NeedsLea is a flag indicating whether Mem needs to be materialized to a | 344 // NeedsLea is a flag indicating whether Mem needs to be materialized to a |
| 330 // GPR prior to being used. A LEA is needed if Mem.Offset is a constant | 345 // GPR prior to being used. A LEA is needed if Mem.Offset is a constant |
| 331 // relocatable, or if Mem.Offset is negative. In both these cases, the LEA is | 346 // relocatable, or if Mem.Offset is negative. In both these cases, the LEA is |
| 332 // needed to ensure the sandboxed memory operand will only use the lower | 347 // needed to ensure the sandboxed memory operand will only use the lower |
| 333 // 32-bits of T+Offset. | 348 // 32-bits of T+Offset. |
| 334 bool NeedsLea = false; | 349 bool NeedsLea = false; |
| 335 if (const auto *Offset = Mem->getOffset()) { | 350 if (const auto *Offset = Mem->getOffset()) { |
| 336 if (llvm::isa<ConstantRelocatable>(Offset)) { | 351 if (llvm::isa<ConstantRelocatable>(Offset)) { |
| (...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 391 Traits::X86OperandMem::DefaultSegment, NotRebased)); | 406 Traits::X86OperandMem::DefaultSegment, NotRebased)); |
| 392 | 407 |
| 393 T = makeReg(IceType_i64, RegNum); | 408 T = makeReg(IceType_i64, RegNum); |
| 394 _movzx(T, NewT); | 409 _movzx(T, NewT); |
| 395 Shift = 0; | 410 Shift = 0; |
| 396 Offset = nullptr; | 411 Offset = nullptr; |
| 397 } | 412 } |
| 398 | 413 |
| 399 static constexpr bool IsRebased = true; | 414 static constexpr bool IsRebased = true; |
| 400 return Traits::X86OperandMem::create( | 415 return Traits::X86OperandMem::create( |
| 401 Func, Mem->getType(), r15, Offset, T, Shift, | 416 Func, Mem->getType(), ZeroReg, Offset, T, Shift, |
| 402 Traits::X86OperandMem::DefaultSegment, IsRebased); | 417 Traits::X86OperandMem::DefaultSegment, IsRebased); |
| 403 } | 418 } |
| 404 | 419 |
| 405 void TargetX8664::_sub_sp(Operand *Adjustment) { | 420 void TargetX8664::_sub_sp(Operand *Adjustment) { |
| 406 Variable *rsp = | 421 Variable *rsp = |
| 407 getPhysicalRegister(Traits::RegisterSet::Reg_rsp, Traits::WordType); | 422 getPhysicalRegister(Traits::RegisterSet::Reg_rsp, Traits::WordType); |
| 408 if (!NeedSandboxing) { | 423 if (!NeedSandboxing) { |
| 409 _sub(rsp, Adjustment); | 424 _sub(rsp, Adjustment); |
| 410 return; | 425 return; |
| 411 } | 426 } |
| 412 | 427 |
| 413 Variable *esp = | 428 Variable *esp = |
| 414 getPhysicalRegister(Traits::RegisterSet::Reg_esp, IceType_i32); | 429 getPhysicalRegister(Traits::RegisterSet::Reg_esp, IceType_i32); |
| 415 Variable *r15 = | 430 Variable *r15 = |
| 416 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 431 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 417 | 432 |
| 418 // .bundle_start | 433 // .bundle_start |
| 419 // sub Adjustment, %esp | 434 // sub Adjustment, %esp |
| 420 // add %r15, %rsp | 435 // add %r15, %rsp |
| 421 // .bundle_end | 436 // .bundle_end |
| 422 AutoBundle _(this); | 437 AutoBundle _(this); |
| 423 _redefined(Context.insert<InstFakeDef>(esp, rsp)); | 438 _redefined(Context.insert<InstFakeDef>(esp, rsp)); |
| 424 _sub(esp, Adjustment); | 439 _sub(esp, Adjustment); |
| 425 _redefined(Context.insert<InstFakeDef>(rsp, esp)); | 440 _redefined(Context.insert<InstFakeDef>(rsp, esp)); |
| 426 _add(rsp, r15); | 441 _add(rsp, r15); |
| 427 } | 442 } |
| 428 | 443 |
| 444 void TargetX8664::initRebasePtr() { |
| 445 switch (SandboxingType) { |
| 446 case ST_Nonsfi: |
| 447 // Probably no implementation is needed, but error to be safe for now. |
| 448 llvm::report_fatal_error( |
| 449 "initRebasePtr() is not yet implemented on x32-nonsfi."); |
| 450 case ST_NaCl: |
| 451 RebasePtr = getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 452 break; |
| 453 case ST_None: |
| 454 // nothing. |
| 455 break; |
| 456 } |
| 457 } |
| 458 |
| 429 void TargetX8664::initSandbox() { | 459 void TargetX8664::initSandbox() { |
| 430 assert(NeedSandboxing); | 460 assert(SandboxingType == ST_NaCl); |
| 431 Context.init(Func->getEntryNode()); | 461 Context.init(Func->getEntryNode()); |
| 432 Context.setInsertPoint(Context.getCur()); | 462 Context.setInsertPoint(Context.getCur()); |
| 433 Variable *r15 = | 463 Variable *r15 = |
| 434 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); | 464 getPhysicalRegister(Traits::RegisterSet::Reg_r15, IceType_i64); |
| 435 Context.insert<InstFakeDef>(r15); | 465 Context.insert<InstFakeDef>(r15); |
| 436 Context.insert<InstFakeUse>(r15); | 466 Context.insert<InstFakeUse>(r15); |
| 437 } | 467 } |
| 438 | 468 |
| 469 namespace { |
| 470 bool isRematerializable(const Variable *Var) { |
| 471 return Var != nullptr && Var->isRematerializable(); |
| 472 } |
| 473 } // end of anonymous namespace |
| 474 |
| 475 bool TargetX8664::legalizeOptAddrForSandbox(OptAddr *Addr) { |
| 476 if (SandboxingType == ST_Nonsfi) { |
| 477 llvm::report_fatal_error("Nonsfi not yet implemented for x8664."); |
| 478 } |
| 479 |
| 480 if (isRematerializable(Addr->Base)) { |
| 481 if (Addr->Index == RebasePtr) { |
| 482 Addr->Index = nullptr; |
| 483 Addr->Shift = 0; |
| 484 } |
| 485 return true; |
| 486 } |
| 487 |
| 488 if (isRematerializable(Addr->Index)) { |
| 489 if (Addr->Base == RebasePtr) { |
| 490 Addr->Base = nullptr; |
| 491 } |
| 492 return true; |
| 493 } |
| 494 |
| 495 assert(Addr->Base != RebasePtr && Addr->Index != RebasePtr); |
| 496 |
| 497 if (Addr->Base == nullptr) { |
| 498 return true; |
| 499 } |
| 500 |
| 501 if (Addr->Index == nullptr) { |
| 502 return true; |
| 503 } |
| 504 |
| 505 return false; |
| 506 } |
| 507 |
| 439 void TargetX8664::lowerIndirectJump(Variable *JumpTarget) { | 508 void TargetX8664::lowerIndirectJump(Variable *JumpTarget) { |
| 440 std::unique_ptr<AutoBundle> Bundler; | 509 std::unique_ptr<AutoBundle> Bundler; |
| 441 | 510 |
| 442 if (!NeedSandboxing) { | 511 if (!NeedSandboxing) { |
| 443 Variable *T = makeReg(IceType_i64); | 512 Variable *T = makeReg(IceType_i64); |
| 444 _movzx(T, JumpTarget); | 513 _movzx(T, JumpTarget); |
| 445 JumpTarget = T; | 514 JumpTarget = T; |
| 446 } else { | 515 } else { |
| 447 Variable *T = makeReg(IceType_i32); | 516 Variable *T = makeReg(IceType_i32); |
| 448 Variable *T64 = makeReg(IceType_i64); | 517 Variable *T64 = makeReg(IceType_i64); |
| (...skipping 409 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 858 #define X(tag, sizeLog2, align, elts, elty, str) \ | 927 #define X(tag, sizeLog2, align, elts, elty, str) \ |
| 859 static_assert(_table1_##tag == _table2_##tag, \ | 928 static_assert(_table1_##tag == _table2_##tag, \ |
| 860 "Inconsistency between ICETYPEX8664_TABLE and ICETYPE_TABLE"); | 929 "Inconsistency between ICETYPEX8664_TABLE and ICETYPE_TABLE"); |
| 861 ICETYPE_TABLE | 930 ICETYPE_TABLE |
| 862 #undef X | 931 #undef X |
| 863 } // end of namespace dummy3 | 932 } // end of namespace dummy3 |
| 864 } // end of anonymous namespace | 933 } // end of anonymous namespace |
| 865 | 934 |
| 866 } // end of namespace X8664 | 935 } // end of namespace X8664 |
| 867 } // end of namespace Ice | 936 } // end of namespace Ice |
| OLD | NEW |