| OLD | NEW |
| 1 // | 1 // |
| 2 // The Subzero Code Generator | 2 // The Subzero Code Generator |
| 3 // | 3 // |
| 4 // This file is distributed under the University of Illinois Open Source | 4 // This file is distributed under the University of Illinois Open Source |
| 5 // License. See LICENSE.TXT for details. | 5 // License. See LICENSE.TXT for details. |
| 6 // | 6 // |
| 7 //===----------------------------------------------------------------------===// | 7 //===----------------------------------------------------------------------===// |
| 8 /// | 8 /// |
| 9 /// \file | 9 /// \file |
| 10 /// \brief Implements the TargetLoweringMIPS32 class, which consists almost | 10 /// \brief Implements the TargetLoweringMIPS32 class, which consists almost |
| (...skipping 4312 matching lines...) |
| 4323 lowerLoad(InstLoad::create(Func, T_Hi, AddrHi, IceType_i32)); | 4323 lowerLoad(InstLoad::create(Func, T_Hi, AddrHi, IceType_i32)); |
| 4324 _sync(); | 4324 _sync(); |
| 4325 _mov(Dest64->getLo(), T_Lo); | 4325 _mov(Dest64->getLo(), T_Lo); |
| 4326 _mov(Dest64->getHi(), T_Hi); | 4326 _mov(Dest64->getHi(), T_Hi); |
| 4327 // Adding a fake-use of T to ensure the atomic load is not removed if Dest | 4327 // Adding a fake-use of T to ensure the atomic load is not removed if Dest |
| 4328 // is unused. | 4328 // is unused. |
| 4329 Context.insert<InstFakeUse>(T_Lo); | 4329 Context.insert<InstFakeUse>(T_Lo); |
| 4330 Context.insert<InstFakeUse>(T_Hi); | 4330 Context.insert<InstFakeUse>(T_Hi); |
| 4331 } else { | 4331 } else { |
| 4332 auto *T = makeReg(DestTy); | 4332 auto *T = makeReg(DestTy); |
| 4333 lowerLoad(InstLoad::create(Func, T, | 4333 auto *Base = legalizeToReg(Instr->getArg(0)); |
| 4334 formMemoryOperand(Instr->getArg(0), DestTy))); | 4334 lowerLoad(InstLoad::create(Func, T, formMemoryOperand(Base, DestTy))); |
| 4335 _sync(); | 4335 _sync(); |
| 4336 _mov(Dest, T); | 4336 _mov(Dest, T); |
| 4337 // Adding a fake-use of T to ensure the atomic load is not removed if Dest | 4337 // Adding a fake-use of T to ensure the atomic load is not removed if Dest |
| 4338 // is unused. | 4338 // is unused. |
| 4339 Context.insert<InstFakeUse>(T); | 4339 Context.insert<InstFakeUse>(T); |
| 4340 } | 4340 } |
| 4341 return; | 4341 return; |
| 4342 } | 4342 } |
| 4343 case Intrinsics::AtomicStore: { | 4343 case Intrinsics::AtomicStore: { |
| 4344 // We require the memory address to be naturally aligned. Given that is the | 4344 // We require the memory address to be naturally aligned. Given that is the |
| (...skipping 26 matching lines...) |
| 4371 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(0))); | 4371 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(0))); |
| 4372 auto *AddrHi = OperandMIPS32Mem::create( | 4372 auto *AddrHi = OperandMIPS32Mem::create( |
| 4373 Func, IceType_i32, Base, | 4373 Func, IceType_i32, Base, |
| 4374 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(4))); | 4374 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(4))); |
| 4375 lowerStore(InstStore::create(Func, ValLo, AddrLo, IceType_i32)); | 4375 lowerStore(InstStore::create(Func, ValLo, AddrLo, IceType_i32)); |
| 4376 lowerStore(InstStore::create(Func, ValHi, AddrHi, IceType_i32)); | 4376 lowerStore(InstStore::create(Func, ValHi, AddrHi, IceType_i32)); |
| 4377 _sync(); | 4377 _sync(); |
| 4378 } else { | 4378 } else { |
| 4379 _sync(); | 4379 _sync(); |
| 4380 auto *Val = legalizeToReg(Instr->getArg(0)); | 4380 auto *Val = legalizeToReg(Instr->getArg(0)); |
| 4381 lowerStore(InstStore::create( | 4381 auto *Base = legalizeToReg(Instr->getArg(1)); |
| 4382 Func, Val, formMemoryOperand(Instr->getArg(1), DestTy))); | 4382 lowerStore(InstStore::create(Func, Val, formMemoryOperand(Base, DestTy))); |
| 4383 _sync(); | 4383 _sync(); |
| 4384 } | 4384 } |
| 4385 return; | 4385 return; |
| 4386 } | 4386 } |
| 4387 case Intrinsics::AtomicCmpxchg: { | 4387 case Intrinsics::AtomicCmpxchg: { |
| 4388 assert(isScalarIntegerType(DestTy)); | 4388 assert(isScalarIntegerType(DestTy)); |
| 4389 // We require the memory address to be naturally aligned. Given that is the | 4389 // We require the memory address to be naturally aligned. Given that is the |
| 4390 // case, then normal loads are atomic. | 4390 // case, then normal loads are atomic. |
| 4391 if (!Intrinsics::isMemoryOrderValid( | 4391 if (!Intrinsics::isMemoryOrderValid( |
| 4392 ID, getConstantMemoryOrder(Instr->getArg(3)), | 4392 ID, getConstantMemoryOrder(Instr->getArg(3)), |
| 4393 getConstantMemoryOrder(Instr->getArg(4)))) { | 4393 getConstantMemoryOrder(Instr->getArg(4)))) { |
| 4394 Func->setError("Unexpected memory ordering for AtomicCmpxchg"); | 4394 Func->setError("Unexpected memory ordering for AtomicCmpxchg"); |
| 4395 return; | 4395 return; |
| 4396 } | 4396 } |
| 4397 | 4397 |
| 4398 InstMIPS32Label *Exit = InstMIPS32Label::create(Func, this); | 4398 InstMIPS32Label *Exit = InstMIPS32Label::create(Func, this); |
| 4399 InstMIPS32Label *Retry = InstMIPS32Label::create(Func, this); | 4399 InstMIPS32Label *Retry = InstMIPS32Label::create(Func, this); |
| 4400 constexpr CfgNode *NoTarget = nullptr; | 4400 constexpr CfgNode *NoTarget = nullptr; |
| 4401 auto *New = Instr->getArg(2); | 4401 auto *New = Instr->getArg(2); |
| 4402 auto *Expected = Instr->getArg(1); | 4402 auto *Expected = Instr->getArg(1); |
| 4403 auto *ActualAddress = Instr->getArg(0); | 4403 auto *ActualAddress = Instr->getArg(0); |
| 4404 | 4404 |
| 4405 if (DestTy == IceType_i64) { | 4405 if (DestTy == IceType_i64) { |
| 4406 InstMIPS32Label *Retry1 = InstMIPS32Label::create(Func, this); | 4406 InstMIPS32Label *Retry1 = InstMIPS32Label::create(Func, this); |
| 4407 auto *T1 = I32Reg(); | 4407 auto *T1 = I32Reg(); |
| 4408 auto *T2 = I32Reg(); | 4408 auto *T2 = I32Reg(); |
| 4409 auto *T3 = I32Reg(); |
| 4410 auto *T4 = I32Reg(); |
| 4409 _sync(); | 4411 _sync(); |
| 4410 Variable *ValHi, *ValLo, *ExpectedLo, *ExpectedHi; | 4412 Variable *ValHi, *ValLo, *ExpectedLo, *ExpectedHi; |
| 4411 if (llvm::isa<ConstantUndef>(Expected)) { | 4413 if (llvm::isa<ConstantUndef>(Expected)) { |
| 4412 ExpectedLo = legalizeToReg(Ctx->getConstantZero(IceType_i32)); | 4414 ExpectedLo = legalizeToReg(Ctx->getConstantZero(IceType_i32)); |
| 4413 ExpectedHi = legalizeToReg(Ctx->getConstantZero(IceType_i32)); | 4415 ExpectedHi = legalizeToReg(Ctx->getConstantZero(IceType_i32)); |
| 4414 } else { | 4416 } else if (auto *Expected64 = llvm::dyn_cast<Variable64On32>(Expected)) { |
| 4415 auto *Expected64 = llvm::cast<Variable64On32>(Expected); | |
| 4416 ExpectedLo = legalizeToReg(loOperand(Expected64)); | 4417 ExpectedLo = legalizeToReg(loOperand(Expected64)); |
| 4417 ExpectedHi = legalizeToReg(hiOperand(Expected64)); | 4418 ExpectedHi = legalizeToReg(hiOperand(Expected64)); |
| 4419 } else if (auto *C64 = llvm::dyn_cast<ConstantInteger64>(Expected)) { |
| 4420 const uint64_t Value = C64->getValue(); |
| 4421 uint64_t Upper32Bits = (Value >> INT32_BITS) & 0xFFFFFFFF; |
| 4422 uint64_t Lower32Bits = Value & 0xFFFFFFFF; |
| 4423 ExpectedLo = legalizeToReg(Ctx->getConstantInt32(Lower32Bits)); |
| 4424 ExpectedHi = legalizeToReg(Ctx->getConstantInt32(Upper32Bits)); |
| 4425 } else { |
| 4426 llvm::report_fatal_error( |
| 4427 "AtomicCmpxchg: getArg(1) is nor Constant neither Variable64On32"); |
| 4418 } | 4428 } |
| 4419 if (auto *C64 = llvm::dyn_cast<ConstantInteger64>(New)) { | 4429 if (auto *C64 = llvm::dyn_cast<ConstantInteger64>(New)) { |
| 4420 const uint64_t Value = C64->getValue(); | 4430 const uint64_t Value = C64->getValue(); |
| 4421 uint64_t Upper32Bits = (Value >> INT32_BITS) & 0xFFFFFFFF; | 4431 uint64_t Upper32Bits = (Value >> INT32_BITS) & 0xFFFFFFFF; |
| 4422 uint64_t Lower32Bits = Value & 0xFFFFFFFF; | 4432 uint64_t Lower32Bits = Value & 0xFFFFFFFF; |
| 4423 ValLo = legalizeToReg(Ctx->getConstantInt32(Lower32Bits)); | 4433 ValLo = legalizeToReg(Ctx->getConstantInt32(Lower32Bits)); |
| 4424 ValHi = legalizeToReg(Ctx->getConstantInt32(Upper32Bits)); | 4434 ValHi = legalizeToReg(Ctx->getConstantInt32(Upper32Bits)); |
| 4425 } else { | 4435 } else { |
| 4426 auto *Val = llvm::cast<Variable64On32>(New); | 4436 auto *Val = llvm::cast<Variable64On32>(New); |
| 4427 ValLo = legalizeToReg(loOperand(Val)); | 4437 ValLo = legalizeToReg(loOperand(Val)); |
| 4428 ValHi = legalizeToReg(hiOperand(Val)); | 4438 ValHi = legalizeToReg(hiOperand(Val)); |
| 4429 } | 4439 } |
| 4430 auto *Dest64 = llvm::cast<Variable64On32>(Dest); | 4440 auto *Dest64 = llvm::cast<Variable64On32>(Dest); |
| 4431 auto *BaseR = legalizeToReg(ActualAddress); | 4441 auto *BaseR = legalizeToReg(ActualAddress); |
| 4432 auto *AddrLo = OperandMIPS32Mem::create( | 4442 auto *AddrLo = OperandMIPS32Mem::create( |
| 4433 Func, IceType_i32, BaseR, | 4443 Func, IceType_i32, BaseR, |
| 4434 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(0))); | 4444 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(0))); |
| 4435 auto *AddrHi = OperandMIPS32Mem::create( | 4445 auto *AddrHi = OperandMIPS32Mem::create( |
| 4436 Func, IceType_i32, BaseR, | 4446 Func, IceType_i32, BaseR, |
| 4437 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(4))); | 4447 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(4))); |
| 4448 lowerLoad(InstLoad::create(Func, T3, AddrLo)); |
| 4449 lowerLoad(InstLoad::create(Func, T4, AddrHi)); |
| 4438 Context.insert(Retry); | 4450 Context.insert(Retry); |
| 4439 _ll(T1, AddrLo); | 4451 Sandboxer(this).ll(T1, AddrLo); |
| 4440 _br(NoTarget, NoTarget, T1, ExpectedLo, Exit, CondMIPS32::Cond::NE); | 4452 _br(NoTarget, NoTarget, T1, ExpectedLo, Exit, CondMIPS32::Cond::NE); |
| 4441 _sc(ValLo, AddrLo); | 4453 Sandboxer(this).sc(ValLo, AddrLo); |
| 4442 _br(NoTarget, NoTarget, ValLo, getZero(), Retry, CondMIPS32::Cond::EQ); | 4454 _br(NoTarget, NoTarget, ValLo, getZero(), Retry, CondMIPS32::Cond::EQ); |
| 4443 _mov(Dest64->getLo(), T1); | |
| 4444 Context.insert(Retry1); | 4455 Context.insert(Retry1); |
| 4445 _ll(T2, AddrHi); | 4456 Sandboxer(this).ll(T2, AddrHi); |
| 4446 _br(NoTarget, NoTarget, T2, ExpectedHi, Exit, CondMIPS32::Cond::NE); | 4457 _br(NoTarget, NoTarget, T2, ExpectedHi, Exit, CondMIPS32::Cond::NE); |
| 4447 _sc(ValHi, AddrHi); | 4458 Sandboxer(this).sc(ValHi, AddrHi); |
| 4448 _br(NoTarget, NoTarget, ValHi, getZero(), Retry1, CondMIPS32::Cond::EQ); | 4459 _br(NoTarget, NoTarget, ValHi, getZero(), Retry1, CondMIPS32::Cond::EQ); |
| 4449 _mov(Dest64->getHi(), T2); | |
| 4450 Context.insert<InstFakeUse>(getZero()); | 4460 Context.insert<InstFakeUse>(getZero()); |
| 4451 Context.insert(Exit); | 4461 Context.insert(Exit); |
| 4462 _mov(Dest64->getLo(), T3); |
| 4463 _mov(Dest64->getHi(), T4); |
| 4452 _sync(); | 4464 _sync(); |
| 4453 } else if (DestTy == IceType_i8 || DestTy == IceType_i16) { | 4465 } else if (DestTy == IceType_i8 || DestTy == IceType_i16) { |
| 4454 auto *NewR = legalizeToReg(New); | 4466 auto *NewR = legalizeToReg(New); |
| 4455 auto *ExpectedR = legalizeToReg(Expected); | 4467 auto *ExpectedR = legalizeToReg(Expected); |
| 4456 auto *ActualAddressR = legalizeToReg(ActualAddress); | 4468 auto *ActualAddressR = legalizeToReg(ActualAddress); |
| 4457 const uint32_t ShiftAmount = | 4469 const uint32_t ShiftAmount = |
| 4458 (INT32_BITS - CHAR_BITS * typeWidthInBytes(DestTy)); | 4470 (INT32_BITS - CHAR_BITS * typeWidthInBytes(DestTy)); |
| 4459 const uint32_t Mask = (1 << (CHAR_BITS * typeWidthInBytes(DestTy))) - 1; | 4471 const uint32_t Mask = (1 << (CHAR_BITS * typeWidthInBytes(DestTy))) - 1; |
| 4460 auto *RegAt = getPhysicalRegister(RegMIPS32::Reg_AT); | 4472 auto *RegAt = getPhysicalRegister(RegMIPS32::Reg_AT); |
| 4461 auto *T1 = I32Reg(); | 4473 auto *T1 = I32Reg(); |
| (...skipping 11 matching lines...) |
| 4473 _andi(RegAt, ActualAddressR, 3); | 4485 _andi(RegAt, ActualAddressR, 3); |
| 4474 _sll(T2, RegAt, 3); | 4486 _sll(T2, RegAt, 3); |
| 4475 _ori(RegAt, getZero(), Mask); | 4487 _ori(RegAt, getZero(), Mask); |
| 4476 _sllv(T3, RegAt, T2); | 4488 _sllv(T3, RegAt, T2); |
| 4477 _nor(T4, getZero(), T3); | 4489 _nor(T4, getZero(), T3); |
| 4478 _andi(RegAt, ExpectedR, Mask); | 4490 _andi(RegAt, ExpectedR, Mask); |
| 4479 _sllv(T5, RegAt, T2); | 4491 _sllv(T5, RegAt, T2); |
| 4480 _andi(RegAt, NewR, Mask); | 4492 _andi(RegAt, NewR, Mask); |
| 4481 _sllv(T6, RegAt, T2); | 4493 _sllv(T6, RegAt, T2); |
| 4482 Context.insert(Retry); | 4494 Context.insert(Retry); |
| 4483 _ll(T7, formMemoryOperand(T1, DestTy)); | 4495 Sandboxer(this).ll(T7, formMemoryOperand(T1, DestTy)); |
| 4484 _and(T8, T7, T3); | 4496 _and(T8, T7, T3); |
| 4485 _br(NoTarget, NoTarget, T8, T5, Exit, CondMIPS32::Cond::NE); | 4497 _br(NoTarget, NoTarget, T8, T5, Exit, CondMIPS32::Cond::NE); |
| 4486 _and(RegAt, T7, T4); | 4498 _and(RegAt, T7, T4); |
| 4487 _or(T9, RegAt, T6); | 4499 _or(T9, RegAt, T6); |
| 4488 _sc(T9, formMemoryOperand(T1, DestTy)); | 4500 Sandboxer(this).sc(T9, formMemoryOperand(T1, DestTy)); |
| 4489 _br(NoTarget, NoTarget, getZero(), T9, Retry, CondMIPS32::Cond::EQ); | 4501 _br(NoTarget, NoTarget, getZero(), T9, Retry, CondMIPS32::Cond::EQ); |
| 4490 Context.insert<InstFakeUse>(getZero()); | 4502 Context.insert<InstFakeUse>(getZero()); |
| 4491 Context.insert(Exit); | 4503 Context.insert(Exit); |
| 4492 _srlv(RegAt, T8, T2); | 4504 _srlv(RegAt, T8, T2); |
| 4493 _sll(RegAt, RegAt, ShiftAmount); | 4505 _sll(RegAt, RegAt, ShiftAmount); |
| 4494 _sra(RegAt, RegAt, ShiftAmount); | 4506 _sra(RegAt, RegAt, ShiftAmount); |
| 4495 _mov(Dest, RegAt); | 4507 _mov(Dest, RegAt); |
| 4496 _sync(); | 4508 _sync(); |
| 4497 Context.insert<InstFakeUse>(ExpectedR); | 4509 Context.insert<InstFakeUse>(ExpectedR); |
| 4498 Context.insert<InstFakeUse>(NewR); | 4510 Context.insert<InstFakeUse>(NewR); |
| 4499 } else { | 4511 } else { |
| 4500 auto *T1 = I32Reg(); | 4512 auto *T1 = I32Reg(); |
| 4513 _sync(); |
| 4514 Context.insert(Retry); |
| 4501 auto *NewR = legalizeToReg(New); | 4515 auto *NewR = legalizeToReg(New); |
| 4502 auto *ExpectedR = legalizeToReg(Expected); | 4516 auto *ExpectedR = legalizeToReg(Expected); |
| 4503 auto *ActualAddressR = legalizeToReg(ActualAddress); | 4517 auto *ActualAddressR = legalizeToReg(ActualAddress); |
| 4504 _sync(); | 4518 Sandboxer(this).ll(T1, formMemoryOperand(ActualAddressR, DestTy)); |
| 4505 Context.insert(Retry); | |
| 4506 _ll(T1, formMemoryOperand(ActualAddressR, DestTy)); | |
| 4507 _br(NoTarget, NoTarget, T1, ExpectedR, Exit, CondMIPS32::Cond::NE); | 4519 _br(NoTarget, NoTarget, T1, ExpectedR, Exit, CondMIPS32::Cond::NE); |
| 4508 _sc(NewR, formMemoryOperand(ActualAddressR, DestTy)); | 4520 Sandboxer(this).sc(NewR, formMemoryOperand(ActualAddressR, DestTy)); |
| 4509 _br(NoTarget, NoTarget, NewR, getZero(), Retry, CondMIPS32::Cond::EQ); | 4521 _br(NoTarget, NoTarget, NewR, getZero(), Retry, CondMIPS32::Cond::EQ); |
| 4510 Context.insert<InstFakeUse>(getZero()); | 4522 Context.insert<InstFakeUse>(getZero()); |
| 4511 Context.insert(Exit); | 4523 Context.insert(Exit); |
| 4512 _mov(Dest, T1); | 4524 _mov(Dest, T1); |
| 4513 _sync(); | 4525 _sync(); |
| 4514 Context.insert<InstFakeUse>(ExpectedR); | 4526 Context.insert<InstFakeUse>(ExpectedR); |
| 4515 Context.insert<InstFakeUse>(NewR); | 4527 Context.insert<InstFakeUse>(NewR); |
| 4516 } | 4528 } |
| 4517 return; | 4529 return; |
| 4518 } | 4530 } |
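
For reference, the word-sized AtomicCmpxchg path above hand-builds the usual sync/ll/bne/sc/beq retry loop. A minimal behavioural model of what that loop computes, using std::atomic purely as a stand-in (this helper and its name are not part of the CL):

  #include <atomic>
  #include <cstdint>

  // Returns the value observed in memory: Expected on success, the conflicting
  // value on failure. This is the same result the i32 path moves into Dest.
  uint32_t cmpxchg32(std::atomic<uint32_t> &Mem, uint32_t Expected, uint32_t New) {
    uint32_t Observed = Expected;
    Mem.compare_exchange_strong(Observed, New, std::memory_order_seq_cst);
    return Observed;
  }

The leading and trailing _sync() calls in the lowering provide the full barriers that std::memory_order_seq_cst models here.
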
| (...skipping 35 matching lines...) |
| 4554 Func, IceType_i32, BaseR, | 4566 Func, IceType_i32, BaseR, |
| 4555 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(0))); | 4567 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(0))); |
| 4556 auto *AddrHi = OperandMIPS32Mem::create( | 4568 auto *AddrHi = OperandMIPS32Mem::create( |
| 4557 Func, IceType_i32, BaseR, | 4569 Func, IceType_i32, BaseR, |
| 4558 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(4))); | 4570 llvm::cast<ConstantInteger32>(Ctx->getConstantInt32(4))); |
| 4559 auto *RegAt = getPhysicalRegister(RegMIPS32::Reg_AT); | 4571 auto *RegAt = getPhysicalRegister(RegMIPS32::Reg_AT); |
| 4560 auto *T1 = I32Reg(); | 4572 auto *T1 = I32Reg(); |
| 4561 auto *T2 = I32Reg(); | 4573 auto *T2 = I32Reg(); |
| 4562 auto *T3 = I32Reg(); | 4574 auto *T3 = I32Reg(); |
| 4563 Context.insert(Retry); | 4575 Context.insert(Retry); |
| 4564 _ll(T1, AddrLo); | 4576 Sandboxer(this).ll(T1, AddrLo); |
| 4565 if (Operation == Intrinsics::AtomicExchange) { | 4577 if (Operation == Intrinsics::AtomicExchange) { |
| 4566 _mov(RegAt, ValLo); | 4578 _mov(RegAt, ValLo); |
| 4579 Context.insert<InstFakeUse>(T1); |
| 4567 } else if (Operation == Intrinsics::AtomicAdd) { | 4580 } else if (Operation == Intrinsics::AtomicAdd) { |
| 4568 createArithInst(Operation, RegAt, T1, ValLo); | 4581 createArithInst(Operation, RegAt, T1, ValLo); |
| 4569 _sltu(T2, RegAt, T1); | 4582 _sltu(T2, RegAt, T1); |
| 4570 } else if (Operation == Intrinsics::AtomicSub) { | 4583 } else if (Operation == Intrinsics::AtomicSub) { |
| 4571 createArithInst(Operation, RegAt, T1, ValLo); | 4584 createArithInst(Operation, RegAt, T1, ValLo); |
| 4572 _sltu(T2, T1, ValLo); | 4585 _sltu(T2, T1, ValLo); |
| 4573 } else { | 4586 } else { |
| 4574 createArithInst(Operation, RegAt, T1, ValLo); | 4587 createArithInst(Operation, RegAt, T1, ValLo); |
| 4575 } | 4588 } |
| 4576 _sc(RegAt, AddrLo); | 4589 Sandboxer(this).sc(RegAt, AddrLo); |
| 4577 _br(NoTarget, NoTarget, RegAt, getZero(), Retry, CondMIPS32::Cond::EQ); | 4590 _br(NoTarget, NoTarget, RegAt, getZero(), Retry, CondMIPS32::Cond::EQ); |
| 4578 Context.insert<InstFakeUse>(getZero()); | 4591 Context.insert<InstFakeUse>(getZero()); |
| 4579 _mov(Dest64->getLo(), T1); | 4592 _mov(Dest64->getLo(), T1); |
| 4580 Context.insert(Retry1); | 4593 Context.insert(Retry1); |
| 4581 _ll(T3, AddrHi); | 4594 Sandboxer(this).ll(T3, AddrHi); |
| 4582 if (Operation == Intrinsics::AtomicAdd || | 4595 if (Operation == Intrinsics::AtomicAdd || |
| 4583 Operation == Intrinsics::AtomicSub) { | 4596 Operation == Intrinsics::AtomicSub) { |
| 4584 _addu(RegAt, T2, ValHi); | 4597 _addu(RegAt, T2, ValHi); |
| 4585 createArithInst(Operation, RegAt, T3, RegAt); | 4598 createArithInst(Operation, RegAt, T3, RegAt); |
| 4586 } else if (Operation == Intrinsics::AtomicExchange) { | 4599 } else if (Operation == Intrinsics::AtomicExchange) { |
| 4587 _mov(RegAt, ValHi); | 4600 _mov(RegAt, ValHi); |
| 4601 Context.insert<InstFakeUse>(T3); |
| 4588 } else { | 4602 } else { |
| 4589 createArithInst(Operation, RegAt, T3, ValHi); | 4603 createArithInst(Operation, RegAt, T3, ValHi); |
| 4590 } | 4604 } |
| 4591 _sc(RegAt, AddrHi); | 4605 Sandboxer(this).sc(RegAt, AddrHi); |
| 4592 _br(NoTarget, NoTarget, RegAt, getZero(), Retry1, CondMIPS32::Cond::EQ); | 4606 _br(NoTarget, NoTarget, RegAt, getZero(), Retry1, CondMIPS32::Cond::EQ); |
| 4593 Context.insert<InstFakeUse>(getZero()); | 4607 Context.insert<InstFakeUse>(getZero()); |
| 4594 _mov(Dest64->getHi(), T3); | 4608 _mov(Dest64->getHi(), T3); |
| 4595 Context.insert<InstFakeUse>(ValLo); | 4609 Context.insert<InstFakeUse>(ValLo); |
| 4596 Context.insert<InstFakeUse>(ValHi); | 4610 Context.insert<InstFakeUse>(ValHi); |
| 4597 _sync(); | 4611 _sync(); |
| 4598 } else if (DestTy == IceType_i8 || DestTy == IceType_i16) { | 4612 } else if (DestTy == IceType_i8 || DestTy == IceType_i16) { |
| 4599 const uint32_t ShiftAmount = | 4613 const uint32_t ShiftAmount = |
| 4600 INT32_BITS - (CHAR_BITS * typeWidthInBytes(DestTy)); | 4614 INT32_BITS - (CHAR_BITS * typeWidthInBytes(DestTy)); |
| 4601 const uint32_t Mask = (1 << (CHAR_BITS * typeWidthInBytes(DestTy))) - 1; | 4615 const uint32_t Mask = (1 << (CHAR_BITS * typeWidthInBytes(DestTy))) - 1; |
| (...skipping 10 matching lines...) |
| 4612 _sync(); | 4626 _sync(); |
| 4613 _addiu(RegAt, getZero(), -4); | 4627 _addiu(RegAt, getZero(), -4); |
| 4614 _and(T1, ActualAddressR, RegAt); | 4628 _and(T1, ActualAddressR, RegAt); |
| 4615 _andi(RegAt, ActualAddressR, 3); | 4629 _andi(RegAt, ActualAddressR, 3); |
| 4616 _sll(T2, RegAt, 3); | 4630 _sll(T2, RegAt, 3); |
| 4617 _ori(RegAt, getZero(), Mask); | 4631 _ori(RegAt, getZero(), Mask); |
| 4618 _sllv(T3, RegAt, T2); | 4632 _sllv(T3, RegAt, T2); |
| 4619 _nor(T4, getZero(), T3); | 4633 _nor(T4, getZero(), T3); |
| 4620 _sllv(T5, NewR, T2); | 4634 _sllv(T5, NewR, T2); |
| 4621 Context.insert(Retry); | 4635 Context.insert(Retry); |
| 4622 _ll(T6, formMemoryOperand(T1, DestTy)); | 4636 Sandboxer(this).ll(T6, formMemoryOperand(T1, DestTy)); |
| 4623 if (Operation != Intrinsics::AtomicExchange) { | 4637 if (Operation != Intrinsics::AtomicExchange) { |
| 4624 createArithInst(Operation, RegAt, T6, T5); | 4638 createArithInst(Operation, RegAt, T6, T5); |
| 4625 _and(RegAt, RegAt, T3); | 4639 _and(RegAt, RegAt, T3); |
| 4626 } | 4640 } |
| 4627 _and(T7, T6, T4); | 4641 _and(T7, T6, T4); |
| 4628 if (Operation == Intrinsics::AtomicExchange) { | 4642 if (Operation == Intrinsics::AtomicExchange) { |
| 4629 _or(RegAt, T7, T5); | 4643 _or(RegAt, T7, T5); |
| 4630 } else { | 4644 } else { |
| 4631 _or(RegAt, T7, RegAt); | 4645 _or(RegAt, T7, RegAt); |
| 4632 } | 4646 } |
| 4633 _sc(RegAt, formMemoryOperand(T1, DestTy)); | 4647 Sandboxer(this).sc(RegAt, formMemoryOperand(T1, DestTy)); |
| 4634 _br(NoTarget, NoTarget, RegAt, getZero(), Retry, CondMIPS32::Cond::EQ); | 4648 _br(NoTarget, NoTarget, RegAt, getZero(), Retry, CondMIPS32::Cond::EQ); |
| 4635 Context.insert<InstFakeUse>(getZero()); | 4649 Context.insert<InstFakeUse>(getZero()); |
| 4636 _and(RegAt, T6, T3); | 4650 _and(RegAt, T6, T3); |
| 4637 _srlv(RegAt, RegAt, T2); | 4651 _srlv(RegAt, RegAt, T2); |
| 4638 _sll(RegAt, RegAt, ShiftAmount); | 4652 _sll(RegAt, RegAt, ShiftAmount); |
| 4639 _sra(RegAt, RegAt, ShiftAmount); | 4653 _sra(RegAt, RegAt, ShiftAmount); |
| 4640 _mov(Dest, RegAt); | 4654 _mov(Dest, RegAt); |
| 4641 _sync(); | 4655 _sync(); |
| 4642 Context.insert<InstFakeUse>(NewR); | 4656 Context.insert<InstFakeUse>(NewR); |
| 4643 Context.insert<InstFakeUse>(Dest); | 4657 Context.insert<InstFakeUse>(Dest); |
| 4644 } else { | 4658 } else { |
| 4645 auto *T1 = I32Reg(); | 4659 auto *T1 = I32Reg(); |
| 4646 auto *T2 = I32Reg(); | 4660 auto *T2 = I32Reg(); |
| 4647 auto *NewR = legalizeToReg(New); | 4661 auto *NewR = legalizeToReg(New); |
| 4648 auto *ActualAddressR = legalizeToReg(ActualAddress); | 4662 auto *ActualAddressR = legalizeToReg(ActualAddress); |
| 4649 _sync(); | 4663 _sync(); |
| 4650 Context.insert(Retry); | 4664 Context.insert(Retry); |
| 4651 _ll(T1, formMemoryOperand(ActualAddressR, DestTy)); | 4665 Sandboxer(this).ll(T1, formMemoryOperand(ActualAddressR, DestTy)); |
| 4652 if (Operation == Intrinsics::AtomicExchange) { | 4666 if (Operation == Intrinsics::AtomicExchange) { |
| 4653 _mov(T2, NewR); | 4667 _mov(T2, NewR); |
| 4654 } else { | 4668 } else { |
| 4655 createArithInst(Operation, T2, T1, NewR); | 4669 createArithInst(Operation, T2, T1, NewR); |
| 4656 } | 4670 } |
| 4657 _sc(T2, formMemoryOperand(ActualAddressR, DestTy)); | 4671 Sandboxer(this).sc(T2, formMemoryOperand(ActualAddressR, DestTy)); |
| 4658 _br(NoTarget, NoTarget, T2, getZero(), Retry, CondMIPS32::Cond::EQ); | 4672 _br(NoTarget, NoTarget, T2, getZero(), Retry, CondMIPS32::Cond::EQ); |
| 4659 Context.insert<InstFakeUse>(getZero()); | 4673 Context.insert<InstFakeUse>(getZero()); |
| 4660 _mov(Dest, T1); | 4674 _mov(Dest, T1); |
| 4661 _sync(); | 4675 _sync(); |
| 4662 Context.insert<InstFakeUse>(NewR); | 4676 Context.insert<InstFakeUse>(NewR); |
| 4663 Context.insert<InstFakeUse>(Dest); | 4677 Context.insert<InstFakeUse>(Dest); |
| 4664 } | 4678 } |
| 4665 return; | 4679 return; |
| 4666 } | 4680 } |
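
Both the i8/i16 cmpxchg and RMW paths above work on the aligned word that contains the element: they compute a byte offset, build an element mask and its complement, run the ll/sc loop on the whole word, and finally sign-extend the extracted element with the sll/sra pair. A plain C++ sketch of that mask and shift bookkeeping (names are illustrative, and a little-endian layout is assumed, as on MIPS32EL):

  #include <cstdint>

  struct SubwordAccess {
    uint32_t AlignedAddr; // _addiu -4 / _and: word containing the element
    uint32_t Shift;       // _andi 3 / _sll 3: bit offset of the element
    uint32_t Mask;        // _ori / _sllv: element mask shifted into position
    uint32_t InvMask;     // _nor: selects the untouched bytes of the word
  };

  SubwordAccess computeSubwordAccess(uint32_t Addr, uint32_t WidthBytes) {
    SubwordAccess A;
    A.AlignedAddr = Addr & ~UINT32_C(3);
    A.Shift = (Addr & 3u) * 8u;
    A.Mask = ((UINT32_C(1) << (8u * WidthBytes)) - 1u) << A.Shift;
    A.InvMask = ~A.Mask;
    return A;
  }
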
| 4667 case Intrinsics::AtomicFence: | 4681 case Intrinsics::AtomicFence: |
| (...skipping 10 matching lines...) |
| 4678 Func->setError("AtomicIsLockFree byte size should be compile-time const"); | 4692 Func->setError("AtomicIsLockFree byte size should be compile-time const"); |
| 4679 return; | 4693 return; |
| 4680 } | 4694 } |
| 4681 static constexpr int32_t NotLockFree = 0; | 4695 static constexpr int32_t NotLockFree = 0; |
| 4682 static constexpr int32_t LockFree = 1; | 4696 static constexpr int32_t LockFree = 1; |
| 4683 int32_t Result = NotLockFree; | 4697 int32_t Result = NotLockFree; |
| 4684 switch (CI->getValue()) { | 4698 switch (CI->getValue()) { |
| 4685 case 1: | 4699 case 1: |
| 4686 case 2: | 4700 case 2: |
| 4687 case 4: | 4701 case 4: |
| 4688 case 8: | |
| 4689 Result = LockFree; | 4702 Result = LockFree; |
| 4690 break; | 4703 break; |
| 4691 } | 4704 } |
| 4692 _addiu(T, getZero(), Result); | 4705 _addiu(T, getZero(), Result); |
| 4693 _mov(Dest, T); | 4706 _mov(Dest, T); |
| 4694 return; | 4707 return; |
| 4695 } | 4708 } |
| 4696 case Intrinsics::Bswap: { | 4709 case Intrinsics::Bswap: { |
| 4697 auto *Src = Instr->getArg(0); | 4710 auto *Src = Instr->getArg(0); |
| 4698 const Type SrcTy = Src->getType(); | 4711 const Type SrcTy = Src->getType(); |
| (...skipping 211 matching lines...) |
| 4910 } | 4923 } |
| 4911 case Intrinsics::Memmove: { | 4924 case Intrinsics::Memmove: { |
| 4912 llvm::report_fatal_error("memmove should have been prelowered."); | 4925 llvm::report_fatal_error("memmove should have been prelowered."); |
| 4913 return; | 4926 return; |
| 4914 } | 4927 } |
| 4915 case Intrinsics::Memset: { | 4928 case Intrinsics::Memset: { |
| 4916 llvm::report_fatal_error("memset should have been prelowered."); | 4929 llvm::report_fatal_error("memset should have been prelowered."); |
| 4917 return; | 4930 return; |
| 4918 } | 4931 } |
| 4919 case Intrinsics::NaClReadTP: { | 4932 case Intrinsics::NaClReadTP: { |
| 4920 if (getFlags().getUseSandboxing()) { | 4933 if (SandboxingType != ST_NaCl) |
| 4921 UnimplementedLoweringError(this, Instr); | 4934 llvm::report_fatal_error("nacl-read-tp should have been prelowered."); |
| 4922 } else { | 4935 else { |
| 4923 InstCall *Call = | 4936 auto *T8 = makeReg(IceType_i32, RegMIPS32::Reg_T8); |
| 4924 makeHelperCall(RuntimeHelper::H_call_read_tp, Instr->getDest(), 0); | 4937 Context.insert<InstFakeDef>(T8); |
| 4925 lowerCall(Call); | 4938 Variable *TP = legalizeToReg(OperandMIPS32Mem::create( |
| 4939 Func, getPointerType(), T8, |
| 4940 llvm::cast<ConstantInteger32>(Ctx->getConstantZero(IceType_i32)))); |
| 4941 _mov(Dest, TP); |
| 4926 } | 4942 } |
| 4927 return; | 4943 return; |
| 4928 } | 4944 } |
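
The reworked NaClReadTP case handles the ST_NaCl configuration directly instead of reporting the lowering as unimplemented: it loads the thread pointer from offset 0 of the base the NaCl runtime keeps in $t8, which is why T8 is only fake-defined here. A symbolic one-liner for what the emitted load computes (the function name is made up for illustration):

  #include <cstdint>

  // Conceptually: lw Dest, 0($t8)
  inline uint32_t naclReadTP(const uint32_t *T8Base) { return *T8Base; }
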
| 4929 case Intrinsics::Setjmp: { | 4945 case Intrinsics::Setjmp: { |
| 4930 llvm::report_fatal_error("setjmp should have been prelowered."); | 4946 llvm::report_fatal_error("setjmp should have been prelowered."); |
| 4931 return; | 4947 return; |
| 4932 } | 4948 } |
| 4933 case Intrinsics::Sqrt: { | 4949 case Intrinsics::Sqrt: { |
| 4934 if (isScalarFloatingType(DestTy)) { | 4950 if (isScalarFloatingType(DestTy)) { |
| 4935 Variable *T = makeReg(DestTy); | 4951 Variable *T = makeReg(DestTy); |
| (...skipping 949 matching lines...) |
| 5885 void TargetMIPS32::Sandboxer::createAutoBundle() { | 5901 void TargetMIPS32::Sandboxer::createAutoBundle() { |
| 5886 Bundler = makeUnique<AutoBundle>(Target, BundleOption); | 5902 Bundler = makeUnique<AutoBundle>(Target, BundleOption); |
| 5887 } | 5903 } |
| 5888 | 5904 |
| 5889 void TargetMIPS32::Sandboxer::addiu_sp(uint32_t StackOffset) { | 5905 void TargetMIPS32::Sandboxer::addiu_sp(uint32_t StackOffset) { |
| 5890 Variable *SP = Target->getPhysicalRegister(RegMIPS32::Reg_SP); | 5906 Variable *SP = Target->getPhysicalRegister(RegMIPS32::Reg_SP); |
| 5891 if (!Target->NeedSandboxing) { | 5907 if (!Target->NeedSandboxing) { |
| 5892 Target->_addiu(SP, SP, StackOffset); | 5908 Target->_addiu(SP, SP, StackOffset); |
| 5893 return; | 5909 return; |
| 5894 } | 5910 } |
| 5895 Variable *T7 = Target->getPhysicalRegister(RegMIPS32::Reg_T7); | 5911 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5912 Target->Context.insert<InstFakeDef>(T7); |
| 5896 createAutoBundle(); | 5913 createAutoBundle(); |
| 5897 Target->_addiu(SP, SP, StackOffset); | 5914 Target->_addiu(SP, SP, StackOffset); |
| 5898 Target->_and(SP, SP, T7); | 5915 Target->_and(SP, SP, T7); |
| 5899 } | 5916 } |
| 5900 | 5917 |
| 5901 void TargetMIPS32::Sandboxer::lw(Variable *Dest, OperandMIPS32Mem *Mem) { | 5918 void TargetMIPS32::Sandboxer::lw(Variable *Dest, OperandMIPS32Mem *Mem) { |
| 5902 Variable *Base = Mem->getBase(); | 5919 Variable *Base = Mem->getBase(); |
| 5903 Variable *T7 = Target->getPhysicalRegister(RegMIPS32::Reg_T7); | 5920 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum()) && |
| 5904 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { | 5921 (RegMIPS32::Reg_T8 != Base->getRegNum())) { |
| 5922 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5923 Target->Context.insert<InstFakeDef>(T7); |
| 5905 createAutoBundle(); | 5924 createAutoBundle(); |
| 5906 Target->_and(Base, Base, T7); | 5925 Target->_and(Base, Base, T7); |
| 5907 } | 5926 } |
| 5908 Target->_lw(Dest, Mem); | 5927 Target->_lw(Dest, Mem); |
| 5909 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) | 5928 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) { |
| 5929 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5930 Target->Context.insert<InstFakeDef>(T7); |
| 5910 Target->_and(Dest, Dest, T7); | 5931 Target->_and(Dest, Dest, T7); |
| 5932 } |
| 5933 } |
| 5934 |
| 5935 void TargetMIPS32::Sandboxer::ll(Variable *Dest, OperandMIPS32Mem *Mem) { |
| 5936 Variable *Base = Mem->getBase(); |
| 5937 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { |
| 5938 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5939 Target->Context.insert<InstFakeDef>(T7); |
| 5940 createAutoBundle(); |
| 5941 Target->_and(Base, Base, T7); |
| 5942 } |
| 5943 Target->_ll(Dest, Mem); |
| 5944 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) { |
| 5945 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5946 Target->Context.insert<InstFakeDef>(T7); |
| 5947 Target->_and(Dest, Dest, T7); |
| 5948 } |
| 5949 } |
| 5950 |
| 5951 void TargetMIPS32::Sandboxer::sc(Variable *Dest, OperandMIPS32Mem *Mem) { |
| 5952 Variable *Base = Mem->getBase(); |
| 5953 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { |
| 5954 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5955 Target->Context.insert<InstFakeDef>(T7); |
| 5956 createAutoBundle(); |
| 5957 Target->_and(Base, Base, T7); |
| 5958 } |
| 5959 Target->_sc(Dest, Mem); |
| 5911 } | 5960 } |
| 5912 | 5961 |
| 5913 void TargetMIPS32::Sandboxer::sw(Variable *Dest, OperandMIPS32Mem *Mem) { | 5962 void TargetMIPS32::Sandboxer::sw(Variable *Dest, OperandMIPS32Mem *Mem) { |
| 5914 Variable *Base = Mem->getBase(); | 5963 Variable *Base = Mem->getBase(); |
| 5915 Variable *T7 = Target->getPhysicalRegister(RegMIPS32::Reg_T7); | |
| 5916 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { | 5964 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { |
| 5965 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5966 Target->Context.insert<InstFakeDef>(T7); |
| 5917 createAutoBundle(); | 5967 createAutoBundle(); |
| 5918 Target->_and(Base, Base, T7); | 5968 Target->_and(Base, Base, T7); |
| 5919 } | 5969 } |
| 5920 Target->_sw(Dest, Mem); | 5970 Target->_sw(Dest, Mem); |
| 5921 } | 5971 } |
| 5922 | 5972 |
| 5923 void TargetMIPS32::Sandboxer::lwc1(Variable *Dest, OperandMIPS32Mem *Mem, | 5973 void TargetMIPS32::Sandboxer::lwc1(Variable *Dest, OperandMIPS32Mem *Mem, |
| 5924 RelocOp Reloc) { | 5974 RelocOp Reloc) { |
| 5925 Variable *Base = Mem->getBase(); | 5975 Variable *Base = Mem->getBase(); |
| 5926 Variable *T7 = Target->getPhysicalRegister(RegMIPS32::Reg_T7); | |
| 5927 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { | 5976 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { |
| 5977 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5978 Target->Context.insert<InstFakeDef>(T7); |
| 5928 createAutoBundle(); | 5979 createAutoBundle(); |
| 5929 Target->_and(Base, Base, T7); | 5980 Target->_and(Base, Base, T7); |
| 5930 } | 5981 } |
| 5931 Target->_lwc1(Dest, Mem, Reloc); | 5982 Target->_lwc1(Dest, Mem, Reloc); |
| 5932 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) | 5983 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) { |
| 5984 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5985 Target->Context.insert<InstFakeDef>(T7); |
| 5933 Target->_and(Dest, Dest, T7); | 5986 Target->_and(Dest, Dest, T7); |
| 5987 } |
| 5934 } | 5988 } |
| 5935 | 5989 |
| 5936 void TargetMIPS32::Sandboxer::ldc1(Variable *Dest, OperandMIPS32Mem *Mem, | 5990 void TargetMIPS32::Sandboxer::ldc1(Variable *Dest, OperandMIPS32Mem *Mem, |
| 5937 RelocOp Reloc) { | 5991 RelocOp Reloc) { |
| 5938 Variable *Base = Mem->getBase(); | 5992 Variable *Base = Mem->getBase(); |
| 5939 Variable *T7 = Target->getPhysicalRegister(RegMIPS32::Reg_T7); | |
| 5940 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { | 5993 if (Target->NeedSandboxing && (Target->getStackReg() != Base->getRegNum())) { |
| 5994 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 5995 Target->Context.insert<InstFakeDef>(T7); |
| 5941 createAutoBundle(); | 5996 createAutoBundle(); |
| 5942 Target->_and(Base, Base, T7); | 5997 Target->_and(Base, Base, T7); |
| 5943 } | 5998 } |
| 5944 Target->_ldc1(Dest, Mem, Reloc); | 5999 Target->_ldc1(Dest, Mem, Reloc); |
| 5945 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) | 6000 if (Target->NeedSandboxing && (Dest->getRegNum() == Target->getStackReg())) { |
| 6001 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 6002 Target->Context.insert<InstFakeDef>(T7); |
| 5946 Target->_and(Dest, Dest, T7); | 6003 Target->_and(Dest, Dest, T7); |
| 6004 } |
| 5947 } | 6005 } |
| 5948 | 6006 |
| 5949 void TargetMIPS32::Sandboxer::ret(Variable *RetAddr, Variable *RetValue) { | 6007 void TargetMIPS32::Sandboxer::ret(Variable *RetAddr, Variable *RetValue) { |
| 5950 if (!Target->NeedSandboxing) { | 6008 if (!Target->NeedSandboxing) { |
| 5951 Target->_ret(RetAddr, RetValue); | 6009 Target->_ret(RetAddr, RetValue); |
| 5952 } | 6010 } |
| 5953 Variable *T6 = Target->getPhysicalRegister(RegMIPS32::Reg_T6); | 6011 auto *T6 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T6); |
| 6012 Target->Context.insert<InstFakeDef>(T6); |
| 5954 createAutoBundle(); | 6013 createAutoBundle(); |
| 5955 Target->_and(RetAddr, RetAddr, T6); | 6014 Target->_and(RetAddr, RetAddr, T6); |
| 5956 Target->_ret(RetAddr, RetValue); | 6015 Target->_ret(RetAddr, RetValue); |
| 5957 } | 6016 } |
| 5958 | 6017 |
| 5959 void TargetMIPS32::Sandboxer::reset_sp(Variable *Src) { | 6018 void TargetMIPS32::Sandboxer::reset_sp(Variable *Src) { |
| 5960 Variable *SP = Target->getPhysicalRegister(RegMIPS32::Reg_SP); | 6019 Variable *SP = Target->getPhysicalRegister(RegMIPS32::Reg_SP); |
| 5961 if (!Target->NeedSandboxing) { | 6020 if (!Target->NeedSandboxing) { |
| 5962 Target->_mov(SP, Src); | 6021 Target->_mov(SP, Src); |
| 5963 return; | 6022 return; |
| 5964 } | 6023 } |
| 5965 Variable *T7 = Target->getPhysicalRegister(RegMIPS32::Reg_T7); | 6024 auto *T7 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T7); |
| 6025 Target->Context.insert<InstFakeDef>(T7); |
| 5966 createAutoBundle(); | 6026 createAutoBundle(); |
| 5967 Target->_mov(SP, Src); | 6027 Target->_mov(SP, Src); |
| 5968 Target->_and(SP, SP, T7); | 6028 Target->_and(SP, SP, T7); |
| 5969 Target->getContext().insert<InstFakeUse>(SP); | 6029 Target->getContext().insert<InstFakeUse>(SP); |
| 5970 } | 6030 } |
| 5971 | 6031 |
| 5972 InstMIPS32Call *TargetMIPS32::Sandboxer::jal(Variable *ReturnReg, | 6032 InstMIPS32Call *TargetMIPS32::Sandboxer::jal(Variable *ReturnReg, |
| 5973 Operand *CallTarget) { | 6033 Operand *CallTarget) { |
| 5974 if (Target->NeedSandboxing) { | 6034 if (Target->NeedSandboxing) { |
| 5975 createAutoBundle(); | 6035 createAutoBundle(); |
| 5976 if (auto *CallTargetR = llvm::dyn_cast<Variable>(CallTarget)) { | 6036 if (auto *CallTargetR = llvm::dyn_cast<Variable>(CallTarget)) { |
| 5977 Variable *T6 = Target->getPhysicalRegister(RegMIPS32::Reg_T6); | 6037 auto *T6 = Target->makeReg(IceType_i32, RegMIPS32::Reg_T6); |
| 6038 Target->Context.insert<InstFakeDef>(T6); |
| 5978 Target->_and(CallTargetR, CallTargetR, T6); | 6039 Target->_and(CallTargetR, CallTargetR, T6); |
| 5979 } | 6040 } |
| 5980 } | 6041 } |
| 5981 return Target->Context.insert<InstMIPS32Call>(ReturnReg, CallTarget); | 6042 return Target->Context.insert<InstMIPS32Call>(ReturnReg, CallTarget); |
| 5982 } | 6043 } |
| 5983 | 6044 |
| 5984 } // end of namespace MIPS32 | 6045 } // end of namespace MIPS32 |
| 5985 } // end of namespace Ice | 6046 } // end of namespace Ice |
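
Taken together, the Sandboxer changes have each helper create its own fake-defined mask register instead of fetching a shared physical-register handle: $t7 masks data addresses (sp adjustments and the base registers of lw/sw/ll/sc/lwc1/ldc1, with lw additionally leaving a $t8 base unmasked), while $t6 masks code addresses (jal targets and the return address in ret), and the masking and is normally bundled with the guarded instruction. A symbolic sketch of that discipline (the concrete mask values are established by the NaCl runtime and never appear in this file; the helper names are illustrative):

  #include <cstdint>

  // Data accesses: the base (or sp) is clamped with the value kept live in $t7.
  inline uint32_t sandboxDataAddr(uint32_t Addr, uint32_t DataMaskInT7) {
    return Addr & DataMaskInT7; // emitted as: and Base, Base, $t7
  }

  // Control transfers: call and return targets are clamped with $t6.
  inline uint32_t sandboxCodeAddr(uint32_t Target, uint32_t CodeMaskInT6) {
    return Target & CodeMaskInT6; // emitted as: and Rt, Rt, $t6 before jal / jr
  }
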