OLD | NEW |
1 //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// | 1 //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// |
2 // | 2 // |
3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
4 // | 4 // |
5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
7 // | 7 // |
8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
9 // | 9 // |
10 // This file implements the skeleton of the TargetLowering class, | 10 // This file implements the skeleton of the TargetLowering class, |
(...skipping 226 matching lines...)
237 if (Variable *Dest = Inst->getDest()) { | 237 if (Variable *Dest = Inst->getDest()) { |
238 // TODO(stichnot): We may need to consider all source | 238 // TODO(stichnot): We may need to consider all source |
239 // operands, not just the first one, if using 3-address | 239 // operands, not just the first one, if using 3-address |
240 // instructions. | 240 // instructions. |
241 if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest) | 241 if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest) |
242 Inst->setDestNonKillable(); | 242 Inst->setDestNonKillable(); |
243 } | 243 } |
244 } | 244 } |
245 } | 245 } |
246 | 246 |
| 247 void TargetLowering::sortVarsByAlignment(VarList &Dest, |
| 248 const VarList &Source) const { |
| 249 // Sort the variables into buckets according to the log2 of their |
| 250 // on-stack width in bytes. |
| 251 const SizeT NumBuckets = maxStackSlotSizeLog2() - minStackSlotSizeLog2() + 1; |
| 252 llvm::SmallVector<VarList, 10> Buckets; |
| 253 Buckets.resize(NumBuckets); |
| 254 |
| 255 for (Variable *Var : Source) { |
| 256 uint32_t NaturalAlignment = typeWidthInBytesOnStack(Var->getType()); |
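| // The on-stack width is assumed to be a power of two, so the index of |
| // its lowest set bit is exactly log2 of the width (the asserts below |
| // rely on this). |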
| 257 SizeT LogNaturalAlignment = llvm::findFirstSet(NaturalAlignment); |
| 258 assert(LogNaturalAlignment >= minStackSlotSizeLog2()); |
| 259 assert(LogNaturalAlignment <= maxStackSlotSizeLog2()); |
| 260 SizeT BucketIndex = LogNaturalAlignment - minStackSlotSizeLog2(); |
| 261 Buckets[BucketIndex].push_back(Var); |
| 262 } |
| 263 |
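| // Emit the buckets from largest to smallest, so the most strictly |
| // aligned variables come first in Dest. |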
| 264 for (SizeT I = 0, E = NumBuckets; I < E; ++I) { |
| 265 VarList &List = Buckets[NumBuckets - I - 1]; |
| 266 Dest.insert(Dest.end(), List.begin(), List.end()); |
| 267 } |
| 268 } |
| 269 |
| 270 void TargetLowering::getVarStackSlotParams( |
| 271 VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed, |
| 272 size_t *GlobalsSize, size_t *SpillAreaSizeBytes, |
| 273 uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes, |
| 274 std::function<bool(Variable *)> TargetVarHook) { |
| 275 const VariablesMetadata *VMetadata = Func->getVMetadata(); |
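| // Mark which variables are referenced by at least one non-deleted |
| // instruction; unreferenced variables will not get stack slots. |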
| 276 llvm::BitVector IsVarReferenced(Func->getNumVariables()); |
| 277 for (CfgNode *Node : Func->getNodes()) { |
| 278 for (Inst &Instr : Node->getInsts()) { |
| 279 if (Instr.isDeleted()) |
| 280 continue; |
| 281 if (const Variable *Var = Instr.getDest()) |
| 282 IsVarReferenced[Var->getIndex()] = true; |
| 283 for (SizeT I = 0; I < Instr.getSrcSize(); ++I) { |
| 284 Operand *Src = Instr.getSrc(I); |
| 285 SizeT NumVars = Src->getNumVars(); |
| 286 for (SizeT J = 0; J < NumVars; ++J) { |
| 287 const Variable *Var = Src->getVar(J); |
| 288 IsVarReferenced[Var->getIndex()] = true; |
| 289 } |
| 290 } |
| 291 } |
| 292 } |
| 293 |
| 294 // If SimpleCoalescing is false, each variable without a register |
| 295 // gets its own unique stack slot, which leads to large stack |
| 296 // frames. If SimpleCoalescing is true, then each "global" variable |
| 297 // without a register gets its own slot, but "local" variable slots |
| 298 // are reused across basic blocks. E.g., if A and B are local to |
| 299 // block 1 and C is local to block 2, then C may share a slot with A or B. |
| 300 // |
| 301 // We cannot coalesce stack slots if this function calls a "returns twice" |
| 302 // function. In that case, basic blocks may be revisited, and variables |
| 303 // local to those basic blocks are actually live until after the |
| 304 // called function returns a second time. |
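| // (The canonical "returns twice" function is setjmp().) |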
| 305 const bool SimpleCoalescing = !callsReturnsTwice(); |
| 306 |
| 307 std::vector<size_t> LocalsSize(Func->getNumNodes()); |
| 308 const VarList &Variables = Func->getVariables(); |
| 309 VarList SpilledVariables; |
| 310 for (Variable *Var : Variables) { |
| 311 if (Var->hasReg()) { |
| 312 RegsUsed[Var->getRegNum()] = true; |
| 313 continue; |
| 314 } |
| 315 // An argument either does not need a stack slot (if passed in a |
| 316 // register) or already has one (if passed on the stack). |
| 317 if (Var->getIsArg()) |
| 318 continue; |
| 319 // An unreferenced variable doesn't need a stack slot. |
| 320 if (!IsVarReferenced[Var->getIndex()]) |
| 321 continue; |
| 322 // Let the target check for a target-specific variable; such a variable |
| 323 // may end up sharing stack slots and so needs no accounting here. |
| 324 if (TargetVarHook(Var)) |
| 325 continue; |
| 326 SpilledVariables.push_back(Var); |
| 327 } |
| 328 |
| 329 SortedSpilledVariables.reserve(SpilledVariables.size()); |
| 330 sortVarsByAlignment(SortedSpilledVariables, SpilledVariables); |
| 331 |
| 332 for (Variable *Var : SortedSpilledVariables) { |
| 333 size_t Increment = typeWidthInBytesOnStack(Var->getType()); |
| 334 // We have sorted by alignment, so the first variable we encounter in |
| 335 // each area determines the maximum alignment for that area. |
| 336 if (!*SpillAreaAlignmentBytes) |
| 337 *SpillAreaAlignmentBytes = Increment; |
| 338 if (SimpleCoalescing && VMetadata->isTracked(Var)) { |
| 339 if (VMetadata->isMultiBlock(Var)) { |
| 340 *GlobalsSize += Increment; |
| 341 } else { |
| 342 SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex(); |
| 343 LocalsSize[NodeIndex] += Increment; |
| 344 if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes) |
| 345 *SpillAreaSizeBytes = LocalsSize[NodeIndex]; |
| 346 if (!*LocalsSlotsAlignmentBytes) |
| 347 *LocalsSlotsAlignmentBytes = Increment; |
| 348 } |
| 349 } else { |
| 350 *SpillAreaSizeBytes += Increment; |
| 351 } |
| 352 } |
| 353 } |
| 354 |
| 355 void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables, |
| 356 size_t SpillAreaStart, |
| 357 size_t SpillAreaSizeBytes, |
| 358 size_t GlobalsAndSubsequentPaddingSize, |
| 359 bool UsesFramePointer) { |
| 360 const VariablesMetadata *VMetadata = Func->getVMetadata(); |
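| // GlobalsSpaceUsed tracks the next free offset in the globals area; |
| // NextStackOffset tracks the offset of the variable currently being |
| // assigned, whichever area it lands in. |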
| 361 size_t GlobalsSpaceUsed = SpillAreaStart; |
| 362 size_t NextStackOffset = SpillAreaStart; |
| 363 std::vector<size_t> LocalsSize(Func->getNumNodes()); |
| 364 const bool SimpleCoalescing = !callsReturnsTwice(); |
| 365 for (Variable *Var : SortedSpilledVariables) { |
| 366 size_t Increment = typeWidthInBytesOnStack(Var->getType()); |
| 367 if (SimpleCoalescing && VMetadata->isTracked(Var)) { |
| 368 if (VMetadata->isMultiBlock(Var)) { |
| 369 GlobalsSpaceUsed += Increment; |
| 370 NextStackOffset = GlobalsSpaceUsed; |
| 371 } else { |
| 372 SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex(); |
| 373 LocalsSize[NodeIndex] += Increment; |
| 374 NextStackOffset = SpillAreaStart + GlobalsAndSubsequentPaddingSize + |
| 375 LocalsSize[NodeIndex]; |
| 376 } |
| 377 } else { |
| 378 NextStackOffset += Increment; |
| 379 } |
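| // With a frame pointer, spill slots are addressed at negative offsets |
| // from it; without one, they are addressed at positive offsets from the |
| // stack pointer. |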
| 380 if (UsesFramePointer) |
| 381 Var->setStackOffset(-NextStackOffset); |
| 382 else |
| 383 Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset); |
| 384 } |
| 385 } |
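| |
| // A minimal sketch (not part of this CL) of how a target's addProlog() |
| // might drive the two helpers above; RegsUsed sizing, the padding math, |
| // and the no-op TargetVarHook lambda are illustrative: |
| // |
| //   VarList SortedSpilledVariables; |
| //   llvm::SmallBitVector RegsUsed(NumRegistersForMyTarget); |
| //   size_t GlobalsSize = 0, SpillAreaSizeBytes = 0; |
| //   uint32_t SpillAreaAlignmentBytes = 0, LocalsSlotsAlignmentBytes = 0; |
| //   getVarStackSlotParams(SortedSpilledVariables, RegsUsed, &GlobalsSize, |
| //                         &SpillAreaSizeBytes, &SpillAreaAlignmentBytes, |
| //                         &LocalsSlotsAlignmentBytes, |
| //                         [](Variable *) { return false; }); |
| //   // ...align the areas, emit frame-setup code, then compute |
| //   // SpillAreaStart and GlobalsAndSubsequentPaddingSize... |
| //   assignVarStackSlots(SortedSpilledVariables, SpillAreaStart, |
| //                       SpillAreaSizeBytes, GlobalsAndSubsequentPaddingSize, |
| //                       UsesFramePointer); |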
| 386 |
247 InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest, | 387 InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest, |
248 SizeT MaxSrcs) { | 388 SizeT MaxSrcs) { |
249 const bool HasTailCall = false; | 389 const bool HasTailCall = false; |
250 Constant *CallTarget = Ctx->getConstantExternSym(Name); | 390 Constant *CallTarget = Ctx->getConstantExternSym(Name); |
251 InstCall *Call = | 391 InstCall *Call = |
252 InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); | 392 InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); |
253 return Call; | 393 return Call; |
254 } | 394 } |
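| |
| // Typical use of makeHelperCall(), e.g. when lowering a 64-bit unsigned |
| // divide on a 32-bit target (helper and operand names are illustrative): |
| // |
| //   InstCall *Call = makeHelperCall("__udivdi3", Dest, 2); |
| //   Call->addArg(Dividend); |
| //   Call->addArg(Divisor); |
| //   lowerCall(Call); |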
255 | 395 |
256 void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const { | 396 void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const { |
(...skipping 28 matching lines...)
285 return std::unique_ptr<TargetDataLowering>(TargetData##X::create(Ctx)); | 425 return std::unique_ptr<TargetDataLowering>(TargetData##X::create(Ctx)); |
286 #include "llvm/Config/SZTargets.def" | 426 #include "llvm/Config/SZTargets.def" |
287 | 427 |
288 llvm_unreachable("Unsupported target data lowering"); | 428 llvm_unreachable("Unsupported target data lowering"); |
289 return nullptr; | 429 return nullptr; |
290 } | 430 } |
291 | 431 |
292 TargetDataLowering::~TargetDataLowering() {} | 432 TargetDataLowering::~TargetDataLowering() {} |
293 | 433 |
294 } // end of namespace Ice | 434 } // end of namespace Ice |