| OLD | NEW |
|---|---|
| 1 //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// | 1 //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// |
| 2 // | 2 // |
| 3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
| 4 // | 4 // |
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
| 7 // | 7 // |
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
| 9 // | 9 // |
| 10 // This file implements the skeleton of the TargetLowering class, | 10 // This file implements the skeleton of the TargetLowering class, |
| (...skipping 226 matching lines...) | |
| 237 if (Variable *Dest = Inst->getDest()) { | 237 if (Variable *Dest = Inst->getDest()) { |
| 238 // TODO(stichnot): We may need to consider all source | 238 // TODO(stichnot): We may need to consider all source |
| 239 // operands, not just the first one, if using 3-address | 239 // operands, not just the first one, if using 3-address |
| 240 // instructions. | 240 // instructions. |
| 241 if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest) | 241 if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest) |
| 242 Inst->setDestNonKillable(); | 242 Inst->setDestNonKillable(); |
| 243 } | 243 } |
| 244 } | 244 } |
| 245 } | 245 } |
| 246 | 246 |
| 247 void TargetLowering::sortVarsByAlignment(VarList &Dest, | |
| 248 const VarList &Source) const { | |
| 249 // Sort the variables into buckets according to the log of their width | |

> Jim Stichnoth (2015/05/30 16:58:54): I realize this is just code being moved around, bu
> jvoung (off chromium) (2015/06/01 18:00:23): Done.

| 250 // in bytes. | |
| 251 const SizeT NumBuckets = maxStackSlotSizeLog2() - minStackSlotSizeLog2() + 1; | |
| 252 llvm::SmallVector<VarList, 10> Buckets; | |
| 253 Buckets.resize(NumBuckets); | |
| 254 | |
| 255 for (Variable *Var : Source) { | |
| 256 uint32_t NaturalAlignment = typeWidthInBytesOnStack(Var->getType()); | |
| 257 SizeT LogNaturalAlignment = llvm::findFirstSet(NaturalAlignment); | |
| 258 assert(LogNaturalAlignment >= minStackSlotSizeLog2()); | |
| 259 assert(LogNaturalAlignment <= maxStackSlotSizeLog2()); | |
| 260 SizeT BucketIndex = LogNaturalAlignment - minStackSlotSizeLog2(); | |
| 261 Buckets[BucketIndex].push_back(Var); | |
| 262 } | |
| 263 | |
| 264 for (SizeT I = 0, E = NumBuckets; I < E; ++I) { | |
| 265 VarList &List = Buckets[NumBuckets - I - 1]; | |
| 266 Dest.insert(Dest.end(), List.begin(), List.end()); | |
| 267 } | |
| 268 } | |
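
> Note: the function above is a counting sort keyed on log2 of each variable's on-stack width, emitted widest-bucket first so the most-aligned variables land at the start of the spill area. A minimal standalone sketch of the same idea; the `Var` type, the 4- and 16-byte slot bounds, and the helper names are illustrative assumptions, not Subzero's API:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Illustrative stand-ins for Subzero's Variable/VarList (assumptions).
struct Var {
  uint32_t WidthOnStackBytes; // always a power of two
};
using VarList = std::vector<Var *>;

constexpr uint32_t MinSlotLog2 = 2; // assume a 4-byte minimum stack slot
constexpr uint32_t MaxSlotLog2 = 4; // assume a 16-byte maximum stack slot

// log2 of a power-of-two width (what llvm::findFirstSet yields for one).
static uint32_t log2PowerOfTwo(uint32_t W) {
  uint32_t L = 0;
  while ((W >>= 1) != 0)
    ++L;
  return L;
}

// Counting sort into one bucket per alignment, then emit widest-first so
// the most-aligned variables end up at the start of the spill area.
static void sortVarsByAlignment(VarList &Dest, const VarList &Source) {
  std::vector<VarList> Buckets(MaxSlotLog2 - MinSlotLog2 + 1);
  for (Var *V : Source) {
    uint32_t Log = log2PowerOfTwo(V->WidthOnStackBytes);
    assert(Log >= MinSlotLog2 && Log <= MaxSlotLog2);
    Buckets[Log - MinSlotLog2].push_back(V);
  }
  for (size_t I = Buckets.size(); I-- > 0;)
    Dest.insert(Dest.end(), Buckets[I].begin(), Buckets[I].end());
}
```
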
| 269 | |
| 270 void TargetLowering::getVarStackSlotParams( | |
| 271 VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed, | |
| 272 size_t *GlobalsSize, size_t *SpillAreaSizeBytes, | |
| 273 uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes, | |
| 274 std::function<bool(Variable *)> TargetVarHook) { | |
| 275 const VariablesMetadata *VMetadata = Func->getVMetadata(); | |
| 276 llvm::BitVector IsVarReferenced(Func->getNumVariables()); | |
| 277 for (CfgNode *Node : Func->getNodes()) { | |
| 278 for (Inst &Inst : Node->getInsts()) { | |
| 279 if (Inst.isDeleted()) | |
| 280 continue; | |
| 281 if (const Variable *Var = Inst.getDest()) | |
| 282 IsVarReferenced[Var->getIndex()] = true; | |
| 283 for (SizeT I = 0; I < Inst.getSrcSize(); ++I) { | |
| 284 Operand *Src = Inst.getSrc(I); | |
| 285 SizeT NumVars = Src->getNumVars(); | |
| 286 for (SizeT J = 0; J < NumVars; ++J) { | |
| 287 const Variable *Var = Src->getVar(J); | |
| 288 IsVarReferenced[Var->getIndex()] = true; | |
| 289 } | |
| 290 } | |
| 291 } | |
| 292 } | |
| 293 | |
| 294 // If SimpleCoalescing is false, each variable without a register | |
| 295 // gets its own unique stack slot, which leads to large stack | |
| 296 // frames. If SimpleCoalescing is true, then each "global" variable | |
| 297 // without a register gets its own slot, but "local" variable slots | |
| 298 // are reused across basic blocks. E.g., if A and B are local to | |
| 299 // block 1 and C is local to block 2, then C may share a slot with A or B. | |
| 300 // | |
| 301 // We cannot coalesce stack slots if this function calls a "returns twice" | |
| 302 // function. In that case, basic blocks may be revisited, and variables | |
| 303 // local to those basic blocks are actually live until after the | |
| 304 // called function returns a second time. | |
| 305 const bool SimpleCoalescing = !callsReturnsTwice(); | |
| 306 | |
| 307 std::vector<size_t> LocalsSize(Func->getNumNodes()); | |
| 308 const VarList &Variables = Func->getVariables(); | |
| 309 VarList SpilledVariables; | |
| 310 for (Variable *Var : Variables) { | |
| 311 if (Var->hasReg()) { | |
| 312 RegsUsed[Var->getRegNum()] = true; | |
| 313 continue; | |
| 314 } | |
| 315 // An argument either does not need a stack slot (if passed in a | |
| 316 // register) or already has one (if passed on the stack). | |
| 317 if (Var->getIsArg()) | |
| 318 continue; | |
| 319 // An unreferenced variable doesn't need a stack slot. | |
| 320 if (!IsVarReferenced[Var->getIndex()]) | |
| 321 continue; | |
| 322 // Check a target-specific variable (it may end up sharing stack slots) | |
| 323 // and not need accounting here. | |
| 324 if (TargetVarHook(Var)) | |
| 325 continue; | |
| 326 SpilledVariables.push_back(Var); | |
| 327 } | |
| 328 | |
| 329 SortedSpilledVariables.reserve(SpilledVariables.size()); | |
| 330 sortVarsByAlignment(SortedSpilledVariables, SpilledVariables); | |
| 331 | |
| 332 for (Variable *Var : SortedSpilledVariables) { | |
| 333 size_t Increment = typeWidthInBytesOnStack(Var->getType()); | |
| 334 // We have sorted by alignment, so the first variable we encounter that | |
| 335 // is located in each area determines the max alignment for the area. | |
| 336 if (!*SpillAreaAlignmentBytes) | |
| 337 *SpillAreaAlignmentBytes = Increment; | |
| 338 if (SimpleCoalescing && VMetadata->isTracked(Var)) { | |
| 339 if (VMetadata->isMultiBlock(Var)) { | |
| 340 *GlobalsSize += Increment; | |
| 341 } else { | |
| 342 SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex(); | |
| 343 LocalsSize[NodeIndex] += Increment; | |
| 344 if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes) | |
| 345 *SpillAreaSizeBytes = LocalsSize[NodeIndex]; | |
| 346 if (!*LocalsSlotsAlignmentBytes) | |
| 347 *LocalsSlotsAlignmentBytes = Increment; | |
| 348 } | |
| 349 } else { | |
| 350 *SpillAreaSizeBytes += Increment; | |
| 351 } | |
| 352 } | |
| 353 } | |
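
> Note: getVarStackSlotParams only *sizes* the areas. Multi-block ("global") variables each get a dedicated slot, while single-block locals can overlay each other, so the locals area needs only the largest single block's total, not the sum. A small self-contained example of that max-not-sum arithmetic; the node indices and byte sizes are made up for illustration:

```cpp
#include <algorithm>
#include <cstddef>
#include <utility>
#include <vector>

int main() {
  // Two basic blocks; each entry is (block index, spilled-local size).
  std::vector<size_t> LocalsSize(2, 0);
  size_t SpillAreaSizeBytes = 0;
  const std::pair<size_t, size_t> Spills[] = {{0, 4}, {0, 8}, {1, 4}};
  for (const auto &[Node, Bytes] : Spills) {
    LocalsSize[Node] += Bytes;
    // The locals area is the max over blocks, not the sum: block 1's
    // 4-byte local reuses one of block 0's slots.
    SpillAreaSizeBytes = std::max(SpillAreaSizeBytes, LocalsSize[Node]);
  }
  return SpillAreaSizeBytes == 12 ? 0 : 1; // 4 + 8 from block 0 alone
}
```
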
| 354 | |
| 355 void TargetLowering::alignStackSpillAreas(uint32_t SpillAreaStartOffset, | |
| 356 uint32_t SpillAreaAlignmentBytes, | |
| 357 size_t GlobalsSize, | |
| 358 uint32_t LocalsSlotsAlignmentBytes, | |
| 359 uint32_t *SpillAreaPaddingBytes, | |
| 360 uint32_t *LocalsSlotsPaddingBytes) { | |
| 361 if (SpillAreaAlignmentBytes) { | |
| 362 uint32_t PaddingStart = SpillAreaStartOffset; | |
| 363 uint32_t SpillAreaStart = | |
| 364 Utils::applyAlignment(PaddingStart, SpillAreaAlignmentBytes); | |
| 365 *SpillAreaPaddingBytes = SpillAreaStart - PaddingStart; | |
| 366 } | |
| 367 | |
| 368 // If there are separate globals and locals areas, make sure the | |
| 369 // locals area is aligned by padding the end of the globals area. | |
| 370 if (LocalsSlotsAlignmentBytes) { | |
| 371 uint32_t GlobalsAndSubsequentPaddingSize = GlobalsSize; | |
| 372 GlobalsAndSubsequentPaddingSize = | |
| 373 Utils::applyAlignment(GlobalsSize, LocalsSlotsAlignmentBytes); | |
| 374 *LocalsSlotsPaddingBytes = GlobalsAndSubsequentPaddingSize - GlobalsSize; | |
| 375 } | |
| 376 } | |
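
> Note: both padding amounts come from rounding an offset up to the next multiple of an alignment. A standalone sketch with worked numbers, assuming Utils::applyAlignment is the usual round-up-to-a-power-of-two helper:

```cpp
#include <cstdint>

// Round Value up to the next multiple of Align (Align a power of two).
// Assumed to match the behavior of Utils::applyAlignment used above.
static uint32_t applyAlignment(uint32_t Value, uint32_t Align) {
  return (Value + Align - 1) & ~(Align - 1);
}

int main() {
  // A spill area starting at offset 12 with 16-byte alignment needs
  // 4 bytes of leading padding; a 20-byte globals area followed by
  // 8-byte-aligned locals needs 4 bytes of trailing padding.
  uint32_t SpillAreaPadding = applyAlignment(12, 16) - 12;  // == 4
  uint32_t LocalsSlotsPadding = applyAlignment(20, 8) - 20; // == 4
  return (SpillAreaPadding == 4 && LocalsSlotsPadding == 4) ? 0 : 1;
}
```
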
| 377 | |
| 378 void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables, | |
| 379 size_t SpillAreaPaddingBytes, | |
| 380 size_t SpillAreaSizeBytes, | |
| 381 size_t GlobalsAndSubsequentPaddingSize, | |
| 382 bool UsesFramePointer) { | |
| 383 const VariablesMetadata *VMetadata = Func->getVMetadata(); | |
| 384 size_t GlobalsSpaceUsed = SpillAreaPaddingBytes; | |
| 385 size_t NextStackOffset = SpillAreaPaddingBytes; | |
| 386 std::vector<size_t> LocalsSize(Func->getNumNodes()); | |
| 387 const bool SimpleCoalescing = !callsReturnsTwice(); | |
| 388 for (Variable *Var : SortedSpilledVariables) { | |
| 389 size_t Increment = typeWidthInBytesOnStack(Var->getType()); | |
| 390 if (SimpleCoalescing && VMetadata->isTracked(Var)) { | |
| 391 if (VMetadata->isMultiBlock(Var)) { | |
| 392 GlobalsSpaceUsed += Increment; | |
| 393 NextStackOffset = GlobalsSpaceUsed; | |
| 394 } else { | |
| 395 SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex(); | |
| 396 LocalsSize[NodeIndex] += Increment; | |
| 397 NextStackOffset = SpillAreaPaddingBytes + | |
| 398 GlobalsAndSubsequentPaddingSize + | |
| 399 LocalsSize[NodeIndex]; | |
| 400 } | |
| 401 } else { | |
| 402 NextStackOffset += Increment; | |
| 403 } | |
| 404 if (UsesFramePointer) | |
| 405 Var->setStackOffset(-NextStackOffset); | |
| 406 else | |
| 407 Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset); | |
| 408 } | |
| 409 } | |
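
> Note: the final offset is stored in one of two conventions: negative and frame-pointer-relative, or positive and stack-pointer-relative (the slot's distance from the bottom of the spill area). A tiny worked example; the 32-byte area size and 12-byte running offset are chosen arbitrarily:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  const int32_t SpillAreaSizeBytes = 32;
  const int32_t NextStackOffset = 12;
  // Frame-pointer based: slots grow downward from the frame pointer.
  int32_t FpRelative = -NextStackOffset; // e.g. [ebp - 12]
  // Stack-pointer based: the same slot, measured up from the stack pointer.
  int32_t SpRelative = SpillAreaSizeBytes - NextStackOffset; // [esp + 20]
  std::printf("fp%+d sp%+d\n", FpRelative, SpRelative);
  return 0;
}
```
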
| 410 | |
| 247 InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest, | 411 InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest, |
| 248 SizeT MaxSrcs) { | 412 SizeT MaxSrcs) { |
| 249 const bool HasTailCall = false; | 413 const bool HasTailCall = false; |
| 250 Constant *CallTarget = Ctx->getConstantExternSym(Name); | 414 Constant *CallTarget = Ctx->getConstantExternSym(Name); |
| 251 InstCall *Call = | 415 InstCall *Call = |
| 252 InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); | 416 InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); |
| 253 return Call; | 417 return Call; |
| 254 } | 418 } |
| 255 | 419 |
| 256 void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const { | 420 void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const { |
| (...skipping 28 matching lines...) | |
| 285 return std::unique_ptr<TargetDataLowering>(TargetData##X::create(Ctx)); | 449 return std::unique_ptr<TargetDataLowering>(TargetData##X::create(Ctx)); |
| 286 #include "llvm/Config/SZTargets.def" | 450 #include "llvm/Config/SZTargets.def" |
| 287 | 451 |
| 288 llvm_unreachable("Unsupported target data lowering"); | 452 llvm_unreachable("Unsupported target data lowering"); |
| 289 return nullptr; | 453 return nullptr; |
| 290 } | 454 } |
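
> Note: the factory dispatch relies on an X-macro: llvm/Config/SZTargets.def is included after defining a macro that expands once per configured target into a create call. A self-contained sketch of the pattern; the macro name SUBZERO_TARGET, the string-keyed dispatch, and the single X8632 target are assumptions for illustration (the real code keys off the target enum carried by Ctx):

```cpp
#include <memory>
#include <string>

struct TargetDataLowering {
  virtual ~TargetDataLowering() = default;
};
// One concrete target, standing in for each entry in SZTargets.def.
struct TargetDataX8632 : TargetDataLowering {
  static TargetDataLowering *create() { return new TargetDataX8632(); }
};

// In the real code, a macro like this is defined just before including
// llvm/Config/SZTargets.def, which invokes it once per configured target.
#define SUBZERO_TARGET(X)                                                      \
  if (Target == #X)                                                            \
    return std::unique_ptr<TargetDataLowering>(TargetData##X::create());

std::unique_ptr<TargetDataLowering>
createTargetDataLowering(const std::string &Target) {
  SUBZERO_TARGET(X8632) // the .def include expands one of these per target
  return nullptr;
}
#undef SUBZERO_TARGET
```
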
| 291 | 455 |
| 292 TargetDataLowering::~TargetDataLowering() {} | 456 TargetDataLowering::~TargetDataLowering() {} |
| 293 | 457 |
| 294 } // end of namespace Ice | 458 } // end of namespace Ice |