Index: src/IceTargetLowering.cpp
diff --git a/src/IceTargetLowering.cpp b/src/IceTargetLowering.cpp
index db332cbd0e8babeda952b8507e869202a14f9ed9..a41161b53fab300a52f55570cb8f076276b80c17 100644
--- a/src/IceTargetLowering.cpp
+++ b/src/IceTargetLowering.cpp
@@ -244,6 +244,146 @@ void TargetLowering::inferTwoAddress() {
   }
 }
 
+void TargetLowering::sortVarsByAlignment(VarList &Dest,
+                                         const VarList &Source) const {
+  // Sort the variables into buckets according to the log2 of their width in
+  // bytes on the stack, which is also their natural alignment.
+  const SizeT NumBuckets = maxStackSlotSizeLog2() - minStackSlotSizeLog2() + 1;
+  llvm::SmallVector<VarList, 10> Buckets;
+  Buckets.resize(NumBuckets);
+
+  for (Variable *Var : Source) {
+    uint32_t NaturalAlignment = typeWidthInBytesOnStack(Var->getType());
+    SizeT LogNaturalAlignment = llvm::findFirstSet(NaturalAlignment);
+    assert(LogNaturalAlignment >= minStackSlotSizeLog2());
+    assert(LogNaturalAlignment <= maxStackSlotSizeLog2());
+    SizeT BucketIndex = LogNaturalAlignment - minStackSlotSizeLog2();
+    Buckets[BucketIndex].push_back(Var);
+  }
+
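+  // Emit the buckets in reverse order, from the widest (most aligned)
+  // variables down to the narrowest, so that Dest ends up sorted by
+  // decreasing alignment.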
+  for (SizeT I = 0, E = NumBuckets; I < E; ++I) {
+    VarList &List = Buckets[NumBuckets - I - 1];
+    Dest.insert(Dest.end(), List.begin(), List.end());
+  }
+}
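
For illustration, here is a minimal standalone sketch of the same bucket-sort
technique, operating on plain power-of-two byte widths instead of Variables.
Everything in it is hypothetical (the function name and the assumed 1..16-byte
slot-size range); it is not Subzero code:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Bucket by log2 of the width, then emit buckets widest-first. The sort
    // is stable within a bucket, like the SmallVector-based version above.
    std::vector<unsigned> sortWidthsDescending(const std::vector<unsigned> &Widths) {
      const std::size_t MaxLog = 4; // assumption: 1..16 byte slots
      std::vector<std::vector<unsigned>> Buckets(MaxLog + 1);
      for (unsigned W : Widths) {
        assert(W != 0 && (W & (W - 1)) == 0 && "widths assumed to be powers of two");
        std::size_t Log = 0;
        for (unsigned V = W; V >>= 1;) // log2 of a power of two
          ++Log;
        Buckets[Log].push_back(W);
      }
      std::vector<unsigned> Dest;
      for (std::size_t I = Buckets.size(); I-- > 0;)
        Dest.insert(Dest.end(), Buckets[I].begin(), Buckets[I].end());
      return Dest; // e.g. {4, 8, 1, 4} yields {8, 4, 4, 1}
    }
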
+
+void TargetLowering::getVarStackSlotParams(
+    VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed,
+    size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
+    uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes,
+    std::function<bool(Variable *)> TargetVarHook) {
+  const VariablesMetadata *VMetadata = Func->getVMetadata();
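+  // Mark every variable that is referenced by at least one non-deleted
+  // instruction, either as the dest or within a source operand.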
+  llvm::BitVector IsVarReferenced(Func->getNumVariables());
+  for (CfgNode *Node : Func->getNodes()) {
+    for (Inst &Inst : Node->getInsts()) {
+      if (Inst.isDeleted())
+        continue;
+      if (const Variable *Var = Inst.getDest())
+        IsVarReferenced[Var->getIndex()] = true;
+      for (SizeT I = 0; I < Inst.getSrcSize(); ++I) {
+        Operand *Src = Inst.getSrc(I);
+        SizeT NumVars = Src->getNumVars();
+        for (SizeT J = 0; J < NumVars; ++J) {
+          const Variable *Var = Src->getVar(J);
+          IsVarReferenced[Var->getIndex()] = true;
+        }
+      }
+    }
+  }
+
+  // If SimpleCoalescing is false, each variable without a register
+  // gets its own unique stack slot, which leads to large stack
+  // frames. If SimpleCoalescing is true, then each "global" variable
+  // without a register gets its own slot, but "local" variable slots
+  // are reused across basic blocks. E.g., if A and B are local to
+  // block 1 and C is local to block 2, then C may share a slot with A or B.
+  //
+  // We cannot coalesce stack slots if this function calls a "returns twice"
+  // function. In that case, basic blocks may be revisited, and variables
+  // local to those basic blocks are actually live until after the
+  // called function returns a second time.
+  const bool SimpleCoalescing = !callsReturnsTwice();
+
+  std::vector<size_t> LocalsSize(Func->getNumNodes());
+  const VarList &Variables = Func->getVariables();
+  VarList SpilledVariables;
+  for (Variable *Var : Variables) {
+    if (Var->hasReg()) {
+      RegsUsed[Var->getRegNum()] = true;
+      continue;
+    }
+    // An argument either does not need a stack slot (if passed in a
+    // register) or already has one (if passed on the stack).
+    if (Var->getIsArg())
+      continue;
+    // An unreferenced variable doesn't need a stack slot.
+    if (!IsVarReferenced[Var->getIndex()])
+      continue;
+    // A variable handled by the target-specific hook needs no accounting
+    // here; it may, for example, end up sharing stack slots assigned by
+    // the target itself.
+    if (TargetVarHook(Var))
+      continue;
+    SpilledVariables.push_back(Var);
+  }
+
+  SortedSpilledVariables.reserve(SpilledVariables.size());
+  sortVarsByAlignment(SortedSpilledVariables, SpilledVariables);
+
+  for (Variable *Var : SortedSpilledVariables) {
+    size_t Increment = typeWidthInBytesOnStack(Var->getType());
+    // The variables are sorted by decreasing alignment, so the first
+    // variable encountered in each area determines that area's maximum
+    // alignment.
+    if (!*SpillAreaAlignmentBytes)
+      *SpillAreaAlignmentBytes = Increment;
+    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
+      if (VMetadata->isMultiBlock(Var)) {
+        *GlobalsSize += Increment;
+      } else {
+        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
+        LocalsSize[NodeIndex] += Increment;
+        if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes)
+          *SpillAreaSizeBytes = LocalsSize[NodeIndex];
+        if (!*LocalsSlotsAlignmentBytes)
+          *LocalsSlotsAlignmentBytes = Increment;
+      }
+    } else {
+      *SpillAreaSizeBytes += Increment;
+    }
+  }
+}
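
To make the coalescing arithmetic concrete, here is a standalone sketch of the
locals accounting for the A/B/C example in the comment above, assuming all
three variables occupy 4 bytes on the stack (the function and names are
illustrative, not Subzero code):

    #include <algorithm>
    #include <cstddef>

    // A and B are local to block 0, C is local to block 1. Running sizes are
    // kept per block, and the locals area only needs to be as large as the
    // largest block, so C can reuse the slot of A or B.
    std::size_t localsSpillAreaSize() {
      std::size_t LocalsSize[2] = {0, 0};
      std::size_t SpillAreaSizeBytes = 0;
      const std::size_t Increment = 4; // assume 4-byte stack slots
      for (std::size_t NodeIndex : {0u, 0u, 1u}) { // A, B, then C
        LocalsSize[NodeIndex] += Increment;
        SpillAreaSizeBytes = std::max(SpillAreaSizeBytes, LocalsSize[NodeIndex]);
      }
      return SpillAreaSizeBytes; // 8 bytes rather than 12
    }
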
+
+void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables,
+                                         size_t SpillAreaStart,
+                                         size_t SpillAreaSizeBytes,
+                                         size_t GlobalsAndSubsequentPaddingSize,
+                                         bool UsesFramePointer) {
+  const VariablesMetadata *VMetadata = Func->getVMetadata();
+  size_t GlobalsSpaceUsed = SpillAreaStart;
+  size_t NextStackOffset = SpillAreaStart;
+  std::vector<size_t> LocalsSize(Func->getNumNodes());
+  const bool SimpleCoalescing = !callsReturnsTwice();
+  for (Variable *Var : SortedSpilledVariables) {
+    size_t Increment = typeWidthInBytesOnStack(Var->getType());
+    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
+      if (VMetadata->isMultiBlock(Var)) {
+        GlobalsSpaceUsed += Increment;
+        NextStackOffset = GlobalsSpaceUsed;
+      } else {
+        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
+        LocalsSize[NodeIndex] += Increment;
+        NextStackOffset = SpillAreaStart + GlobalsAndSubsequentPaddingSize +
+                          LocalsSize[NodeIndex];
+      }
+    } else {
+      NextStackOffset += Increment;
+    }
+    if (UsesFramePointer)
+      Var->setStackOffset(-NextStackOffset);
+    else
+      Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset);
+  }
+}
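
As a rough illustration of the two addressing conventions at the end of the
loop above (a sketch, not Subzero code): frame-pointer-based frames address
slots at negative offsets from the frame pointer, while stack-pointer-based
frames address them at positive offsets from the bottom of the spill area.

    #include <cstddef>
    #include <cstdint>

    // For a 16-byte spill area and a variable whose NextStackOffset is 4,
    // this yields -4 (e.g. [ebp - 4]) with a frame pointer, or 12
    // (e.g. [esp + 12]) without one.
    int32_t stackOffsetFor(std::size_t NextStackOffset,
                           std::size_t SpillAreaSizeBytes,
                           bool UsesFramePointer) {
      if (UsesFramePointer)
        return -static_cast<int32_t>(NextStackOffset);
      return static_cast<int32_t>(SpillAreaSizeBytes - NextStackOffset);
    }
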
+
 InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest,
                                          SizeT MaxSrcs) {
   const bool HasTailCall = false;
|