OLD | NEW |
1 //===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===// | 1 //===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===// |
2 // | 2 // |
3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
4 // | 4 // |
5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
7 // | 7 // |
8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
9 // | 9 // |
10 // This file declares the TargetLowering, LoweringContext, and | 10 // This file declares the TargetLowering, LoweringContext, and |
(...skipping 147 matching lines...)
158 // scratch registers as killed by a call. If a Type is not | 158 // scratch registers as killed by a call. If a Type is not |
159 // provided, a target-specific default type is used. | 159 // provided, a target-specific default type is used. |
160 virtual Variable *getPhysicalRegister(SizeT RegNum, | 160 virtual Variable *getPhysicalRegister(SizeT RegNum, |
161 Type Ty = IceType_void) = 0; | 161 Type Ty = IceType_void) = 0; |
162 // Returns a printable name for the register. | 162 // Returns a printable name for the register. |
163 virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0; | 163 virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0; |
164 | 164 |
165 virtual bool hasFramePointer() const { return false; } | 165 virtual bool hasFramePointer() const { return false; } |
166 virtual SizeT getFrameOrStackReg() const = 0; | 166 virtual SizeT getFrameOrStackReg() const = 0; |
167 virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0; | 167 virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0; |
| 168 // Base-2 log of the width in bytes of the smallest/largest stack slot. |
| 169 virtual size_t minStackSlotSizeLog2() const = 0; |
| 170 virtual size_t maxStackSlotSizeLog2() const = 0; |
| 171 |
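A note on the log2 convention above: a hypothetical target whose smallest stack slot is 4 bytes and whose largest is 16 bytes would return 2 and 4 respectively. Minimal sketch, assuming an illustrative TargetExample backend (not any real one):

  // Illustrative overrides only; the concrete slot sizes are assumptions.
  size_t TargetExample::minStackSlotSizeLog2() const { return 2; } // 4-byte slots
  size_t TargetExample::maxStackSlotSizeLog2() const { return 4; } // 16-byte slots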
168 bool hasComputedFrame() const { return HasComputedFrame; } | 172 bool hasComputedFrame() const { return HasComputedFrame; } |
169 // Returns true if this function calls a function that has the | 173 // Returns true if this function calls a function that has the |
170 // "returns twice" attribute. | 174 // "returns twice" attribute. |
171 bool callsReturnsTwice() const { return CallsReturnsTwice; } | 175 bool callsReturnsTwice() const { return CallsReturnsTwice; } |
172 void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; } | 176 void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; } |
173 int32_t getStackAdjustment() const { return StackAdjustment; } | 177 int32_t getStackAdjustment() const { return StackAdjustment; } |
174 void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; } | 178 void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; } |
175 void resetStackAdjustment() { StackAdjustment = 0; } | 179 void resetStackAdjustment() { StackAdjustment = 0; } |
176 SizeT makeNextLabelNumber() { return NextLabelNumber++; } | 180 SizeT makeNextLabelNumber() { return NextLabelNumber++; } |
177 LoweringContext &getContext() { return Context; } | 181 LoweringContext &getContext() { return Context; } |
(...skipping 74 matching lines...)
252 virtual void doAddressOptStore() {} | 256 virtual void doAddressOptStore() {} |
253 virtual void randomlyInsertNop(float Probability) = 0; | 257 virtual void randomlyInsertNop(float Probability) = 0; |
254 // This gives the target an opportunity to post-process the lowered | 258 // This gives the target an opportunity to post-process the lowered |
255 // expansion before returning. | 259 // expansion before returning. |
256 virtual void postLower() {} | 260 virtual void postLower() {} |
257 | 261 |
258 // Find two-address non-SSA instructions and set the DestNonKillable flag | 262 // Find two-address non-SSA instructions and set the DestNonKillable flag |
259 // to keep liveness analysis consistent. | 263 // to keep liveness analysis consistent. |
260 void inferTwoAddress(); | 264 void inferTwoAddress(); |
261 | 265 |
| 266 // Make a pass over the Cfg to determine which variables need stack slots |
| 267 // and place them in a sorted list (SortedSpilledVariables). Among those |
| 268 // vars, classify the spill variables as local to the basic block vs |
| 269 // global (multi-block) in order to compute the parameters GlobalsSize |
| 270 // and SpillAreaSizeBytes (which covers locals, or all spilled vars when |
| 271 // coalescing of locals is disallowed), along with the alignments required |
| 272 // for variables in each area. We rely on accurate VMetadata in order to |
| 273 // classify a variable as global vs local (otherwise the variable is |
| 274 // conservatively global). The in-args should be initialized to 0. |
| 275 // |
| 276 // This is only a pre-pass and the actual stack slot assignment is |
| 277 // handled separately. |
| 278 // |
| 279 // There may be target-specific Variable types, which will be handled |
| 280 // by TargetVarHook. If the TargetVarHook returns true, then the variable |
| 281 // is skipped and not considered with the rest of the spilled variables. |
| 282 void getVarStackSlotParams(VarList &SortedSpilledVariables, |
| 283 llvm::SmallBitVector &RegsUsed, |
| 284 size_t *GlobalsSize, size_t *SpillAreaSizeBytes, |
| 285 uint32_t *SpillAreaAlignmentBytes, |
| 286 uint32_t *LocalsSlotsAlignmentBytes, |
| 287 std::function<bool(Variable *)> TargetVarHook); |
| 288 |
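As an illustration of the TargetVarHook parameter, a target's prolog code might invoke this pre-pass roughly as below; everything outside the declared interface (NumRegisters, isTargetSpecial) is a hypothetical placeholder:

  // Sketch only: collect spill-layout parameters, letting the hook claim
  // target-specific Variable kinds so they are skipped by this pass.
  llvm::SmallBitVector RegsUsed(NumRegisters); // NumRegisters: placeholder
  VarList SortedSpilledVariables;
  size_t GlobalsSize = 0, SpillAreaSizeBytes = 0;
  uint32_t SpillAreaAlignmentBytes = 0, LocalsSlotsAlignmentBytes = 0;
  getVarStackSlotParams(SortedSpilledVariables, RegsUsed, &GlobalsSize,
                        &SpillAreaSizeBytes, &SpillAreaAlignmentBytes,
                        &LocalsSlotsAlignmentBytes,
                        [](Variable *Var) { return isTargetSpecial(Var); });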
| 289 // Calculate the amount of padding needed to align the local and global |
| 290 // areas to the required alignment. This assumes the globals/locals layout |
| 291 // used by getVarStackSlotParams and assignVarStackSlots. |
| 292 void alignStackSpillAreas(uint32_t SpillAreaStartOffset, |
| 293 uint32_t SpillAreaAlignmentBytes, |
| 294 size_t GlobalsSize, |
| 295 uint32_t LocalsSlotsAlignmentBytes, |
| 296 uint32_t *SpillAreaPaddingBytes, |
| 297 uint32_t *LocalsSlotsPaddingBytes); |
| 298 |
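A worked example of the padding this computes, with made-up numbers: if the spill area would otherwise start at byte offset 12 but SpillAreaAlignmentBytes is 16, SpillAreaPaddingBytes comes out as 4; if the globals sub-area then ends misaligned relative to LocalsSlotsAlignmentBytes, LocalsSlotsPaddingBytes pads the start of the locals sub-area. The round-up is the usual alignment formula, sketched here for reference (not the actual implementation):

  // Pad Offset up to the next multiple of Align; e.g. padTo(12, 16) == 4,
  // so the first spilled variable lands on a 16-byte boundary.
  uint32_t padTo(uint32_t Offset, uint32_t Align) {
    return Align > 1 ? (Align - (Offset % Align)) % Align : 0;
  }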
| 299 // Make a pass through the SortedSpilledVariables and actually assign |
| 300 // stack slots. SpillAreaPaddingBytes takes into account stack alignment |
| 301 // padding. The SpillArea starts after that amount of padding. |
| 302 // This matches the scheme in getVarStackSlotParams, where there may |
| 303 // be a separate multi-block global var spill area and a local var |
| 304 // spill area. |
| 305 void assignVarStackSlots(VarList &SortedSpilledVariables, |
| 306 size_t SpillAreaPaddingBytes, |
| 307 size_t SpillAreaSizeBytes, |
| 308 size_t GlobalsAndSubsequentPaddingSize, |
| 309 bool UsesFramePointer); |
| 310 |
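Putting the three helpers together, the intended call order in a target's prolog is roughly the sketch below; SpillAreaStartOffset, UsesFramePointer, and how the padding feeds the final frame size are target-specific assumptions here:

  // Sketch of the intended sequence only; details are target-specific.
  uint32_t SpillAreaPaddingBytes = 0, LocalsSlotsPaddingBytes = 0;
  alignStackSpillAreas(SpillAreaStartOffset, SpillAreaAlignmentBytes,
                       GlobalsSize, LocalsSlotsAlignmentBytes,
                       &SpillAreaPaddingBytes, &LocalsSlotsPaddingBytes);
  SpillAreaSizeBytes += SpillAreaPaddingBytes + LocalsSlotsPaddingBytes;
  assignVarStackSlots(SortedSpilledVariables, SpillAreaPaddingBytes,
                      SpillAreaSizeBytes,
                      GlobalsSize + LocalsSlotsPaddingBytes,
                      UsesFramePointer);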
| 311 // Sort the variables in Source based on required alignment. |
| 312 // Variables with the largest alignment requirement are placed at the front |
| 313 // of the Dest list. |
| 314 void sortVarsByAlignment(VarList &Dest, const VarList &Source) const; |
| 315 |
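A minimal sketch of the ordering described above, assuming the alignment requirement is derived from typeWidthInBytesOnStack (the comparator and the use of std::sort, requiring <algorithm>, are illustrative assumptions, not the actual implementation):

  // Illustration only: copy, then order by descending stack width.
  Dest = Source;
  std::sort(Dest.begin(), Dest.end(), [this](Variable *A, Variable *B) {
    return typeWidthInBytesOnStack(A->getType()) >
           typeWidthInBytesOnStack(B->getType());
  });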
262 // Make a call to an external helper function. | 316 // Make a call to an external helper function. |
263 InstCall *makeHelperCall(const IceString &Name, Variable *Dest, | 317 InstCall *makeHelperCall(const IceString &Name, Variable *Dest, |
264 SizeT MaxSrcs); | 318 SizeT MaxSrcs); |
265 | 319 |
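An illustrative use, with a hypothetical helper name and placeholder operands; the built call is then handed to the target's normal call lowering:

  // Sketch: room for two source operands; Src0/Src1 are placeholders.
  InstCall *Call = makeHelperCall("__example_helper", /*Dest=*/nullptr,
                                  /*MaxSrcs=*/2);
  Call->addArg(Src0);
  Call->addArg(Src1);
  lowerCall(Call);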
| 320 void |
| 321 _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) { |
| 322 Context.insert(InstBundleLock::create(Func, BundleOption)); |
| 323 } |
| 324 void _bundle_unlock() { Context.insert(InstBundleUnlock::create(Func)); } |
| 325 |
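The two methods emit only the bundle markers; the instructions that must not straddle a bundle boundary go between them, e.g.:

  _bundle_lock(); // defaults to InstBundleLock::Opt_None
  // ... emit the instructions that must stay within a single bundle ...
  _bundle_unlock();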
266 Cfg *Func; | 326 Cfg *Func; |
267 GlobalContext *Ctx; | 327 GlobalContext *Ctx; |
268 bool HasComputedFrame; | 328 bool HasComputedFrame; |
269 bool CallsReturnsTwice; | 329 bool CallsReturnsTwice; |
270 // StackAdjustment keeps track of the current stack offset from its | 330 // StackAdjustment keeps track of the current stack offset from its |
271 // natural location, as arguments are pushed for a function call. | 331 // natural location, as arguments are pushed for a function call. |
272 int32_t StackAdjustment; | 332 int32_t StackAdjustment; |
273 SizeT NextLabelNumber; | 333 SizeT NextLabelNumber; |
274 LoweringContext Context; | 334 LoweringContext Context; |
275 | 335 |
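For illustration, lowering a call that pushes two 4-byte arguments (sizes made up) would track and then clear the displacement via the StackAdjustment accessors declared earlier:

  // Each push moves the stack away from its natural location; record it so
  // stack-relative operands can be rebased, then reset after the call.
  updateStackAdjustment(4); // first 4-byte argument pushed
  updateStackAdjustment(4); // second 4-byte argument pushed
  // ... lower the call itself ...
  resetStackAdjustment();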
(...skipping 52 matching lines...)
328 virtual void lowerConstants() const = 0; | 388 virtual void lowerConstants() const = 0; |
329 | 389 |
330 protected: | 390 protected: |
331 explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {} | 391 explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {} |
332 GlobalContext *Ctx; | 392 GlobalContext *Ctx; |
333 }; | 393 }; |
334 | 394 |
335 } // end of namespace Ice | 395 } // end of namespace Ice |
336 | 396 |
337 #endif // SUBZERO_SRC_ICETARGETLOWERING_H | 397 #endif // SUBZERO_SRC_ICETARGETLOWERING_H |