Chromium Code Reviews

Side by Side Diff: src/IceTargetLowering.h

Issue 1341423002: Reflow comments to use the full width. (Closed) Base URL: https://chromium.googlesource.com/native_client/pnacl-subzero.git@master
Patch Set: Fix spelling and rebase. Created 5 years, 3 months ago.
 //===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===//
 //
 // The Subzero Code Generator
 //
 // This file is distributed under the University of Illinois Open Source
 // License. See LICENSE.TXT for details.
 //
 //===----------------------------------------------------------------------===//
 ///
 /// \file
 /// This file declares the TargetLowering, LoweringContext, and
-/// TargetDataLowering classes. TargetLowering is an abstract class
-/// used to drive the translation/lowering process. LoweringContext
-/// maintains a context for lowering each instruction, offering
-/// conveniences such as iterating over non-deleted instructions.
-/// TargetDataLowering is an abstract class used to drive the
-/// lowering/emission of global initializers, external global
-/// declarations, and internal constant pools.
+/// TargetDataLowering classes. TargetLowering is an abstract class used to
+/// drive the translation/lowering process. LoweringContext maintains a context
+/// for lowering each instruction, offering conveniences such as iterating over
+/// non-deleted instructions. TargetDataLowering is an abstract class used to
+/// drive the lowering/emission of global initializers, external global
+/// declarations, and internal constant pools.
 ///
 //===----------------------------------------------------------------------===//

 #ifndef SUBZERO_SRC_ICETARGETLOWERING_H
 #define SUBZERO_SRC_ICETARGETLOWERING_H

 #include "IceDefs.h"
 #include "IceInst.h" // for the names of the Inst subtypes
 #include "IceOperand.h"
 #include "IceTypes.h"

 namespace Ice {

-/// LoweringContext makes it easy to iterate through non-deleted
-/// instructions in a node, and insert new (lowered) instructions at
-/// the current point. Along with the instruction list container and
-/// associated iterators, it holds the current node, which is needed
-/// when inserting new instructions in order to track whether variables
-/// are used as single-block or multi-block.
+/// LoweringContext makes it easy to iterate through non-deleted instructions in
+/// a node, and insert new (lowered) instructions at the current point. Along
+/// with the instruction list container and associated iterators, it holds the
+/// current node, which is needed when inserting new instructions in order to
+/// track whether variables are used as single-block or multi-block.
 class LoweringContext {
   LoweringContext(const LoweringContext &) = delete;
   LoweringContext &operator=(const LoweringContext &) = delete;

 public:
   LoweringContext() = default;
   ~LoweringContext() = default;
   void init(CfgNode *Node);
   Inst *getNextInst() const {
     if (Next == End)
(...skipping 17 matching lines...)
   void advanceNext() { advanceForward(Next); }
   void setCur(InstList::iterator C) { Cur = C; }
   void setNext(InstList::iterator N) { Next = N; }
   void rewind();
   void setInsertPoint(const InstList::iterator &Position) { Next = Position; }

 private:
   /// Node is the argument to Inst::updateVars().
   CfgNode *Node = nullptr;
   Inst *LastInserted = nullptr;
-  /// Cur points to the current instruction being considered. It is
-  /// guaranteed to point to a non-deleted instruction, or to be End.
+  /// Cur points to the current instruction being considered. It is guaranteed
+  /// to point to a non-deleted instruction, or to be End.
   InstList::iterator Cur;
-  /// Next doubles as a pointer to the next valid instruction (if any),
-  /// and the new-instruction insertion point. It is also updated for
-  /// the caller in case the lowering consumes more than one high-level
-  /// instruction. It is guaranteed to point to a non-deleted
-  /// instruction after Cur, or to be End. TODO: Consider separating
-  /// the notion of "next valid instruction" and "new instruction
-  /// insertion point", to avoid confusion when previously-deleted
-  /// instructions come between the two points.
+  /// Next doubles as a pointer to the next valid instruction (if any), and the
+  /// new-instruction insertion point. It is also updated for the caller in case
+  /// the lowering consumes more than one high-level instruction. It is
+  /// guaranteed to point to a non-deleted instruction after Cur, or to be End.
+  // TODO: Consider separating the notion of "next valid instruction" and "new
+  // instruction insertion point", to avoid confusion when previously-deleted
+  // instructions come between the two points.
   InstList::iterator Next;
   /// Begin is a copy of Insts.begin(), used if iterators are moved backward.
   InstList::iterator Begin;
   /// End is a copy of Insts.end(), used if Next needs to be advanced.
   InstList::iterator End;

   void skipDeleted(InstList::iterator &I) const;
   void advanceForward(InstList::iterator &I) const;
 };

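For illustration (not part of this patch): a minimal sketch of how a lowering driver might walk a node with LoweringContext, using only the methods visible above (init, getNextInst, advanceNext) plus insert(), which shows up later in this file in _bundle_lock(). It assumes getNextInst() returns nullptr at the end of the list, as its truncated body suggests.

#include "IceTargetLowering.h"

namespace Ice {
// Hypothetical walk: peek at the next non-deleted instruction, lower it by
// inserting new instructions at the current point, then advance past it.
void walkNodeSketch(CfgNode *Node, LoweringContext &Context) {
  Context.init(Node);
  while (Inst *Instr = Context.getNextInst()) {
    // A real target would dispatch on Instr's kind here and call
    // Context.insert(...) with the lowered instructions.
    (void)Instr;
    Context.advanceNext();
  }
}
} // end of namespace Ice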
(...skipping 56 matching lines...)

   /// Tries to do address mode optimization on a single instruction.
   void doAddressOpt();
   /// Randomly insert NOPs.
   void doNopInsertion(RandomNumberGenerator &RNG);
   /// Lowers a single non-Phi instruction.
   void lower();
   /// Inserts and lowers a single high-level instruction at a specific insertion
   /// point.
   void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
-  /// Does preliminary lowering of the set of Phi instructions in the
-  /// current node. The main intention is to do what's needed to keep
-  /// the unlowered Phi instructions consistent with the lowered
-  /// non-Phi instructions, e.g. to lower 64-bit operands on a 32-bit
-  /// target.
+  /// Does preliminary lowering of the set of Phi instructions in the current
+  /// node. The main intention is to do what's needed to keep the unlowered Phi
+  /// instructions consistent with the lowered non-Phi instructions, e.g. to
+  /// lower 64-bit operands on a 32-bit target.
   virtual void prelowerPhis() {}
-  /// Tries to do branch optimization on a single instruction. Returns
-  /// true if some optimization was done.
+  /// Tries to do branch optimization on a single instruction. Returns true if
+  /// some optimization was done.
   virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
     return false;
   }

   virtual SizeT getNumRegisters() const = 0;
-  /// Returns a variable pre-colored to the specified physical
-  /// register. This is generally used to get very direct access to
-  /// the register such as in the prolog or epilog or for marking
-  /// scratch registers as killed by a call. If a Type is not
-  /// provided, a target-specific default type is used.
+  /// Returns a variable pre-colored to the specified physical register. This is
+  /// generally used to get very direct access to the register such as in the
+  /// prolog or epilog or for marking scratch registers as killed by a call. If
+  /// a Type is not provided, a target-specific default type is used.
   virtual Variable *getPhysicalRegister(SizeT RegNum,
                                         Type Ty = IceType_void) = 0;
   /// Returns a printable name for the register.
   virtual IceString getRegName(SizeT RegNum, Type Ty) const = 0;

   virtual bool hasFramePointer() const { return false; }
   virtual SizeT getFrameOrStackReg() const = 0;
   virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;

   bool hasComputedFrame() const { return HasComputedFrame; }
-  /// Returns true if this function calls a function that has the
-  /// "returns twice" attribute.
+  /// Returns true if this function calls a function that has the "returns
+  /// twice" attribute.
   bool callsReturnsTwice() const { return CallsReturnsTwice; }
   void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
   int32_t getStackAdjustment() const { return StackAdjustment; }
   void updateStackAdjustment(int32_t Offset) { StackAdjustment += Offset; }
   void resetStackAdjustment() { StackAdjustment = 0; }
   SizeT makeNextLabelNumber() { return NextLabelNumber++; }
   SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
   LoweringContext &getContext() { return Context; }

   enum RegSet {
(...skipping 11 matching lines...)
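For illustration (not part of this patch): the doBranchOpt() contract documented above, sketched as a target override. TargetSketch is a hypothetical subclass of TargetLowering; its other required overrides are omitted, so this is a fragment rather than a complete class.

// Fragment of a hypothetical TargetSketch : public TargetLowering subclass.
bool TargetSketch::doBranchOpt(Inst *I, const CfgNode *NextNode) {
  // A real target inspects I (e.g. its own conditional-branch instruction) and
  // rewrites or deletes it when its fall-through already reaches NextNode.
  // Per the comment above, return true only if something was actually changed.
  (void)I;
  (void)NextNode;
  return false;
}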
   virtual const llvm::SmallBitVector &getRegisterSetForType(Type Ty) const = 0;
   virtual const llvm::SmallBitVector &getAliasesForRegister(SizeT) const = 0;

   void regAlloc(RegAllocKind Kind);

   virtual void
   makeRandomRegisterPermutation(llvm::SmallVectorImpl<int32_t> &Permutation,
                                 const llvm::SmallBitVector &ExcludeRegisters,
                                 uint64_t Salt) const = 0;

-  /// Save/restore any mutable state for the situation where code
-  /// emission needs multiple passes, such as sandboxing or relaxation.
-  /// Subclasses may provide their own implementation, but should be
-  /// sure to also call the parent class's methods.
+  /// Save/restore any mutable state for the situation where code emission needs
+  /// multiple passes, such as sandboxing or relaxation. Subclasses may provide
+  /// their own implementation, but should be sure to also call the parent
+  /// class's methods.
   virtual void snapshotEmitState() {
     SnapshotStackAdjustment = StackAdjustment;
   }
   virtual void rollbackEmitState() {
     StackAdjustment = SnapshotStackAdjustment;
   }

   /// Get the minimum number of clusters required for a jump table to be
   /// considered.
   virtual SizeT getMinJumpTableSize() const = 0;
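For illustration (not part of this patch): the snapshot/rollback contract documented above, sketched as a subclass fragment. TargetSketch, SandboxState, and SavedSandboxState are assumed names; the subclass's other required overrides are omitted.

// Fragment of a hypothetical TargetSketch subclass: save extra target state,
// and still call the parent methods so StackAdjustment is snapshotted too.
void TargetSketch::snapshotEmitState() {
  SavedSandboxState = SandboxState;      // hypothetical target-specific state
  TargetLowering::snapshotEmitState();
}
void TargetSketch::rollbackEmitState() {
  SandboxState = SavedSandboxState;
  TargetLowering::rollbackEmitState();
}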
(...skipping 41 matching lines...)
   virtual void lowerStore(const InstStore *Inst) = 0;
   virtual void lowerSwitch(const InstSwitch *Inst) = 0;
   virtual void lowerUnreachable(const InstUnreachable *Inst) = 0;
   virtual void lowerOther(const Inst *Instr);

   virtual void doAddressOptLoad() {}
   virtual void doAddressOptStore() {}
   virtual void doMockBoundsCheck(Operand *) {}
   virtual void randomlyInsertNop(float Probability,
                                  RandomNumberGenerator &RNG) = 0;
-  /// This gives the target an opportunity to post-process the lowered
-  /// expansion before returning.
+  /// This gives the target an opportunity to post-process the lowered expansion
+  /// before returning.
   virtual void postLower() {}

-  /// Find two-address non-SSA instructions and set the DestNonKillable flag
-  /// to keep liveness analysis consistent.
+  /// Find two-address non-SSA instructions and set the DestNonKillable flag to
+  /// keep liveness analysis consistent.
   void inferTwoAddress();

-  /// Make a pass over the Cfg to determine which variables need stack slots
-  /// and place them in a sorted list (SortedSpilledVariables). Among those,
-  /// vars, classify the spill variables as local to the basic block vs
-  /// global (multi-block) in order to compute the parameters GlobalsSize
-  /// and SpillAreaSizeBytes (represents locals or general vars if the
-  /// coalescing of locals is disallowed) along with alignments required
-  /// for variables in each area. We rely on accurate VMetadata in order to
-  /// classify a variable as global vs local (otherwise the variable is
-  /// conservatively global). The in-args should be initialized to 0.
+  /// Make a pass over the Cfg to determine which variables need stack slots and
+  /// place them in a sorted list (SortedSpilledVariables). Among those, vars,
+  /// classify the spill variables as local to the basic block vs global
+  /// (multi-block) in order to compute the parameters GlobalsSize and
+  /// SpillAreaSizeBytes (represents locals or general vars if the coalescing of
+  /// locals is disallowed) along with alignments required for variables in each
+  /// area. We rely on accurate VMetadata in order to classify a variable as
+  /// global vs local (otherwise the variable is conservatively global). The
+  /// in-args should be initialized to 0.
   ///
-  /// This is only a pre-pass and the actual stack slot assignment is
-  /// handled separately.
+  /// This is only a pre-pass and the actual stack slot assignment is handled
+  /// separately.
   ///
-  /// There may be target-specific Variable types, which will be handled
-  /// by TargetVarHook. If the TargetVarHook returns true, then the variable
-  /// is skipped and not considered with the rest of the spilled variables.
+  /// There may be target-specific Variable types, which will be handled by
+  /// TargetVarHook. If the TargetVarHook returns true, then the variable is
+  /// skipped and not considered with the rest of the spilled variables.
   void getVarStackSlotParams(VarList &SortedSpilledVariables,
                              llvm::SmallBitVector &RegsUsed,
                              size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
                              uint32_t *SpillAreaAlignmentBytes,
                              uint32_t *LocalsSlotsAlignmentBytes,
                              std::function<bool(Variable *)> TargetVarHook);

-  /// Calculate the amount of padding needed to align the local and global
-  /// areas to the required alignment. This assumes the globals/locals layout
-  /// used by getVarStackSlotParams and assignVarStackSlots.
+  /// Calculate the amount of padding needed to align the local and global areas
+  /// to the required alignment. This assumes the globals/locals layout used by
+  /// getVarStackSlotParams and assignVarStackSlots.
   void alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                             uint32_t SpillAreaAlignmentBytes,
                             size_t GlobalsSize,
                             uint32_t LocalsSlotsAlignmentBytes,
                             uint32_t *SpillAreaPaddingBytes,
                             uint32_t *LocalsSlotsPaddingBytes);

-  /// Make a pass through the SortedSpilledVariables and actually assign
-  /// stack slots. SpillAreaPaddingBytes takes into account stack alignment
-  /// padding. The SpillArea starts after that amount of padding.
-  /// This matches the scheme in getVarStackSlotParams, where there may
-  /// be a separate multi-block global var spill area and a local var
-  /// spill area.
+  /// Make a pass through the SortedSpilledVariables and actually assign stack
+  /// slots. SpillAreaPaddingBytes takes into account stack alignment padding.
+  /// The SpillArea starts after that amount of padding. This matches the scheme
+  /// in getVarStackSlotParams, where there may be a separate multi-block global
+  /// var spill area and a local var spill area.
   void assignVarStackSlots(VarList &SortedSpilledVariables,
                            size_t SpillAreaPaddingBytes,
                            size_t SpillAreaSizeBytes,
                            size_t GlobalsAndSubsequentPaddingSize,
                            bool UsesFramePointer);

-  /// Sort the variables in Source based on required alignment.
-  /// The variables with the largest alignment need are placed in the front
-  /// of the Dest list.
+  /// Sort the variables in Source based on required alignment. The variables
+  /// with the largest alignment need are placed in the front of the Dest list.
   void sortVarsByAlignment(VarList &Dest, const VarList &Source) const;

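For illustration (not part of this patch): one plausible way the three helpers above compose inside a target's prolog computation. All local names are placeholders, the padding arithmetic is only indicative, and real targets add register saves, argument handling, and more; this sketches the call order, not the actual addProlog of any target.

// Fragment of a hypothetical TargetSketch prolog computation.
void TargetSketch::addPrologSketch() {
  VarList SortedSpilledVariables;
  llvm::SmallBitVector RegsUsed;
  size_t GlobalsSize = 0, SpillAreaSizeBytes = 0;          // in-args start at 0
  uint32_t SpillAreaAlignmentBytes = 0, LocalsSlotsAlignmentBytes = 0;
  getVarStackSlotParams(SortedSpilledVariables, RegsUsed, &GlobalsSize,
                        &SpillAreaSizeBytes, &SpillAreaAlignmentBytes,
                        &LocalsSlotsAlignmentBytes,
                        [](Variable *) { return false; }); // no target-specific vars
  uint32_t SpillAreaPaddingBytes = 0, LocalsSlotsPaddingBytes = 0;
  alignStackSpillAreas(/*SpillAreaStartOffset=*/0, SpillAreaAlignmentBytes,
                       GlobalsSize, LocalsSlotsAlignmentBytes,
                       &SpillAreaPaddingBytes, &LocalsSlotsPaddingBytes);
  SpillAreaSizeBytes += SpillAreaPaddingBytes + LocalsSlotsPaddingBytes;
  assignVarStackSlots(SortedSpilledVariables, SpillAreaPaddingBytes,
                      SpillAreaSizeBytes,
                      GlobalsSize + LocalsSlotsPaddingBytes, // globals + padding
                      hasFramePointer());
}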
   /// Make a call to an external helper function.
   InstCall *makeHelperCall(const IceString &Name, Variable *Dest,
                            SizeT MaxSrcs);

   void
   _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
     Context.insert(InstBundleLock::create(Func, BundleOption));
   }
   void _bundle_unlock() { Context.insert(InstBundleUnlock::create(Func)); }
   void _set_dest_nonkillable() {
     Context.getLastInserted()->setDestNonKillable();
   }

   bool shouldOptimizeMemIntrins();

   Cfg *Func;
   GlobalContext *Ctx;
   bool HasComputedFrame = false;
   bool CallsReturnsTwice = false;
-  /// StackAdjustment keeps track of the current stack offset from its
-  /// natural location, as arguments are pushed for a function call.
+  /// StackAdjustment keeps track of the current stack offset from its natural
+  /// location, as arguments are pushed for a function call.
   int32_t StackAdjustment = 0;
   SizeT NextLabelNumber = 0;
   SizeT NextJumpTableNumber = 0;
   LoweringContext Context;

   // Runtime helper function names
   const static constexpr char *H_bitcast_16xi1_i16 = "__Sz_bitcast_16xi1_i16";
   const static constexpr char *H_bitcast_8xi1_i8 = "__Sz_bitcast_8xi1_i8";
   const static constexpr char *H_bitcast_i16_16xi1 = "__Sz_bitcast_i16_16xi1";
   const static constexpr char *H_bitcast_i8_8xi1 = "__Sz_bitcast_i8_8xi1";
(...skipping 27 matching lines...)
   const static constexpr char *H_uitofp_i32_f64 = "__Sz_uitofp_i32_f64";
   const static constexpr char *H_uitofp_i64_f32 = "__Sz_uitofp_i64_f32";
   const static constexpr char *H_uitofp_i64_f64 = "__Sz_uitofp_i64_f64";
   const static constexpr char *H_urem_i32 = "__umodsi3";
   const static constexpr char *H_urem_i64 = "__umoddi3";

 private:
   int32_t SnapshotStackAdjustment = 0;
 };

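For illustration (not part of this patch): how a target with no native unsigned-remainder instruction might combine makeHelperCall() with the H_urem_i32 name above (which resolves to __umodsi3). InstCall::addArg() and the target's lowerCall() entry point are not visible in this diff, so treat their use here as an assumption; Dest, Src0, and Src1 are placeholders.

// Fragment of a hypothetical TargetSketch lowering of a 32-bit urem.
void TargetSketch::lowerUremSketch(Variable *Dest, Operand *Src0, Operand *Src1) {
  constexpr SizeT MaxSrcs = 2;
  InstCall *Call = makeHelperCall(H_urem_i32, Dest, MaxSrcs);
  Call->addArg(Src0);   // numerator
  Call->addArg(Src1);   // denominator
  lowerCall(Call);      // emit it like any other call instruction
}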
-/// TargetDataLowering is used for "lowering" data including initializers
-/// for global variables, and the internal constant pools. It is separated
-/// out from TargetLowering because it does not require a Cfg.
+/// TargetDataLowering is used for "lowering" data including initializers for
+/// global variables, and the internal constant pools. It is separated out from
+/// TargetLowering because it does not require a Cfg.
 class TargetDataLowering {
   TargetDataLowering() = delete;
   TargetDataLowering(const TargetDataLowering &) = delete;
   TargetDataLowering &operator=(const TargetDataLowering &) = delete;

 public:
   static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx);
   virtual ~TargetDataLowering();

   virtual void lowerGlobals(const VariableDeclarationList &Vars,
                             const IceString &SectionSuffix) = 0;
   virtual void lowerConstants() = 0;
   virtual void lowerJumpTables() = 0;

 protected:
   void emitGlobal(const VariableDeclaration &Var,
                   const IceString &SectionSuffix);

-  /// For now, we assume .long is the right directive for emitting 4 byte
-  /// emit global relocations. However, LLVM MIPS usually uses .4byte instead.
+  /// For now, we assume .long is the right directive for emitting 4 byte emit
+  /// global relocations. However, LLVM MIPS usually uses .4byte instead.
   /// Perhaps there is some difference when the location is unaligned.
   static const char *getEmit32Directive() { return ".long"; }

   explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
   GlobalContext *Ctx;
 };

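For illustration (not part of this patch): a data-lowering subclass would typically implement lowerGlobals() by walking the declarations and reusing the protected emitGlobal() helper. DataSketch is a hypothetical subclass, and the iteration assumes VariableDeclarationList is a list of VariableDeclaration pointers.

// Fragment of a hypothetical DataSketch : public TargetDataLowering subclass.
void DataSketch::lowerGlobals(const VariableDeclarationList &Vars,
                              const IceString &SectionSuffix) {
  for (const VariableDeclaration *Var : Vars)
    emitGlobal(*Var, SectionSuffix); // emit each initializer into the section
}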
-/// TargetHeaderLowering is used to "lower" the header of an output file.
-/// It writes out the target-specific header attributes. E.g., for ARM
-/// this writes out the build attributes (float ABI, etc.).
+/// TargetHeaderLowering is used to "lower" the header of an output file. It
+/// writes out the target-specific header attributes. E.g., for ARM this writes
+/// out the build attributes (float ABI, etc.).
 class TargetHeaderLowering {
   TargetHeaderLowering() = delete;
   TargetHeaderLowering(const TargetHeaderLowering &) = delete;
   TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete;

 public:
   static std::unique_ptr<TargetHeaderLowering>
   createLowering(GlobalContext *Ctx);
   virtual ~TargetHeaderLowering();

   virtual void lower() {}

 protected:
   explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {}
   GlobalContext *Ctx;
 };

 } // end of namespace Ice

 #endif // SUBZERO_SRC_ICETARGETLOWERING_H
