Chromium Code Reviews

Side by Side Diff: lib/Transforms/NaCl/RewriteAtomics.cpp

Issue 927493002: PNaCl: Impl the other atomicrmw operations: nand, max, min, umax, and umin. Base URL: https://chromium.googlesource.com/native_client/pnacl-llvm.git@master
Patch Set: Created 5 years, 4 months ago
//===- RewriteAtomics.cpp - Stabilize instructions used for concurrency ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass encodes atomics, volatiles and fences using NaCl intrinsics
// instead of LLVM's regular IR instructions.
//
// All of the above are transformed into one of the
// @llvm.nacl.atomic.* intrinsics.
//
//===----------------------------------------------------------------------===//
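// Note on the rmw operations added in this change: nand, max, min, umax and
// umin have no direct @llvm.nacl.atomic.rmw encoding. They are skipped on the
// first visit, lowered into compare-exchange loops by the -atomic-expand pass,
// and the resulting cmpxchg instructions are then rewritten into
// @llvm.nacl.atomic.cmpxchg on a second visit of the function (see
// ExpandAtomicInstructions below).
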
18 #include "llvm/ADT/Triple.h"
18 #include "llvm/ADT/Twine.h" 19 #include "llvm/ADT/Twine.h"
20 #include "llvm/Analysis/NaCl/PNaClSimplificationAnalyses.h"
19 #include "llvm/IR/DataLayout.h" 21 #include "llvm/IR/DataLayout.h"
20 #include "llvm/IR/Function.h" 22 #include "llvm/IR/Function.h"
21 #include "llvm/IR/InlineAsm.h" 23 #include "llvm/IR/InlineAsm.h"
22 #include "llvm/IR/InstVisitor.h" 24 #include "llvm/IR/InstVisitor.h"
23 #include "llvm/IR/Instructions.h" 25 #include "llvm/IR/Instructions.h"
24 #include "llvm/IR/Intrinsics.h" 26 #include "llvm/IR/Intrinsics.h"
25 #include "llvm/IR/Module.h" 27 #include "llvm/IR/Module.h"
26 #include "llvm/IR/NaClAtomicIntrinsics.h" 28 #include "llvm/IR/NaClAtomicIntrinsics.h"
27 #include "llvm/Pass.h" 29 #include "llvm/Pass.h"
28 #include "llvm/Support/CommandLine.h" 30 #include "llvm/Support/CommandLine.h"
29 #include "llvm/Support/Compiler.h" 31 #include "llvm/Support/Compiler.h"
30 #include "llvm/Support/raw_ostream.h" 32 #include "llvm/Support/raw_ostream.h"
33 #include "llvm/Support/TargetRegistry.h"
31 #include "llvm/Transforms/NaCl.h" 34 #include "llvm/Transforms/NaCl.h"
32 #include <climits> 35 #include <climits>
33 #include <string> 36 #include <string>
34 37
using namespace llvm;

static cl::opt<bool> PNaClMemoryOrderSeqCstOnly(
    "pnacl-memory-order-seq-cst-only",
    cl::desc("PNaCl should upgrade all atomic memory orders to seq_cst"),
    cl::init(false));

namespace {
struct Initer {
  Initer() {}
  Initer(Initer &&Rhs)
      : Initialized(Rhs.Initialized), Target(std::move(Rhs.Target)),
        AtomicRMWExpander(std::move(Rhs.AtomicRMWExpander)) {}
  Initer &operator=(Initer &&Rhs) {
    Initialized = Rhs.Initialized;
    Target = std::move(Rhs.Target);
    AtomicRMWExpander = std::move(Rhs.AtomicRMWExpander);
    return *this;
  }

  bool Initialized = false;
  std::unique_ptr<TargetMachine> Target = nullptr;
  std::unique_ptr<FunctionPass> AtomicRMWExpander = nullptr;

  bool initialize() {
    if (!Initialized) {
      // For rewriting nand, (u)max, (u)min rmw atomics:
Richard Diamond 2015/07/30 11:47:08 Note to (re)views: once http://reviews.llvm.org/D1
Richard Diamond 2015/07/30 11:48:08 reviewers*
      // First we need a target machine to appease its lordship:

      // Get the target specific parser.
      std::string Error;
      Triple TheTriple = Triple("i686-none-nacl");
      const llvm::Target *TheTarget =
          TargetRegistry::lookupTarget("", TheTriple, Error);
      if (!TheTarget) {
        errs() << "Looking up 'i686-none-nacl':"
               << ": " << Error;
        report_fatal_error("Did you forget to initialize the x86 target?");
      }

      // Create the target machine:
      Target.reset(TheTarget->createTargetMachine(
          TheTriple.getTriple(), "generic", "", TargetOptions(),
          Reloc::Default, CodeModel::Default, CodeGenOpt::Default));
      assert(Target != nullptr);
      AtomicRMWExpander.reset(createAtomicExpandPass(Target.get()));
JF 2015/07/27 19:49:28 Now that you have an analysis pass, can you move t

      Initialized = true;
      return true;
    } else {
      return false;
    }
  }
};
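// Initer lazily builds a TargetMachine for the i686-none-nacl triple above;
// its only purpose is to let createAtomicExpandPass be constructed. The same
// Initer is shared by the new-style RewriteAtomicsPass below and the legacy
// RewriteAtomics wrapper at the end of this file.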

class RewriteAtomicsPass {
  Initer Init;

public:
  static StringRef name() { return "RewriteAtomicsPass"; }

  RewriteAtomicsPass() { Init.initialize(); }
  RewriteAtomicsPass(RewriteAtomicsPass &&Rhs) : Init(std::move(Rhs.Init)) {}
  RewriteAtomicsPass &operator=(RewriteAtomicsPass &&Rhs) {
    Init = std::move(Rhs.Init);
    return *this;
  }

  PreservedAnalyses run(Function &F, AnalysisManager<Function> *AM);
};

template <class T> std::string ToStr(const T &V) {
  std::string S;
  raw_string_ostream OS(S);
  OS << const_cast<T &>(V);
  return OS.str();
}

class AtomicVisitor : public InstVisitor<AtomicVisitor> {
public:
  AtomicVisitor(Module &M)
      : M(M), C(M.getContext()), TD(M.getDataLayout()), AI(C) {}
  ~AtomicVisitor() {}
  bool modifiedFunction() const { return Modified; }
  bool needsAtomicExpand() const { return NeedsAtomicExpand; }

  void visitLoadInst(LoadInst &I);
  void visitStoreInst(StoreInst &I);
  void visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  void visitAtomicRMWInst(AtomicRMWInst &I);
  void visitFenceInst(FenceInst &I);

private:
  Module &M;
  LLVMContext &C;
  const DataLayout TD;
  NaCl::AtomicIntrinsics AI;
  bool Modified = false;
  bool NeedsAtomicExpand = false;

  AtomicVisitor() = delete;
  AtomicVisitor(const AtomicVisitor &) = delete;
  AtomicVisitor &operator=(const AtomicVisitor &) = delete;

  /// Create an integer constant holding a NaCl::MemoryOrder that can be
  /// passed as an argument to one of the @llvm.nacl.atomic.*
  /// intrinsics. This function may strengthen the ordering initially
  /// specified by the instruction \p I for stability purpose.
  template <class Instruction>
(...skipping 54 matching lines...)
        Type *IntNPtr = Type::getIntNPtrTy(AV.C, BitSize);
        P = AV.createCast(I, P, IntNPtr, P->getName() + ".cast");
        PET = P->getType()->getPointerElementType();
      }
      AV.checkSizeMatchesType(I, BitSize, PET);
    }
  };
};
}
static bool
ExpandAtomicInstructions(Function &F,
                         std::unique_ptr<FunctionPass> &AtomicRMWExpander,
                         AtomicInfo &Info) {
  bool Changed = false;
  AtomicVisitor AV(*F.getParent());

  auto &CmpXchgs = Info.getCmpXchgs();
  for (auto *CmpXchg : CmpXchgs) {
    AV.visitAtomicCmpXchgInst(*CmpXchg);
    Changed = true;
  }

  auto &Loads = Info.getLoads();
  for (auto *Load : Loads) {
    AV.visitLoadInst(*Load);
    Changed = true;
  }

  auto &Stores = Info.getStores();
  for (auto *Store : Stores) {
    AV.visitStoreInst(*Store);
    Changed = true;
  }

  auto &RMWs = Info.getRMWs();
  for (auto *RMW : RMWs) {
    AV.visitAtomicRMWInst(*RMW);
    Changed = true;
  }

  auto &Fences = Info.getFences();
  for (auto *Fence : Fences) {
    AV.visitFenceInst(*Fence);
    Changed = true;
  }

  // Expand any leftover RMW atomics:
  // This is done after because otherwise -atomic-expand will expand stuff we're
  // capable of expanding, leaving us with less efficient code.
  if (Info.needsAtomicExpand()) {
    const bool Expanded = AtomicRMWExpander->runOnFunction(F);
JF 2015/07/27 19:49:28 Once the target is present, this can be done autom
    (void)Expanded;
    assert(Expanded);
    // Revisit the function, rewriting any cmpxchg created by the expansion
    // into the corresponding @llvm.nacl.atomic.* intrinsic.
    AV.visit(F);
    Changed = true;
  }
  return Changed;
}
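// Rough sketch of the two-step flow implemented above for an rmw operation
// with no direct NaCl intrinsic (schematic IR, assuming an i32 operand):
//   %res = atomicrmw nand i32* %ptr, i32 %val seq_cst
// is first lowered by -atomic-expand into a load + compare-exchange loop whose
// body computes `%new = xor (and %cur, %val), -1` and retries via
//   cmpxchg i32* %ptr, i32 %cur, i32 %new seq_cst seq_cst
// and that cmpxchg is then rewritten by the second AV.visit(F) into a call to
//   @llvm.nacl.atomic.cmpxchg.i32(%ptr, %cur, %new, order, order)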

PreservedAnalyses RewriteAtomicsPass::run(Function &F,
                                          AnalysisManager<Function> *AM) {
  auto &Info = AM->getResult<AtomicAnalysis>(F);

  if (ExpandAtomicInstructions(F, Init.AtomicRMWExpander, Info)) {
    return PreservedAnalyses::none();
  } else {
    return PreservedAnalyses::all();
  }
}

template <class Instruction>
ConstantInt *AtomicVisitor::freezeMemoryOrder(const Instruction &I,
                                              AtomicOrdering O) const {
  NaCl::MemoryOrder AO = NaCl::MemoryOrderInvalid;

  // TODO Volatile load/store are promoted to sequentially consistent
  // for now. We could do something weaker.
  if (const LoadInst *L = dyn_cast<LoadInst>(&I)) {
    if (L->isVolatile())
      AO = NaCl::MemoryOrderSequentiallyConsistent;
  } else if (const StoreInst *S = dyn_cast<StoreInst>(&I)) {
    if (S->isVolatile())
      AO = NaCl::MemoryOrderSequentiallyConsistent;
  }

  if (AO == NaCl::MemoryOrderInvalid) {
    switch (O) {
    case NotAtomic:
      llvm_unreachable("unexpected memory order");
    // Monotonic is a strict superset of Unordered. Both can therefore
    // map to Relaxed ordering, which is in the C11/C++11 standard.
    case Unordered:
      AO = NaCl::MemoryOrderRelaxed;
      break;
    case Monotonic:
      AO = NaCl::MemoryOrderRelaxed;
      break;
    // TODO Consume is currently unspecified by LLVM's internal IR.
    case Acquire:
      AO = NaCl::MemoryOrderAcquire;
      break;
    case Release:
      AO = NaCl::MemoryOrderRelease;
      break;
    case AcquireRelease:
      AO = NaCl::MemoryOrderAcquireRelease;
      break;
    case SequentiallyConsistent:
      AO = NaCl::MemoryOrderSequentiallyConsistent;
      break;
    }
  }

  // TODO For now only acquire/release/acq_rel/seq_cst are allowed.
  if (PNaClMemoryOrderSeqCstOnly || AO == NaCl::MemoryOrderRelaxed)
    AO = NaCl::MemoryOrderSequentiallyConsistent;

  return ConstantInt::get(Type::getInt32Ty(C), AO);
}
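// For example, freezeMemoryOrder maps a monotonic load to MemoryOrderRelaxed
// and then promotes it to MemoryOrderSequentiallyConsistent (relaxed is not
// yet allowed), while an acquire operation stays MemoryOrderAcquire unless
// -pnacl-memory-order-seq-cst-only is set.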
(...skipping 82 matching lines...)
        Success, 1, Name + ".insert.success", &I);
  } else if (!Call->getType()->isVoidTy() && DstType != OverloadedType) {
    // The call returns a value which needs to be cast to a non-integer.
    Res = createCast(I, Call, DstType, Name + ".cast");
    Res->setDebugLoc(I.getDebugLoc());
  }

  I.replaceAllUsesWith(Res);
  I.eraseFromParent();
  Call->setName(Name);
  Modified = true;
}

/// %res = load {atomic|volatile} T* %ptr memory_order, align sizeof(T)
/// becomes:
/// %res = call T @llvm.nacl.atomic.load.i<size>(%ptr, memory_order)
void AtomicVisitor::visitLoadInst(LoadInst &I) {
  if (I.isSimple())
    return;
  PointerHelper<LoadInst> PH(*this, I);
  const NaCl::AtomicIntrinsics::AtomicIntrinsic *Intrinsic =
(...skipping 29 matching lines...)
  replaceInstructionWithIntrinsicCall(I, Intrinsic, PH.OriginalPET, PH.PET,
                                      Args);
}

/// %res = atomicrmw OP T* %ptr, T %val memory_order
/// becomes:
/// %res = call T @llvm.nacl.atomic.rmw.i<size>(OP, %ptr, %val, memory_order)
void AtomicVisitor::visitAtomicRMWInst(AtomicRMWInst &I) {
  NaCl::AtomicRMWOperation Op;
  switch (I.getOperation()) {
  default: { return; }
  case AtomicRMWInst::Add:
    Op = NaCl::AtomicAdd;
    break;
  case AtomicRMWInst::Sub:
    Op = NaCl::AtomicSub;
    break;
  case AtomicRMWInst::And:
    Op = NaCl::AtomicAnd;
    break;
  case AtomicRMWInst::Or:
    Op = NaCl::AtomicOr;
    break;
  case AtomicRMWInst::Xor:
    Op = NaCl::AtomicXor;
    break;
  case AtomicRMWInst::Xchg:
    Op = NaCl::AtomicExchange;
    break;
  }
  PointerHelper<AtomicRMWInst> PH(*this, I);
  const NaCl::AtomicIntrinsics::AtomicIntrinsic *Intrinsic =
      findAtomicIntrinsic(I, Intrinsic::nacl_atomic_rmw, PH.PET);
  checkSizeMatchesType(I, PH.BitSize, I.getValOperand()->getType());
  Value *Args[] = {ConstantInt::get(Type::getInt32Ty(C), Op), PH.P,
                   I.getValOperand(), freezeMemoryOrder(I, I.getOrdering())};
  replaceInstructionWithIntrinsicCall(I, Intrinsic, PH.OriginalPET, PH.PET,
                                      Args);
}
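// Note: the `default:` case above intentionally returns without rewriting;
// nand, max, min, umax and umin have no NaCl::AtomicRMWOperation encoding and
// are instead lowered by the -atomic-expand run in ExpandAtomicInstructions
// into cmpxchg loops, which this visitor does know how to rewrite.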
(...skipping 52 matching lines...)
    replaceInstructionWithIntrinsicCall(I, Intrinsic, T, T,
                                        ArrayRef<Value *>());
  } else {
    const NaCl::AtomicIntrinsics::AtomicIntrinsic *Intrinsic =
        findAtomicIntrinsic(I, Intrinsic::nacl_atomic_fence, T);
    Value *Args[] = {freezeMemoryOrder(I, I.getOrdering())};
    replaceInstructionWithIntrinsicCall(I, Intrinsic, T, T, Args);
  }
}

/// Wrapper for the legacy pass manager.
class RewriteAtomics : public FunctionPass {
public:
  static char ID; // Pass identification, replacement for typeid
  RewriteAtomics() : FunctionPass(ID) {
    initializeRewriteAtomicsPass(*PassRegistry::getPassRegistry());
  }

  Initer Init;

  using llvm::Pass::doInitialization;
  bool doInitialization(Module &M) override {
    (void)M;
    return Init.initialize();
  }

  bool runOnFunction(Function &F) override {
    auto &Info = getAnalysis<AtomicAnalysisWrapperPass>().getInfo();
    return ExpandAtomicInstructions(F, Init.AtomicRMWExpander, Info);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AtomicAnalysisWrapperPass>();
  }
};
char RewriteAtomics::ID = 0;
INITIALIZE_PASS_BEGIN(RewriteAtomics, "nacl-rewrite-atomics",
                      "rewrite atomics, volatiles and fences into stable "
                      "@llvm.nacl.atomics.* intrinsics",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(AtomicAnalysisWrapperPass);
INITIALIZE_PASS_END(RewriteAtomics, "nacl-rewrite-atomics",
                    "rewrite atomics, volatiles and fences into stable "
                    "@llvm.nacl.atomics.* intrinsics",
                    false, false)

FunctionPass *llvm::createRewriteAtomicsPass() { return new RewriteAtomics(); }
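// Usage sketch (an assumption for illustration, not part of this patch): with
// the X86 target and the NaCl passes linked into opt and initialized, the
// legacy pass can be invoked as
//   opt -nacl-rewrite-atomics -S input.ll
// and its AtomicAnalysisWrapperPass requirement is scheduled automatically via
// getAnalysisUsage().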