; RUN: not pnacl-abicheck < %s | FileCheck %s

; Check that pnacl-abicheck rejects invalid memory orders on the NaCl
; atomic intrinsics.  The memory-order operand encoding used by the tests
; below is: 1=relaxed, 2=consume, 3=acquire, 4=release, 5=acq_rel,
; 6=seq_cst; 0 and 7 are out of range.
; NOTE(review): the previously-present llvm.mem{cpy,move,set} declarations
; were unused and have been removed per review.

declare i8 @llvm.nacl.atomic.load.i8(i8*, i32)
declare i16 @llvm.nacl.atomic.load.i16(i16*, i32)
declare i32 @llvm.nacl.atomic.load.i32(i32*, i32)
declare i64 @llvm.nacl.atomic.load.i64(i64*, i32)
declare void @llvm.nacl.atomic.store.i8(i8, i8*, i32)
declare void @llvm.nacl.atomic.store.i16(i16, i16*, i32)
declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32)
declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32)
declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32)
declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32)
declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32)
declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32)
declare i8 @llvm.nacl.atomic.cmpxchg.i8(i8*, i8, i8, i32, i32)
declare i16 @llvm.nacl.atomic.cmpxchg.i16(i16*, i16, i16, i32, i32)
declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32)
declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32)
declare void @llvm.nacl.atomic.fence(i32)
declare void @llvm.nacl.atomic.fence.all()
declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*)

; Load
; Per the expectations below, atomic loads accept only acquire (3) and
; seq_cst (6); everything else is rejected.

define internal i32 @test_load_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 7)
  ret i32 %1
}
; CHECK: test_load_invalid_7 disallowed: invalid memory order

define internal i32 @test_load_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 0)
  ret i32 %1
}
; CHECK: test_load_invalid_0 disallowed: invalid memory order

define internal i32 @test_load_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_load_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 5)
  ret i32 %1
}
; CHECK: test_load_acqrel disallowed: invalid memory order

define internal i32 @test_load_release() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 4)
  ret i32 %1
}
; CHECK: test_load_release disallowed: invalid memory order

define internal i32 @test_load_acquire() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_load_consume() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 2)
  ret i32 %1
}
; CHECK: test_load_consume disallowed: invalid memory order

define internal i32 @test_load_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 1)
  ret i32 %1
}
; CHECK: test_load_relaxed disallowed: invalid memory order

; Store
; Per the expectations below, atomic stores accept only release (4) and
; seq_cst (6); everything else is rejected.

define internal void @test_store_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 7)
  ret void
}
; CHECK: test_store_invalid_7 disallowed: invalid memory order

define internal void @test_store_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 0)
  ret void
}
; CHECK: test_store_invalid_0 disallowed: invalid memory order

define internal void @test_store_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 6)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_store_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 5)
  ret void
}
; CHECK: test_store_acqrel disallowed: invalid memory order

define internal void @test_store_release() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 4)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_store_acquire() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 3)
  ret void
}
; CHECK: test_store_acquire disallowed: invalid memory order

define internal void @test_store_consume() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 2)
  ret void
}
; CHECK: test_store_consume disallowed: invalid memory order

define internal void @test_store_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 1)
  ret void
}
; CHECK: test_store_relaxed disallowed: invalid memory order

; rmw
; Per the expectations below, read-modify-write accepts acquire (3),
; release (4), acq_rel (5) and seq_cst (6); consume, relaxed and
; out-of-range values are rejected.

define internal i32 @test_rmw_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 7)
  ret i32 %1
}
; CHECK: test_rmw_invalid_7 disallowed: invalid memory order

define internal i32 @test_rmw_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 0)
  ret i32 %1
}
; CHECK: test_rmw_invalid_0 disallowed: invalid memory order

define internal i32 @test_rmw_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 6)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 5)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_release() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 4)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_acquire() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_consume() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 2)
  ret i32 %1
}
; CHECK: test_rmw_consume disallowed: invalid memory order

define internal i32 @test_rmw_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 1)
  ret i32 %1
}
; CHECK: test_rmw_relaxed disallowed: invalid memory order

; cmpxchg
; The cmpxchg intrinsic takes a (success, failure) memory-order pair;
; the sections below enumerate every combination.

define internal i32 @test_cmpxchg_invalid_7(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 7, i32 7)
  ret i32 %1
}
; CHECK: test_cmpxchg_invalid_7 disallowed: invalid memory order

define internal i32 @test_cmpxchg_invalid_0(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 0, i32 0)
  ret i32 %1
}
; CHECK: test_cmpxchg_invalid_0 disallowed: invalid memory order

; seq_cst success order: only seq_cst and acquire failure orders are allowed.

define internal i32 @test_cmpxchg_seqcst_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_seqcst_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_seqcst_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_seqcst_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_seqcst_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_seqcst_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_relaxed disallowed: invalid memory order

; acq_rel success order: only an acquire failure order is allowed.

define internal i32 @test_cmpxchg_acqrel_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_acqrel_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_relaxed disallowed: invalid memory order

; release success order: no failure order is allowed.

define internal i32 @test_cmpxchg_release_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_acquire disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_relaxed disallowed: invalid memory order

; acquire success order: only an acquire failure order is allowed.

define internal i32 @test_cmpxchg_acquire_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_acquire_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_relaxed disallowed: invalid memory order

; consume success order: always rejected, whatever the failure order.

define internal i32 @test_cmpxchg_consume_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_acquire disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_relaxed disallowed: invalid memory order

; relaxed success order: always rejected, whatever the failure order.

define internal i32 @test_cmpxchg_relaxed_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_acquire disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_relaxed disallowed: invalid memory order

; This stops the verifier from complaining about the lack of an entry point.
define void @_start(i32 %arg) {
  ret void
}
490 | |
491 | |
; fence
; Fences accept acquire (3), release (4), acq_rel (5) and seq_cst (6);
; consume, relaxed and out-of-range values are rejected.
; NOTE(review): the original file had test_fence_acquire using order 4 and
; test_fence_release using order 3, inverted relative to the encoding used
; by every other section (3=acquire, 4=release).  Both are allowed fence
; orders so the CHECK-NOT results are unchanged; the names now match the
; constants and follow the same descending order as the other sections.

define internal void @test_fence_invalid_7() {
  call void @llvm.nacl.atomic.fence(i32 7)
  ret void
}
; CHECK: test_fence_invalid_7 disallowed: invalid memory order

define internal void @test_fence_invalid_0() {
  call void @llvm.nacl.atomic.fence(i32 0)
  ret void
}
; CHECK: test_fence_invalid_0 disallowed: invalid memory order

define internal void @test_fence_seqcst() {
  call void @llvm.nacl.atomic.fence(i32 6)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_acqrel() {
  call void @llvm.nacl.atomic.fence(i32 5)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_release() {
  call void @llvm.nacl.atomic.fence(i32 4)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_acquire() {
  call void @llvm.nacl.atomic.fence(i32 3)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_consume() {
  call void @llvm.nacl.atomic.fence(i32 2)
  ret void
}
; CHECK: test_fence_consume disallowed: invalid memory order

define internal void @test_fence_relaxed() {
  call void @llvm.nacl.atomic.fence(i32 1)
  ret void
}
; CHECK: test_fence_relaxed disallowed: invalid memory order