OLD | NEW |
(Empty) | |
| 1 ; RUN: not pnacl-abicheck < %s | FileCheck %s |
| 2 |
| 3 declare i8 @llvm.nacl.atomic.load.i8(i8*, i32) |
| 4 declare i16 @llvm.nacl.atomic.load.i16(i16*, i32) |
| 5 declare i32 @llvm.nacl.atomic.load.i32(i32*, i32) |
| 6 declare i64 @llvm.nacl.atomic.load.i64(i64*, i32) |
| 7 declare void @llvm.nacl.atomic.store.i8(i8, i8*, i32) |
| 8 declare void @llvm.nacl.atomic.store.i16(i16, i16*, i32) |
| 9 declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32) |
| 10 declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32) |
| 11 declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32) |
| 12 declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32) |
| 13 declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32) |
| 14 declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32) |
| 15 declare i8 @llvm.nacl.atomic.cmpxchg.i8(i8*, i8, i8, i32, i32) |
| 16 declare i16 @llvm.nacl.atomic.cmpxchg.i16(i16*, i16, i16, i32, i32) |
| 17 declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32) |
| 18 declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32) |
| 19 declare void @llvm.nacl.atomic.fence(i32) |
| 20 declare void @llvm.nacl.atomic.fence.all() |
| 21 declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*) |
| 22 |
; This stops the verifier from complaining about the lack of an entry point.
define void @_start(i32 %arg) {
  ret void
}
| 27 |
| 28 |
; Load
;
; The trailing i32 operand is the C11 memory-order encoding used throughout
; this file: 1=relaxed, 2=consume, 3=acquire, 4=release, 5=acq_rel,
; 6=seq_cst; 0 and 7 are out of range. Per the CHECK lines below, atomic
; loads accept only acquire (3) and seq_cst (6).

define internal i32 @test_load_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 7)
  ret i32 %1
}
; CHECK: test_load_invalid_7 disallowed: invalid memory order

define internal i32 @test_load_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 0)
  ret i32 %1
}
; CHECK: test_load_invalid_0 disallowed: invalid memory order

define internal i32 @test_load_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_load_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 5)
  ret i32 %1
}
; CHECK: test_load_acqrel disallowed: invalid memory order

define internal i32 @test_load_release() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 4)
  ret i32 %1
}
; CHECK: test_load_release disallowed: invalid memory order

define internal i32 @test_load_acquire() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_load_consume() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 2)
  ret i32 %1
}
; CHECK: test_load_consume disallowed: invalid memory order

define internal i32 @test_load_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 1)
  ret i32 %1
}
; CHECK: test_load_relaxed disallowed: invalid memory order
| 86 |
| 87 |
; Store
;
; Same memory-order encoding as above (1=relaxed .. 6=seq_cst). Per the
; CHECK lines below, atomic stores accept only release (4) and seq_cst (6).

define internal void @test_store_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 7)
  ret void
}
; CHECK: test_store_invalid_7 disallowed: invalid memory order

define internal void @test_store_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 0)
  ret void
}
; CHECK: test_store_invalid_0 disallowed: invalid memory order

define internal void @test_store_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 6)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_store_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 5)
  ret void
}
; CHECK: test_store_acqrel disallowed: invalid memory order

define internal void @test_store_release() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 4)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_store_acquire() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 3)
  ret void
}
; CHECK: test_store_acquire disallowed: invalid memory order

define internal void @test_store_consume() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 2)
  ret void
}
; CHECK: test_store_consume disallowed: invalid memory order

define internal void @test_store_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 1)
  ret void
}
; CHECK: test_store_relaxed disallowed: invalid memory order
| 145 |
| 146 |
; rmw
;
; First operand (i32 1) selects the RMW operation; the last operand is the
; memory order (1=relaxed .. 6=seq_cst). Per the CHECK lines below, RMW
; accepts acquire (3), release (4), acq_rel (5) and seq_cst (6).

define internal i32 @test_rmw_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 7)
  ret i32 %1
}
; CHECK: test_rmw_invalid_7 disallowed: invalid memory order

define internal i32 @test_rmw_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 0)
  ret i32 %1
}
; CHECK: test_rmw_invalid_0 disallowed: invalid memory order

define internal i32 @test_rmw_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 6)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 5)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_release() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 4)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_acquire() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_rmw_consume() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 2)
  ret i32 %1
}
; CHECK: test_rmw_consume disallowed: invalid memory order

define internal i32 @test_rmw_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 1)
  ret i32 %1
}
; CHECK: test_rmw_relaxed disallowed: invalid memory order
| 204 |
| 205 |
; cmpxchg
;
; The last two operands are the success and failure memory orders
; (1=relaxed .. 6=seq_cst). Per the CHECK/CHECK-NOT lines below, the only
; accepted (success, failure) pairs are (6,6), (6,3), (5,3) and (3,3);
; every other combination is rejected as an invalid memory order.

define internal i32 @test_cmpxchg_invalid_7(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 7, i32 7)
  ret i32 %1
}
; CHECK: test_cmpxchg_invalid_7 disallowed: invalid memory order

define internal i32 @test_cmpxchg_invalid_0(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 0, i32 0)
  ret i32 %1
}
; CHECK: test_cmpxchg_invalid_0 disallowed: invalid memory order

; seq_cst

define internal i32 @test_cmpxchg_seqcst_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_seqcst_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_seqcst_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_seqcst_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_seqcst_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_seqcst_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_relaxed disallowed: invalid memory order

; acq_rel

define internal i32 @test_cmpxchg_acqrel_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_acqrel_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_acqrel_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_relaxed disallowed: invalid memory order

; release

define internal i32 @test_cmpxchg_release_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_acquire disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_release_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_relaxed disallowed: invalid memory order

; acquire

define internal i32 @test_cmpxchg_acquire_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 3)
  ret i32 %1
}
; CHECK-NOT: disallowed

define internal i32 @test_cmpxchg_acquire_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_acquire_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_relaxed disallowed: invalid memory order

; consume

define internal i32 @test_cmpxchg_consume_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_acquire disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_consume_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_relaxed disallowed: invalid memory order

; relaxed

define internal i32 @test_cmpxchg_relaxed_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_seqcst disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_acqrel disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_release disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_acquire disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_consume disallowed: invalid memory order

define internal i32 @test_cmpxchg_relaxed_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_relaxed disallowed: invalid memory order
| 485 |
| 486 |
; fence
;
; Same memory-order encoding as above (1=relaxed .. 6=seq_cst). Per the
; CHECK lines below, fences accept acquire (3), release (4), acq_rel (5)
; and seq_cst (6) — the same set as RMW.
;
; NOTE(review): the original file named the order-4 test "acquire" and the
; order-3 test "release", the opposite of the 3=acquire/4=release mapping
; used by every other section in this file. The two names are swapped here
; so names match constants; both remain accepted, so test results are
; unchanged.

define internal void @test_fence_invalid_7() {
  call void @llvm.nacl.atomic.fence(i32 7)
  ret void
}
; CHECK: test_fence_invalid_7 disallowed: invalid memory order

define internal void @test_fence_invalid_0() {
  call void @llvm.nacl.atomic.fence(i32 0)
  ret void
}
; CHECK: test_fence_invalid_0 disallowed: invalid memory order

define internal void @test_fence_seqcst() {
  call void @llvm.nacl.atomic.fence(i32 6)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_acqrel() {
  call void @llvm.nacl.atomic.fence(i32 5)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_release() {
  call void @llvm.nacl.atomic.fence(i32 4)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_acquire() {
  call void @llvm.nacl.atomic.fence(i32 3)
  ret void
}
; CHECK-NOT: disallowed

define internal void @test_fence_consume() {
  call void @llvm.nacl.atomic.fence(i32 2)
  ret void
}
; CHECK: test_fence_consume disallowed: invalid memory order

define internal void @test_fence_relaxed() {
  call void @llvm.nacl.atomic.fence(i32 1)
  ret void
}
; CHECK: test_fence_relaxed disallowed: invalid memory order
OLD | NEW |