OLD | NEW |
1 ; Test that some errors trigger when the usage of NaCl atomic | 1 ; Test that some errors trigger when the usage of NaCl atomic |
2 ; intrinsics does not match the required ABI. | 2 ; intrinsics does not match the required ABI. |
3 | 3 |
4 ; RUN: %p2i -i %s --args --verbose none --exit-success -threads=0 2>&1 \ | 4 ; RUN: %p2i -i %s --args --verbose none --exit-success -threads=0 2>&1 \ |
5 ; RUN: | FileCheck %s | 5 ; RUN: | FileCheck %s |
6 | 6 |
7 declare i8 @llvm.nacl.atomic.load.i8(i8*, i32) | 7 declare i8 @llvm.nacl.atomic.load.i8(i8*, i32) |
8 declare i16 @llvm.nacl.atomic.load.i16(i16*, i32) | 8 declare i16 @llvm.nacl.atomic.load.i16(i16*, i32) |
9 declare i64 @llvm.nacl.atomic.load.i64(i64*, i32) | 9 declare i64 @llvm.nacl.atomic.load.i64(i64*, i32) |
10 declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32) | 10 declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32) |
11 declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32) | 11 declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32) |
12 declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32) | 12 declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32) |
13 declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32) | 13 declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32) |
14 declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32) | 14 declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32) |
15 declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32) | 15 declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32) |
16 declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32) | 16 declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32) |
17 declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32) | 17 declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32) |
18 declare void @llvm.nacl.atomic.fence(i32) | 18 declare void @llvm.nacl.atomic.fence(i32) |
19 declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*) | 19 declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*) |
20 | 20 |
21 ;;; Load | 21 ;;; Load |
22 ;;; Check unexpected memory order parameter (only sequential | 22 ;;; Check unexpected memory order parameter (release=4, acq_rel=5, |
23 ;;; consistency == 6 is currently allowed). | 23 ;;; and out-of-range values such as 0 are disallowed for loads). |
24 | 24 |
25 define i32 @error_atomic_load_8(i32 %iptr) { | 25 define i32 @error_atomic_load_8(i32 %iptr) { |
26 entry: | 26 entry: |
27 %ptr = inttoptr i32 %iptr to i8* | 27 %ptr = inttoptr i32 %iptr to i8* |
28 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 0) | 28 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 0) |
29 %r = zext i8 %i to i32 | 29 %r = zext i8 %i to i32 |
30 ret i32 %r | 30 ret i32 %r |
31 } | 31 } |
32 ; CHECK: Unexpected memory ordering for AtomicLoad | 32 ; CHECK: Unexpected memory ordering for AtomicLoad |
33 | 33 |
34 define i32 @error_atomic_load_16(i32 %iptr) { | 34 define i32 @error_atomic_load_16(i32 %iptr) { |
35 entry: | 35 entry: |
36 %ptr = inttoptr i32 %iptr to i16* | 36 %ptr = inttoptr i32 %iptr to i16* |
37 %i = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 1) | 37 %i = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 4) |
38 %r = zext i16 %i to i32 | 38 %r = zext i16 %i to i32 |
39 ret i32 %r | 39 ret i32 %r |
40 } | 40 } |
41 ; CHECK: Unexpected memory ordering for AtomicLoad | 41 ; CHECK: Unexpected memory ordering for AtomicLoad |
42 | 42 |
43 define i64 @error_atomic_load_64(i32 %iptr) { | 43 define i64 @error_atomic_load_64(i32 %iptr) { |
44 entry: | 44 entry: |
45 %ptr = inttoptr i32 %iptr to i64* | 45 %ptr = inttoptr i32 %iptr to i64* |
46 %r = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 2) | 46 %r = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 5) |
47 ret i64 %r | 47 ret i64 %r |
48 } | 48 } |
49 ; CHECK: Unexpected memory ordering for AtomicLoad | 49 ; CHECK: Unexpected memory ordering for AtomicLoad |
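
For contrast, a load that uses seq_cst (6), the ordering the old comment names as allowed, should pass the check. A minimal sketch (hypothetical @ok_atomic_load_8, not part of the FileCheck'd test):

define i32 @ok_atomic_load_8(i32 %iptr) {
entry:
  %ptr = inttoptr i32 %iptr to i8*
  ; seq_cst == 6 is an accepted ordering for loads, so no error is reported.
  %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6)
  %r = zext i8 %i to i32
  ret i32 %r
}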
50 | 50 |
51 | 51 |
52 ;;; Store | 52 ;;; Store |
 | 53 ;;; consume=2, acquire=3, and acq_rel=5 are disallowed for stores. |
53 | 54 |
54 define void @error_atomic_store_32(i32 %iptr, i32 %v) { | 55 define void @error_atomic_store_32(i32 %iptr, i32 %v) { |
55 entry: | 56 entry: |
56 %ptr = inttoptr i32 %iptr to i32* | 57 %ptr = inttoptr i32 %iptr to i32* |
57 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 2) | 58 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 2) |
58 ret void | 59 ret void |
59 } | 60 } |
60 ; CHECK: Unexpected memory ordering for AtomicStore | 61 ; CHECK: Unexpected memory ordering for AtomicStore |
61 | 62 |
62 define void @error_atomic_store_64(i32 %iptr, i64 %v) { | 63 define void @error_atomic_store_64(i32 %iptr, i64 %v) { |
63 entry: | 64 entry: |
64 %ptr = inttoptr i32 %iptr to i64* | 65 %ptr = inttoptr i32 %iptr to i64* |
65 call void @llvm.nacl.atomic.store.i64(i64 %v, i64* %ptr, i32 3) | 66 call void @llvm.nacl.atomic.store.i64(i64 %v, i64* %ptr, i32 3) |
66 ret void | 67 ret void |
67 } | 68 } |
68 ; CHECK: Unexpected memory ordering for AtomicStore | 69 ; CHECK: Unexpected memory ordering for AtomicStore |
69 | 70 |
70 define void @error_atomic_store_64_const(i32 %iptr) { | 71 define void @error_atomic_store_64_const(i32 %iptr) { |
71 entry: | 72 entry: |
72 %ptr = inttoptr i32 %iptr to i64* | 73 %ptr = inttoptr i32 %iptr to i64* |
73 call void @llvm.nacl.atomic.store.i64(i64 12345678901234, i64* %ptr, i32 4) | 74 call void @llvm.nacl.atomic.store.i64(i64 12345678901234, i64* %ptr, i32 5) |
74 ret void | 75 ret void |
75 } | 76 } |
76 ; CHECK: Unexpected memory ordering for AtomicStore | 77 ; CHECK: Unexpected memory ordering for AtomicStore |
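
Likewise, a store is expected to pass with seq_cst (6). A minimal sketch, with a hypothetical name and no CHECK line:

define void @ok_atomic_store_32(i32 %iptr, i32 %v) {
entry:
  %ptr = inttoptr i32 %iptr to i32*
  ; seq_cst == 6 is a valid ordering for stores.
  call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 6)
  ret void
}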
77 | 78 |
78 ;;; RMW | 79 ;;; RMW |
79 ;;; Test atomic memory order and operation. | 80 ;;; Test atomic memory order and operation. |
 | 81 ;;; Orderings 3 through 6 (acquire, release, acq_rel, seq_cst) are allowed. |
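
Before the error cases, a well-formed RMW for reference: a sketch assuming operation 1 is the add operation (as the test names below suggest) and seq_cst == 6:

define i32 @ok_atomic_rmw_add_32(i32 %iptr, i32 %v) {
entry:
  %ptr = inttoptr i32 %iptr to i32*
  ; Operation 1 (add, per the tests below) with the allowed ordering seq_cst == 6.
  %a = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %v, i32 6)
  ret i32 %a
}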
80 | 82 |
81 define i32 @error_atomic_rmw_add_8(i32 %iptr, i32 %v) { | 83 define i32 @error_atomic_rmw_add_8(i32 %iptr, i32 %v) { |
82 entry: | 84 entry: |
83 %trunc = trunc i32 %v to i8 | 85 %trunc = trunc i32 %v to i8 |
84 %ptr = inttoptr i32 %iptr to i8* | 86 %ptr = inttoptr i32 %iptr to i8* |
85 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 5) | 87 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 1) |
86 %a_ext = zext i8 %a to i32 | 88 %a_ext = zext i8 %a to i32 |
87 ret i32 %a_ext | 89 ret i32 %a_ext |
88 } | 90 } |
89 ; CHECK: Unexpected memory ordering for AtomicRMW | 91 ; CHECK: Unexpected memory ordering for AtomicRMW |
90 | 92 |
91 define i64 @error_atomic_rmw_add_64(i32 %iptr, i64 %v) { | 93 define i64 @error_atomic_rmw_add_64(i32 %iptr, i64 %v) { |
92 entry: | 94 entry: |
93 %ptr = inttoptr i32 %iptr to i64* | 95 %ptr = inttoptr i32 %iptr to i64* |
94 %a = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 %v, i32 4) | 96 %a = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 %v, i32 7) |
95 ret i64 %a | 97 ret i64 %a |
96 } | 98 } |
97 ; CHECK: Unexpected memory ordering for AtomicRMW | 99 ; CHECK: Unexpected memory ordering for AtomicRMW |
98 | 100 |
99 define i32 @error_atomic_rmw_add_16(i32 %iptr, i32 %v) { | 101 define i32 @error_atomic_rmw_add_16(i32 %iptr, i32 %v) { |
100 entry: | 102 entry: |
101 %trunc = trunc i32 %v to i16 | 103 %trunc = trunc i32 %v to i16 |
102 %ptr = inttoptr i32 %iptr to i16* | 104 %ptr = inttoptr i32 %iptr to i16* |
103 %a = call i16 @llvm.nacl.atomic.rmw.i16(i32 0, i16* %ptr, i16 %trunc, i32 6) | 105 %a = call i16 @llvm.nacl.atomic.rmw.i16(i32 0, i16* %ptr, i16 %trunc, i32 6) |
104 %a_ext = zext i16 %a to i32 | 106 %a_ext = zext i16 %a to i32 |
(...skipping 19 matching lines...) |
124 | 126 |
125 ;;; Cmpxchg | 127 ;;; Cmpxchg |
126 | 128 |
127 define i32 @error_atomic_cmpxchg_32_success(i32 %iptr, i32 %expected, i32 %desired) { | 129 define i32 @error_atomic_cmpxchg_32_success(i32 %iptr, i32 %expected, i32 %desired) { |
128 entry: | 130 entry: |
129 %ptr = inttoptr i32 %iptr to i32* | 131 %ptr = inttoptr i32 %iptr to i32* |
130 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, | 132 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
131 i32 %desired, i32 0, i32 6) | 133 i32 %desired, i32 0, i32 6) |
132 ret i32 %old | 134 ret i32 %old |
133 } | 135 } |
134 ; CHECK: Unexpected memory ordering (success) for AtomicCmpxchg | 136 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
135 | 137 |
136 define i32 @error_atomic_cmpxchg_32_failure(i32 %iptr, i32 %expected, i32 %desired) { | 138 define i32 @error_atomic_cmpxchg_32_failure(i32 %iptr, i32 %expected, i32 %desired) { |
137 entry: | 139 entry: |
138 %ptr = inttoptr i32 %iptr to i32* | 140 %ptr = inttoptr i32 %iptr to i32* |
139 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, | 141 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
140 i32 %desired, i32 6, i32 0) | 142 i32 %desired, i32 6, i32 0) |
141 ret i32 %old | 143 ret i32 %old |
142 } | 144 } |
143 ; CHECK: Unexpected memory ordering (failure) for AtomicCmpxchg | 145 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
144 | 146 |
145 define i64 @error_atomic_cmpxchg_64_failure(i32 %iptr, i64 %expected, i64 %desired) { | 147 define i64 @error_atomic_cmpxchg_64_failure(i32 %iptr, i64 %expected, i64 %desired) { |
146 entry: | 148 entry: |
147 %ptr = inttoptr i32 %iptr to i64* | 149 %ptr = inttoptr i32 %iptr to i64* |
148 %old = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 %expected, | 150 %old = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 %expected, |
149 i64 %desired, i32 6, i32 3) | 151 i64 %desired, i32 4, i32 1) |
150 ret i64 %old | 152 ret i64 %old |
151 } | 153 } |
152 ; CHECK: Unexpected memory ordering (failure) for AtomicCmpxchg | 154 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
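
For reference, a cmpxchg with seq_cst (6) in both the success and failure positions should be accepted; a sketch with a hypothetical name:

define i32 @ok_atomic_cmpxchg_32(i32 %iptr, i32 %expected, i32 %desired) {
entry:
  %ptr = inttoptr i32 %iptr to i32*
  ; seq_cst == 6 for both the success and failure orderings.
  %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected,
                                                i32 %desired, i32 6, i32 6)
  ret i32 %old
}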
153 | 155 |
154 ;;; Fence and is-lock-free. | 156 ;;; Fence and is-lock-free. |
155 | 157 |
156 define void @error_atomic_fence() { | 158 define void @error_atomic_fence() { |
157 entry: | 159 entry: |
158 call void @llvm.nacl.atomic.fence(i32 1) | 160 call void @llvm.nacl.atomic.fence(i32 0) |
159 ret void | 161 ret void |
160 } | 162 } |
161 ; CHECK: Unexpected memory ordering for AtomicFence | 163 ; CHECK: Unexpected memory ordering for AtomicFence |
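
A fence with seq_cst (6) is the presumed valid form; a sketch:

define void @ok_atomic_fence() {
entry:
  ; seq_cst == 6, rather than the invalid ordering 0 used above.
  call void @llvm.nacl.atomic.fence(i32 6)
  ret void
}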
162 | 164 |
163 define i32 @error_atomic_is_lock_free_var(i32 %iptr, i32 %bs) { | 165 define i32 @error_atomic_is_lock_free_var(i32 %iptr, i32 %bs) { |
164 entry: | 166 entry: |
165 %ptr = inttoptr i32 %iptr to i8* | 167 %ptr = inttoptr i32 %iptr to i8* |
166 %i = call i1 @llvm.nacl.atomic.is.lock.free(i32 %bs, i8* %ptr) | 168 %i = call i1 @llvm.nacl.atomic.is.lock.free(i32 %bs, i8* %ptr) |
167 %r = zext i1 %i to i32 | 169 %r = zext i1 %i to i32 |
168 ret i32 %r | 170 ret i32 %r |
169 } | 171 } |
170 ; CHECK: AtomicIsLockFree byte size should be compile-time const | 172 ; CHECK: AtomicIsLockFree byte size should be compile-time const |
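
The valid form passes a compile-time-constant byte size; a sketch using 4 as an arbitrary example size:

define i32 @ok_atomic_is_lock_free(i32 %iptr) {
entry:
  %ptr = inttoptr i32 %iptr to i8*
  ; Constant byte size (4) instead of the variable %bs above.
  %i = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr)
  %r = zext i1 %i to i32
  ret i32 %r
}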
| 173 |
| 174 |
| 175 ;;; Test bad non-constant memory ordering values. |
| 176 |
| 177 define i32 @error_atomic_load_8_nonconst(i32 %iptr) { |
| 178 entry: |
| 179 %ptr = inttoptr i32 %iptr to i8* |
| 180 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 %iptr) |
| 181 %r = zext i8 %i to i32 |
| 182 ret i32 %r |
| 183 } |
| 184 ; CHECK: Unexpected memory ordering for AtomicLoad |
| 185 |
| 186 define void @error_atomic_store_32_nonconst(i32 %iptr, i32 %v) { |
| 187 entry: |
| 188 %ptr = inttoptr i32 %iptr to i32* |
| 189 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 %v) |
| 190 ret void |
| 191 } |
| 192 ; CHECK: Unexpected memory ordering for AtomicStore |
| 193 |
| 194 define i32 @error_atomic_rmw_add_8_nonconst(i32 %iptr, i32 %v) { |
| 195 entry: |
| 196 %trunc = trunc i32 %v to i8 |
| 197 %ptr = inttoptr i32 %iptr to i8* |
| 198 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 %iptr) |
| 199 %a_ext = zext i8 %a to i32 |
| 200 ret i32 %a_ext |
| 201 } |
| 202 ; CHECK: Unexpected memory ordering for AtomicRMW |
| 203 |
 | 204 define i32 @error_atomic_cmpxchg_32_success_nonconst_1(i32 %iptr, i32 %expected, i32 %desired) { |
| 205 entry: |
| 206 %ptr = inttoptr i32 %iptr to i32* |
| 207 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
| 208 i32 %desired, i32 %iptr, i32 6) |
| 209 ret i32 %old |
| 210 } |
| 211 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
| 212 |
 | 213 define i32 @error_atomic_cmpxchg_32_success_nonconst_2(i32 %iptr, i32 %expected, i32 %desired) { |
| 214 entry: |
| 215 %ptr = inttoptr i32 %iptr to i32* |
| 216 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
| 217 i32 %desired, i32 6, i32 %iptr) |
| 218 ret i32 %old |
| 219 } |
| 220 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
| 221 |
| 222 define void @error_atomic_fence_nonconst(i32 %v) { |
| 223 entry: |
| 224 call void @llvm.nacl.atomic.fence(i32 %v) |
| 225 ret void |
| 226 } |
| 227 ; CHECK: Unexpected memory ordering for AtomicFence |