OLD | NEW |
1 ; Test that some errors trigger when the usage of NaCl atomic | 1 ; Test that some errors trigger when the usage of NaCl atomic |
2 ; intrinsics does not match the required ABI. | 2 ; intrinsics does not match the required ABI. |
3 ; REQUIRES: allow_dump | 3 ; REQUIRES: allow_dump |
4 | 4 |
5 ; RUN: %p2i -i %s --args --verbose none --exit-success -threads=0 2>&1 \ | 5 ; RUN: %p2i -i %s --args --verbose none --exit-success -threads=0 2>&1 \ |
6 ; RUN: | FileCheck %s | 6 ; RUN: | FileCheck %s |
7 | 7 |
8 declare i8 @llvm.nacl.atomic.load.i8(i8*, i32) | 8 declare i8 @llvm.nacl.atomic.load.i8(i8*, i32) |
9 declare i16 @llvm.nacl.atomic.load.i16(i16*, i32) | 9 declare i16 @llvm.nacl.atomic.load.i16(i16*, i32) |
10 declare i64 @llvm.nacl.atomic.load.i64(i64*, i32) | 10 declare i64 @llvm.nacl.atomic.load.i64(i64*, i32) |
11 declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32) | 11 declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32) |
12 declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32) | 12 declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32) |
13 declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32) | 13 declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32) |
14 declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32) | 14 declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32) |
15 declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32) | 15 declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32) |
16 declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32) | 16 declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32) |
17 declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32) | 17 declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32) |
18 declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32) | 18 declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32) |
19 declare void @llvm.nacl.atomic.fence(i32) | 19 declare void @llvm.nacl.atomic.fence(i32) |
20 declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*) | 20 declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*) |
21 | 21 |
22 ;;; Load | 22 ;;; Load |
23 ;;; Check unexpected memory order parameter (release=4 and acq_rel=5 | 23 ;;; Check unexpected memory order parameter (release=4 and acq_rel=5 |
24 ;;; are disallowed). | 24 ;;; are disallowed). |
25 | 25 |
26 define i32 @error_atomic_load_8(i32 %iptr) { | 26 define internal i32 @error_atomic_load_8(i32 %iptr) { |
27 entry: | 27 entry: |
28 %ptr = inttoptr i32 %iptr to i8* | 28 %ptr = inttoptr i32 %iptr to i8* |
29 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 0) | 29 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 0) |
30 %r = zext i8 %i to i32 | 30 %r = zext i8 %i to i32 |
31 ret i32 %r | 31 ret i32 %r |
32 } | 32 } |
33 ; CHECK: Unexpected memory ordering for AtomicLoad | 33 ; CHECK: Unexpected memory ordering for AtomicLoad |
34 | 34 |
35 define i32 @error_atomic_load_16(i32 %iptr) { | 35 define internal i32 @error_atomic_load_16(i32 %iptr) { |
36 entry: | 36 entry: |
37 %ptr = inttoptr i32 %iptr to i16* | 37 %ptr = inttoptr i32 %iptr to i16* |
38 %i = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 4) | 38 %i = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 4) |
39 %r = zext i16 %i to i32 | 39 %r = zext i16 %i to i32 |
40 ret i32 %r | 40 ret i32 %r |
41 } | 41 } |
42 ; CHECK: Unexpected memory ordering for AtomicLoad | 42 ; CHECK: Unexpected memory ordering for AtomicLoad |
43 | 43 |
44 define i64 @error_atomic_load_64(i32 %iptr) { | 44 define internal i64 @error_atomic_load_64(i32 %iptr) { |
45 entry: | 45 entry: |
46 %ptr = inttoptr i32 %iptr to i64* | 46 %ptr = inttoptr i32 %iptr to i64* |
47 %r = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 5) | 47 %r = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 5) |
48 ret i64 %r | 48 ret i64 %r |
49 } | 49 } |
50 ; CHECK: Unexpected memory ordering for AtomicLoad | 50 ; CHECK: Unexpected memory ordering for AtomicLoad |
51 | 51 |
52 | 52 |
53 ;;; Store | 53 ;;; Store |
54 ;;; consume=2, acquire=3, acq_rel=5 are disallowed | 54 ;;; consume=2, acquire=3, acq_rel=5 are disallowed |
55 | 55 |
56 define void @error_atomic_store_32(i32 %iptr, i32 %v) { | 56 define internal void @error_atomic_store_32(i32 %iptr, i32 %v) { |
57 entry: | 57 entry: |
58 %ptr = inttoptr i32 %iptr to i32* | 58 %ptr = inttoptr i32 %iptr to i32* |
59 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 2) | 59 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 2) |
60 ret void | 60 ret void |
61 } | 61 } |
62 ; CHECK: Unexpected memory ordering for AtomicStore | 62 ; CHECK: Unexpected memory ordering for AtomicStore |
63 | 63 |
64 define void @error_atomic_store_64(i32 %iptr, i64 %v) { | 64 define internal void @error_atomic_store_64(i32 %iptr, i64 %v) { |
65 entry: | 65 entry: |
66 %ptr = inttoptr i32 %iptr to i64* | 66 %ptr = inttoptr i32 %iptr to i64* |
67 call void @llvm.nacl.atomic.store.i64(i64 %v, i64* %ptr, i32 3) | 67 call void @llvm.nacl.atomic.store.i64(i64 %v, i64* %ptr, i32 3) |
68 ret void | 68 ret void |
69 } | 69 } |
70 ; CHECK: Unexpected memory ordering for AtomicStore | 70 ; CHECK: Unexpected memory ordering for AtomicStore |
71 | 71 |
72 define void @error_atomic_store_64_const(i32 %iptr) { | 72 define internal void @error_atomic_store_64_const(i32 %iptr) { |
73 entry: | 73 entry: |
74 %ptr = inttoptr i32 %iptr to i64* | 74 %ptr = inttoptr i32 %iptr to i64* |
75 call void @llvm.nacl.atomic.store.i64(i64 12345678901234, i64* %ptr, i32 5) | 75 call void @llvm.nacl.atomic.store.i64(i64 12345678901234, i64* %ptr, i32 5) |
76 ret void | 76 ret void |
77 } | 77 } |
78 ; CHECK: Unexpected memory ordering for AtomicStore | 78 ; CHECK: Unexpected memory ordering for AtomicStore |
79 | 79 |
80 ;;; RMW | 80 ;;; RMW |
81 ;;; Test atomic memory order and operation. | 81 ;;; Test atomic memory order and operation. |
82 ;;; Modes 3:6 allowed. | 82 ;;; Modes 3:6 allowed. |
83 | 83 |
84 define i32 @error_atomic_rmw_add_8(i32 %iptr, i32 %v) { | 84 define internal i32 @error_atomic_rmw_add_8(i32 %iptr, i32 %v) { |
85 entry: | 85 entry: |
86 %trunc = trunc i32 %v to i8 | 86 %trunc = trunc i32 %v to i8 |
87 %ptr = inttoptr i32 %iptr to i8* | 87 %ptr = inttoptr i32 %iptr to i8* |
88 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 1) | 88 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 1) |
89 %a_ext = zext i8 %a to i32 | 89 %a_ext = zext i8 %a to i32 |
90 ret i32 %a_ext | 90 ret i32 %a_ext |
91 } | 91 } |
92 ; CHECK: Unexpected memory ordering for AtomicRMW | 92 ; CHECK: Unexpected memory ordering for AtomicRMW |
93 | 93 |
94 define i64 @error_atomic_rmw_add_64(i32 %iptr, i64 %v) { | 94 define internal i64 @error_atomic_rmw_add_64(i32 %iptr, i64 %v) { |
95 entry: | 95 entry: |
96 %ptr = inttoptr i32 %iptr to i64* | 96 %ptr = inttoptr i32 %iptr to i64* |
97 %a = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 %v, i32 7) | 97 %a = call i64 @llvm.nacl.atomic.rmw.i64(i32 1, i64* %ptr, i64 %v, i32 7) |
98 ret i64 %a | 98 ret i64 %a |
99 } | 99 } |
100 ; CHECK: Unexpected memory ordering for AtomicRMW | 100 ; CHECK: Unexpected memory ordering for AtomicRMW |
101 | 101 |
102 define i32 @error_atomic_rmw_add_16(i32 %iptr, i32 %v) { | 102 define internal i32 @error_atomic_rmw_add_16(i32 %iptr, i32 %v) { |
103 entry: | 103 entry: |
104 %trunc = trunc i32 %v to i16 | 104 %trunc = trunc i32 %v to i16 |
105 %ptr = inttoptr i32 %iptr to i16* | 105 %ptr = inttoptr i32 %iptr to i16* |
106 %a = call i16 @llvm.nacl.atomic.rmw.i16(i32 0, i16* %ptr, i16 %trunc, i32 6) | 106 %a = call i16 @llvm.nacl.atomic.rmw.i16(i32 0, i16* %ptr, i16 %trunc, i32 6) |
107 %a_ext = zext i16 %a to i32 | 107 %a_ext = zext i16 %a to i32 |
108 ret i32 %a_ext | 108 ret i32 %a_ext |
109 } | 109 } |
110 ; CHECK: Unknown AtomicRMW operation | 110 ; CHECK: Unknown AtomicRMW operation |
111 | 111 |
112 define i32 @error_atomic_rmw_add_32(i32 %iptr, i32 %v) { | 112 define internal i32 @error_atomic_rmw_add_32(i32 %iptr, i32 %v) { |
113 entry: | 113 entry: |
114 %ptr = inttoptr i32 %iptr to i32* | 114 %ptr = inttoptr i32 %iptr to i32* |
115 %a = call i32 @llvm.nacl.atomic.rmw.i32(i32 7, i32* %ptr, i32 %v, i32 6) | 115 %a = call i32 @llvm.nacl.atomic.rmw.i32(i32 7, i32* %ptr, i32 %v, i32 6) |
116 ret i32 %a | 116 ret i32 %a |
117 } | 117 } |
118 ; CHECK: Unknown AtomicRMW operation | 118 ; CHECK: Unknown AtomicRMW operation |
119 | 119 |
120 define i32 @error_atomic_rmw_add_32_max(i32 %iptr, i32 %v) { | 120 define internal i32 @error_atomic_rmw_add_32_max(i32 %iptr, i32 %v) { |
121 entry: | 121 entry: |
122 %ptr = inttoptr i32 %iptr to i32* | 122 %ptr = inttoptr i32 %iptr to i32* |
123 %a = call i32 @llvm.nacl.atomic.rmw.i32(i32 4294967295, i32* %ptr, i32 %v, i32 6) | 123 %a = call i32 @llvm.nacl.atomic.rmw.i32(i32 4294967295, i32* %ptr, i32 %v, i32 6) |
124 ret i32 %a | 124 ret i32 %a |
125 } | 125 } |
126 ; CHECK: Unknown AtomicRMW operation | 126 ; CHECK: Unknown AtomicRMW operation |
127 | 127 |
128 ;;; Cmpxchg | 128 ;;; Cmpxchg |
129 | 129 |
130 define i32 @error_atomic_cmpxchg_32_success(i32 %iptr, i32 %expected, i32 %desired) { | 130 define internal i32 @error_atomic_cmpxchg_32_success(i32 %iptr, i32 %expected, |
 | 131 i32 %desired) { |
131 entry: | 132 entry: |
132 %ptr = inttoptr i32 %iptr to i32* | 133 %ptr = inttoptr i32 %iptr to i32* |
133 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, | 134 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
134 i32 %desired, i32 0, i32 6) | 135 i32 %desired, i32 0, i32 6) |
135 ret i32 %old | 136 ret i32 %old |
136 } | 137 } |
137 ; CHECK: Unexpected memory ordering for AtomicCmpxchg | 138 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
138 | 139 |
139 define i32 @error_atomic_cmpxchg_32_failure(i32 %iptr, i32 %expected, i32 %desired) { | 140 define internal i32 @error_atomic_cmpxchg_32_failure(i32 %iptr, i32 %expected, |
 | 141 i32 %desired) { |
140 entry: | 142 entry: |
141 %ptr = inttoptr i32 %iptr to i32* | 143 %ptr = inttoptr i32 %iptr to i32* |
142 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, | 144 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
143 i32 %desired, i32 6, i32 0) | 145 i32 %desired, i32 6, i32 0) |
144 ret i32 %old | 146 ret i32 %old |
145 } | 147 } |
146 ; CHECK: Unexpected memory ordering for AtomicCmpxchg | 148 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
147 | 149 |
148 define i64 @error_atomic_cmpxchg_64_failure(i32 %iptr, i64 %expected, i64 %desired) { | 150 define internal i64 @error_atomic_cmpxchg_64_failure(i32 %iptr, i64 %expected, |
 | 151 i64 %desired) { |
149 entry: | 152 entry: |
150 %ptr = inttoptr i32 %iptr to i64* | 153 %ptr = inttoptr i32 %iptr to i64* |
151 %old = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 %expected, | 154 %old = call i64 @llvm.nacl.atomic.cmpxchg.i64(i64* %ptr, i64 %expected, |
152 i64 %desired, i32 4, i32 1) | 155 i64 %desired, i32 4, i32 1) |
153 ret i64 %old | 156 ret i64 %old |
154 } | 157 } |
155 ; CHECK: Unexpected memory ordering for AtomicCmpxchg | 158 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
156 | 159 |
157 ;;; Fence and is-lock-free. | 160 ;;; Fence and is-lock-free. |
158 | 161 |
159 define void @error_atomic_fence() { | 162 define internal void @error_atomic_fence() { |
160 entry: | 163 entry: |
161 call void @llvm.nacl.atomic.fence(i32 0) | 164 call void @llvm.nacl.atomic.fence(i32 0) |
162 ret void | 165 ret void |
163 } | 166 } |
164 ; CHECK: Unexpected memory ordering for AtomicFence | 167 ; CHECK: Unexpected memory ordering for AtomicFence |
165 | 168 |
166 define i32 @error_atomic_is_lock_free_var(i32 %iptr, i32 %bs) { | 169 define internal i32 @error_atomic_is_lock_free_var(i32 %iptr, i32 %bs) { |
167 entry: | 170 entry: |
168 %ptr = inttoptr i32 %iptr to i8* | 171 %ptr = inttoptr i32 %iptr to i8* |
169 %i = call i1 @llvm.nacl.atomic.is.lock.free(i32 %bs, i8* %ptr) | 172 %i = call i1 @llvm.nacl.atomic.is.lock.free(i32 %bs, i8* %ptr) |
170 %r = zext i1 %i to i32 | 173 %r = zext i1 %i to i32 |
171 ret i32 %r | 174 ret i32 %r |
172 } | 175 } |
173 ; CHECK: AtomicIsLockFree byte size should be compile-time const | 176 ; CHECK: AtomicIsLockFree byte size should be compile-time const |
174 | 177 |
175 | 178 |
176 ;;; Test bad non-constant memory ordering values. | 179 ;;; Test bad non-constant memory ordering values. |
177 | 180 |
178 define i32 @error_atomic_load_8_nonconst(i32 %iptr) { | 181 define internal i32 @error_atomic_load_8_nonconst(i32 %iptr) { |
179 entry: | 182 entry: |
180 %ptr = inttoptr i32 %iptr to i8* | 183 %ptr = inttoptr i32 %iptr to i8* |
181 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 %iptr) | 184 %i = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 %iptr) |
182 %r = zext i8 %i to i32 | 185 %r = zext i8 %i to i32 |
183 ret i32 %r | 186 ret i32 %r |
184 } | 187 } |
185 ; CHECK: Unexpected memory ordering for AtomicLoad | 188 ; CHECK: Unexpected memory ordering for AtomicLoad |
186 | 189 |
187 define void @error_atomic_store_32_nonconst(i32 %iptr, i32 %v) { | 190 define internal void @error_atomic_store_32_nonconst(i32 %iptr, i32 %v) { |
188 entry: | 191 entry: |
189 %ptr = inttoptr i32 %iptr to i32* | 192 %ptr = inttoptr i32 %iptr to i32* |
190 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 %v) | 193 call void @llvm.nacl.atomic.store.i32(i32 %v, i32* %ptr, i32 %v) |
191 ret void | 194 ret void |
192 } | 195 } |
193 ; CHECK: Unexpected memory ordering for AtomicStore | 196 ; CHECK: Unexpected memory ordering for AtomicStore |
194 | 197 |
195 define i32 @error_atomic_rmw_add_8_nonconst(i32 %iptr, i32 %v) { | 198 define internal i32 @error_atomic_rmw_add_8_nonconst(i32 %iptr, i32 %v) { |
196 entry: | 199 entry: |
197 %trunc = trunc i32 %v to i8 | 200 %trunc = trunc i32 %v to i8 |
198 %ptr = inttoptr i32 %iptr to i8* | 201 %ptr = inttoptr i32 %iptr to i8* |
199 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 %iptr) | 202 %a = call i8 @llvm.nacl.atomic.rmw.i8(i32 1, i8* %ptr, i8 %trunc, i32 %iptr) |
200 %a_ext = zext i8 %a to i32 | 203 %a_ext = zext i8 %a to i32 |
201 ret i32 %a_ext | 204 ret i32 %a_ext |
202 } | 205 } |
203 ; CHECK: Unexpected memory ordering for AtomicRMW | 206 ; CHECK: Unexpected memory ordering for AtomicRMW |
204 | 207 |
205 define i32 @error_atomic_cmpxchg_32_success_nonconst_1(i32 %iptr, i32 %expected, i32 %desired) { | 208 define internal i32 @error_atomic_cmpxchg_32_success_nonconst_1(i32 %iptr, i32 %expected, |
 | 209 i32 %desired) { |
206 entry: | 210 entry: |
207 %ptr = inttoptr i32 %iptr to i32* | 211 %ptr = inttoptr i32 %iptr to i32* |
208 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, | 212 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
209 i32 %desired, i32 %iptr, i32 6) | 213 i32 %desired, i32 %iptr, i32 6) |
210 ret i32 %old | 214 ret i32 %old |
211 } | 215 } |
212 ; CHECK: Unexpected memory ordering for AtomicCmpxchg | 216 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
213 | 217 |
214 define i32 @error_atomic_cmpxchg_32_success_nonconst_2(i32 %iptr, i32 %expected, i32 %desired) { | 218 define internal i32 @error_atomic_cmpxchg_32_success_nonconst_2(i32 %iptr, i32 %expected, |
 | 219 i32 %desired) { |
215 entry: | 220 entry: |
216 %ptr = inttoptr i32 %iptr to i32* | 221 %ptr = inttoptr i32 %iptr to i32* |
217 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, | 222 %old = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %expected, |
218 i32 %desired, i32 6, i32 %iptr) | 223 i32 %desired, i32 6, i32 %iptr) |
219 ret i32 %old | 224 ret i32 %old |
220 } | 225 } |
221 ; CHECK: Unexpected memory ordering for AtomicCmpxchg | 226 ; CHECK: Unexpected memory ordering for AtomicCmpxchg |
222 | 227 |
223 define void @error_atomic_fence_nonconst(i32 %v) { | 228 define internal void @error_atomic_fence_nonconst(i32 %v) { |
224 entry: | 229 entry: |
225 call void @llvm.nacl.atomic.fence(i32 %v) | 230 call void @llvm.nacl.atomic.fence(i32 %v) |
226 ret void | 231 ret void |
227 } | 232 } |
228 ; CHECK: Unexpected memory ordering for AtomicFence | 233 ; CHECK: Unexpected memory ordering for AtomicFence |
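For reference, the memory-order immediates in this test appear to follow the
C11/C++11 encoding implied by the file's own comments (release=4, acq_rel=5,
consume=2, acquire=3): 1=relaxed, 2=consume, 3=acquire, 4=release, 5=acq_rel,
6=seq_cst, with 0 and values above 6 rejected outright. As a minimal sketch
(not part of the diff; %iptr and %ptr are illustrative names), a call these
ABI checks would be expected to accept uses a constant seq_cst ordering:

  %ptr = inttoptr i32 %iptr to i8*
  %ok = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) ; seq_cst load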