OLD | NEW |
1 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s -check-prefix=CLEANED | 1 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s -check-prefix=CLEANED |
2 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s | 2 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s |
3 | 3 |
4 ; CLEANED-NOT: call i32 @llvm.nacl.setjmp | 4 ; CLEANED-NOT: call i32 @llvm.nacl.setjmp |
5 ; CLEANED-NOT: call void @llvm.nacl.longjmp | 5 ; CLEANED-NOT: call void @llvm.nacl.longjmp |
6 ; CLEANED-NOT: call {{.*}} @llvm.nacl.atomic | 6 ; CLEANED-NOT: call {{.*}} @llvm.nacl.atomic |
7 | 7 |
8 declare i32 @llvm.nacl.setjmp(i8*) | 8 declare i32 @llvm.nacl.setjmp(i8*) |
9 declare void @llvm.nacl.longjmp(i8*, i32) | 9 declare void @llvm.nacl.longjmp(i8*, i32) |
10 | 10 |
(...skipping 35 matching lines...) |
46 } | 46 } |
47 | 47 |
48 define void @call_longjmp(i8* %arg, i32 %num) { | 48 define void @call_longjmp(i8* %arg, i32 %num) { |
49 call void @llvm.nacl.longjmp(i8* %arg, i32 %num) | 49 call void @llvm.nacl.longjmp(i8* %arg, i32 %num) |
50 ; CHECK: call void @longjmp(i8* %arg, i32 %num){{$}} | 50 ; CHECK: call void @longjmp(i8* %arg, i32 %num){{$}} |
51 ret void | 51 ret void |
52 } | 52 } |
53 | 53 |
54 ; atomics. | 54 ; atomics. |
55 | 55 |
56 ; CHECK: @test_fetch_and_add_i32 | 56 ; CHECK-LABEL: @test_atomic_acquire |
| 57 define i32 @test_atomic_acquire(i32* %ptr) { |
| 58 ; CHECK: %1 = load atomic i32* %ptr acquire, align 4 |
| 59 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 3) |
| 60 ret i32 %1 |
| 61 } |
| 62 |
| 63 ; CHECK-LABEL: @test_atomic_release |
| 64 define void @test_atomic_release(i32* %ptr, i32 %value) { |
| 65 ; CHECK: store atomic i32 %value, i32* %ptr release, align 4 |
| 66 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 4) |
| 67 ret void |
| 68 } |
| 69 |
| 70 ; CHECK-LABEL: @test_atomic_acquire_release |
| 71 define i32 @test_atomic_acquire_release(i32* %ptr, i32 %value) { |
| 72 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value acq_rel |
| 73 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 5) |
| 74 ret i32 %1 |
| 75 } |
| 76 |
| 77 ; CHECK-LABEL: @test_fetch_and_add_i32 |
57 define i32 @test_fetch_and_add_i32(i32* %ptr, i32 %value) { | 78 define i32 @test_fetch_and_add_i32(i32* %ptr, i32 %value) { |
58 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value seq_cst | 79 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value seq_cst |
59 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 6) | 80 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 6) |
60 ret i32 %1 | 81 ret i32 %1 |
61 } | 82 } |
62 | 83 |
63 ; CHECK: @test_fetch_and_sub_i32 | 84 ; CHECK-LABEL: @test_fetch_and_sub_i32 |
64 define i32 @test_fetch_and_sub_i32(i32* %ptr, i32 %value) { | 85 define i32 @test_fetch_and_sub_i32(i32* %ptr, i32 %value) { |
65 ; CHECK: %1 = atomicrmw sub i32* %ptr, i32 %value seq_cst | 86 ; CHECK: %1 = atomicrmw sub i32* %ptr, i32 %value seq_cst |
66 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 2, i32* %ptr, i32 %value, i32 6) | 87 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 2, i32* %ptr, i32 %value, i32 6) |
67 ret i32 %1 | 88 ret i32 %1 |
68 } | 89 } |
69 | 90 |
70 ; CHECK: @test_fetch_and_or_i32 | 91 ; CHECK-LABEL: @test_fetch_and_or_i32 |
71 define i32 @test_fetch_and_or_i32(i32* %ptr, i32 %value) { | 92 define i32 @test_fetch_and_or_i32(i32* %ptr, i32 %value) { |
72 ; CHECK: %1 = atomicrmw or i32* %ptr, i32 %value seq_cst | 93 ; CHECK: %1 = atomicrmw or i32* %ptr, i32 %value seq_cst |
73 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 3, i32* %ptr, i32 %value, i32 6) | 94 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 3, i32* %ptr, i32 %value, i32 6) |
74 ret i32 %1 | 95 ret i32 %1 |
75 } | 96 } |
76 | 97 |
77 ; CHECK: @test_fetch_and_and_i32 | 98 ; CHECK-LABEL: @test_fetch_and_and_i32 |
78 define i32 @test_fetch_and_and_i32(i32* %ptr, i32 %value) { | 99 define i32 @test_fetch_and_and_i32(i32* %ptr, i32 %value) { |
79 ; CHECK: %1 = atomicrmw and i32* %ptr, i32 %value seq_cst | 100 ; CHECK: %1 = atomicrmw and i32* %ptr, i32 %value seq_cst |
80 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 4, i32* %ptr, i32 %value, i32 6) | 101 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 4, i32* %ptr, i32 %value, i32 6) |
81 ret i32 %1 | 102 ret i32 %1 |
82 } | 103 } |
83 | 104 |
84 ; CHECK: @test_fetch_and_xor_i32 | 105 ; CHECK-LABEL: @test_fetch_and_xor_i32 |
85 define i32 @test_fetch_and_xor_i32(i32* %ptr, i32 %value) { | 106 define i32 @test_fetch_and_xor_i32(i32* %ptr, i32 %value) { |
86 ; CHECK: %1 = atomicrmw xor i32* %ptr, i32 %value seq_cst | 107 ; CHECK: %1 = atomicrmw xor i32* %ptr, i32 %value seq_cst |
87 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 5, i32* %ptr, i32 %value, i32 6) | 108 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 5, i32* %ptr, i32 %value, i32 6) |
88 ret i32 %1 | 109 ret i32 %1 |
89 } | 110 } |
90 | 111 |
91 ; CHECK: @test_val_compare_and_swap_i32 | 112 ; CHECK-LABEL: @test_val_compare_and_swap_i32 |
92 define i32 @test_val_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 113 define i32 @test_val_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
93 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 114 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst |
94 ; CHECK-NEXT: %2 = extractvalue { i32, i1 } %1, 0 | 115 ; CHECK-NEXT: %2 = extractvalue { i32, i1 } %1, 0 |
95 ; CHECK-NEXT: ret i32 %2 | 116 ; CHECK-NEXT: ret i32 %2 |
96 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 117 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
97 ret i32 %1 | 118 ret i32 %1 |
98 } | 119 } |
99 | 120 |
100 ; CHECK: @test_val_compare_and_swap_i32_new | 121 ; CHECK-LABEL: @test_val_compare_and_swap_i32_new |
101 define i32 @test_val_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { | 122 define i32 @test_val_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { |
102 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 123 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst |
103 ; CHECK-NEXT: %res2 = extractvalue { i32, i1 } %1, 0 | 124 ; CHECK-NEXT: %res2 = extractvalue { i32, i1 } %1, 0 |
104 ; CHECK-NEXT: ret i32 %res2 | 125 ; CHECK-NEXT: ret i32 %res2 |
105 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 126 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
106 %success = icmp eq i32 %res, %oldval | 127 %success = icmp eq i32 %res, %oldval |
107 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 | 128 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 |
108 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 | 129 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 |
109 %val = extractvalue { i32, i1 } %res.insert.success, 0 | 130 %val = extractvalue { i32, i1 } %res.insert.success, 0 |
110 ret i32 %val | 131 ret i32 %val |
111 } | 132 } |
112 | 133 |
113 ; CHECK: @test_bool_compare_and_swap_i32 | 134 ; CHECK-LABEL: @test_bool_compare_and_swap_i32 |
114 define i1 @test_bool_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 135 define i1 @test_bool_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
115 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 136 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst |
116 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 | 137 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 |
117 ; CHECK-NEXT: ret i1 %success | 138 ; CHECK-NEXT: ret i1 %success |
118 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 139 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
119 %2 = icmp eq i32 %1, %oldval | 140 %2 = icmp eq i32 %1, %oldval |
120 ret i1 %2 | 141 ret i1 %2 |
121 } | 142 } |
122 | 143 |
123 ; CHECK: @test_bool_compare_and_swap_i32_new | 144 ; CHECK-LABEL: @test_bool_compare_and_swap_i32_new |
124 define i1 @test_bool_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { | 145 define i1 @test_bool_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { |
125 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 146 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst |
126 ; CHECK-NEXT: %suc = extractvalue { i32, i1 } %1, 1 | 147 ; CHECK-NEXT: %suc = extractvalue { i32, i1 } %1, 1 |
127 ; CHECK-NEXT: ret i1 %suc | 148 ; CHECK-NEXT: ret i1 %suc |
128 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 149 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
129 %success = icmp eq i32 %res, %oldval | 150 %success = icmp eq i32 %res, %oldval |
130 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 | 151 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 |
131 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 | 152 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 |
132 %suc = extractvalue { i32, i1 } %res.insert.success, 1 | 153 %suc = extractvalue { i32, i1 } %res.insert.success, 1 |
133 ret i1 %suc | 154 ret i1 %suc |
134 } | 155 } |
135 | 156 |
136 ; CHECK: @test_bool_compare_and_swap_i32_reordered | 157 ; CHECK-LABEL: @test_bool_compare_and_swap_i32_reordered |
137 define i1 @test_bool_compare_and_swap_i32_reordered(i32* %ptr, i32 %oldval, i32 %newval) { | 158 define i1 @test_bool_compare_and_swap_i32_reordered(i32* %ptr, i32 %oldval, i32 %newval) { |
138 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 159 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst |
139 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 | 160 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 |
140 ; CHECK-NEXT: ret i1 %success | 161 ; CHECK-NEXT: ret i1 %success |
141 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 162 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
142 %2 = icmp eq i32 %oldval, %1 ; Note operands are swapped from above. | 163 %2 = icmp eq i32 %oldval, %1 ; Note operands are swapped from above. |
143 ret i1 %2 | 164 ret i1 %2 |
144 } | 165 } |
145 | 166 |
146 ; CHECK: @test_struct_compare_and_swap_i32 | 167 ; CHECK-LABEL: @test_struct_compare_and_swap_i32 |
147 define { i32, i1 } @test_struct_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 168 define { i32, i1 } @test_struct_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
148 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 169 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst |
149 ; CHECK-NEXT: ret { i32, i1 } %1 | 170 ; CHECK-NEXT: ret { i32, i1 } %1 |
150 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 171 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
151 %2 = icmp eq i32 %1, %oldval | 172 %2 = icmp eq i32 %1, %oldval |
152 %3 = insertvalue { i32, i1 } undef, i32 %1, 0 | 173 %3 = insertvalue { i32, i1 } undef, i32 %1, 0 |
153 %4 = insertvalue { i32, i1 } %3, i1 %2, 1 | 174 %4 = insertvalue { i32, i1 } %3, i1 %2, 1 |
154 ret { i32, i1 } %4 | 175 ret { i32, i1 } %4 |
155 } | 176 } |
156 | 177 |
157 ; CHECK: @test_c11_fence | 178 ; CHECK-LABEL: @test_c11_fence |
158 define void @test_c11_fence() { | 179 define void @test_c11_fence() { |
159 ; CHECK: fence seq_cst | 180 ; CHECK: fence seq_cst |
160 call void @llvm.nacl.atomic.fence(i32 6) | 181 call void @llvm.nacl.atomic.fence(i32 6) |
161 ret void | 182 ret void |
162 } | 183 } |
163 | 184 |
164 ; CHECK: @test_synchronize | 185 ; CHECK-LABEL: @test_synchronize |
165 define void @test_synchronize() { | 186 define void @test_synchronize() { |
166 ; CHECK: call void asm sideeffect "", "~{memory}"() | 187 ; CHECK: call void asm sideeffect "", "~{memory}"() |
167 ; CHECK: fence seq_cst | 188 ; CHECK: fence seq_cst |
168 ; CHECK: call void asm sideeffect "", "~{memory}"() | 189 ; CHECK: call void asm sideeffect "", "~{memory}"() |
169 call void @llvm.nacl.atomic.fence.all() | 190 call void @llvm.nacl.atomic.fence.all() |
170 ret void | 191 ret void |
171 } | 192 } |
172 | 193 |
173 ; CHECK: @test_is_lock_free_1 | 194 ; CHECK-LABEL: @test_is_lock_free_1 |
174 define i1 @test_is_lock_free_1(i8* %ptr) { | 195 define i1 @test_is_lock_free_1(i8* %ptr) { |
175 ; CHECK: ret i1 {{true|false}} | 196 ; CHECK: ret i1 {{true|false}} |
176 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 1, i8* %ptr) | 197 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 1, i8* %ptr) |
177 ret i1 %res | 198 ret i1 %res |
178 } | 199 } |
179 | 200 |
180 ; CHECK: @test_is_lock_free_2 | 201 ; CHECK-LABEL: @test_is_lock_free_2 |
181 define i1 @test_is_lock_free_2(i16* %ptr) { | 202 define i1 @test_is_lock_free_2(i16* %ptr) { |
182 ; CHECK: ret i1 {{true|false}} | 203 ; CHECK: ret i1 {{true|false}} |
183 %ptr2 = bitcast i16* %ptr to i8* | 204 %ptr2 = bitcast i16* %ptr to i8* |
184 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 2, i8* %ptr2) | 205 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 2, i8* %ptr2) |
185 ret i1 %res | 206 ret i1 %res |
186 } | 207 } |
187 | 208 |
188 ; CHECK: @test_is_lock_free_4 | 209 ; CHECK-LABEL: @test_is_lock_free_4 |
189 define i1 @test_is_lock_free_4(i32* %ptr) { | 210 define i1 @test_is_lock_free_4(i32* %ptr) { |
190 ; CHECK: ret i1 {{true|false}} | 211 ; CHECK: ret i1 {{true|false}} |
191 %ptr2 = bitcast i32* %ptr to i8* | 212 %ptr2 = bitcast i32* %ptr to i8* |
192 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr2) | 213 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr2) |
193 ret i1 %res | 214 ret i1 %res |
194 } | 215 } |
195 | 216 |
196 ; CHECK: @test_is_lock_free_8 | 217 ; CHECK-LABEL: @test_is_lock_free_8 |
197 define i1 @test_is_lock_free_8(i64* %ptr) { | 218 define i1 @test_is_lock_free_8(i64* %ptr) { |
198 ; CHECK: ret i1 {{true|false}} | 219 ; CHECK: ret i1 {{true|false}} |
199 %ptr2 = bitcast i64* %ptr to i8* | 220 %ptr2 = bitcast i64* %ptr to i8* |
200 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 8, i8* %ptr2) | 221 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 8, i8* %ptr2) |
201 ret i1 %res | 222 ret i1 %res |
202 } | 223 } |
203 | 224 |
204 ; CHECK: @test_lock_test_and_set_i32 | 225 ; CHECK-LABEL: @test_lock_test_and_set_i32 |
205 define i32 @test_lock_test_and_set_i32(i32* %ptr, i32 %value) { | 226 define i32 @test_lock_test_and_set_i32(i32* %ptr, i32 %value) { |
206 ; CHECK: %1 = atomicrmw xchg i32* %ptr, i32 %value seq_cst | 227 ; CHECK: %1 = atomicrmw xchg i32* %ptr, i32 %value seq_cst |
207 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 6, i32* %ptr, i32 %value, i32 6) | 228 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 6, i32* %ptr, i32 %value, i32 6) |
208 ret i32 %1 | 229 ret i32 %1 |
209 } | 230 } |
210 | 231 |
211 ; CHECK: @test_lock_release_i32 | 232 ; CHECK-LABEL: @test_lock_release_i32 |
212 define void @test_lock_release_i32(i32* %ptr) { | 233 define void @test_lock_release_i32(i32* %ptr) { |
213 ; Note that the 'release' was changed to a 'seq_cst'. | 234 ; Note that the 'release' was changed to a 'seq_cst'. |
214 ; CHECK: store atomic i32 0, i32* %ptr seq_cst, align 4 | 235 ; CHECK: store atomic i32 0, i32* %ptr seq_cst, align 4 |
215 call void @llvm.nacl.atomic.store.i32(i32 0, i32* %ptr, i32 6) | 236 call void @llvm.nacl.atomic.store.i32(i32 0, i32* %ptr, i32 6) |
216 ret void | 237 ret void |
217 } | 238 } |
218 | 239 |
219 ; CHECK: @test_atomic_load_i8 | 240 ; CHECK-LABEL: @test_atomic_load_i8 |
220 define zeroext i8 @test_atomic_load_i8(i8* %ptr) { | 241 define zeroext i8 @test_atomic_load_i8(i8* %ptr) { |
221 ; CHECK: %1 = load atomic i8* %ptr seq_cst, align 1 | 242 ; CHECK: %1 = load atomic i8* %ptr seq_cst, align 1 |
222 %1 = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) | 243 %1 = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) |
223 ret i8 %1 | 244 ret i8 %1 |
224 } | 245 } |
225 | 246 |
226 ; CHECK: @test_atomic_store_i8 | 247 ; CHECK-LABEL: @test_atomic_store_i8 |
227 define void @test_atomic_store_i8(i8* %ptr, i8 zeroext %value) { | 248 define void @test_atomic_store_i8(i8* %ptr, i8 zeroext %value) { |
228 ; CHECK: store atomic i8 %value, i8* %ptr seq_cst, align 1 | 249 ; CHECK: store atomic i8 %value, i8* %ptr seq_cst, align 1 |
229 call void @llvm.nacl.atomic.store.i8(i8 %value, i8* %ptr, i32 6) | 250 call void @llvm.nacl.atomic.store.i8(i8 %value, i8* %ptr, i32 6) |
230 ret void | 251 ret void |
231 } | 252 } |
232 | 253 |
233 ; CHECK: @test_atomic_load_i16 | 254 ; CHECK-LABEL: @test_atomic_load_i16 |
234 define zeroext i16 @test_atomic_load_i16(i16* %ptr) { | 255 define zeroext i16 @test_atomic_load_i16(i16* %ptr) { |
235 ; CHECK: %1 = load atomic i16* %ptr seq_cst, align 2 | 256 ; CHECK: %1 = load atomic i16* %ptr seq_cst, align 2 |
236 %1 = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 6) | 257 %1 = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 6) |
237 ret i16 %1 | 258 ret i16 %1 |
238 } | 259 } |
239 | 260 |
240 ; CHECK: @test_atomic_store_i16 | 261 ; CHECK-LABEL: @test_atomic_store_i16 |
241 define void @test_atomic_store_i16(i16* %ptr, i16 zeroext %value) { | 262 define void @test_atomic_store_i16(i16* %ptr, i16 zeroext %value) { |
242 ; CHECK: store atomic i16 %value, i16* %ptr seq_cst, align 2 | 263 ; CHECK: store atomic i16 %value, i16* %ptr seq_cst, align 2 |
243 call void @llvm.nacl.atomic.store.i16(i16 %value, i16* %ptr, i32 6) | 264 call void @llvm.nacl.atomic.store.i16(i16 %value, i16* %ptr, i32 6) |
244 ret void | 265 ret void |
245 } | 266 } |
246 | 267 |
247 ; CHECK: @test_atomic_load_i32 | 268 ; CHECK-LABEL: @test_atomic_load_i32 |
248 define i32 @test_atomic_load_i32(i32* %ptr) { | 269 define i32 @test_atomic_load_i32(i32* %ptr) { |
249 ; CHECK: %1 = load atomic i32* %ptr seq_cst, align 4 | 270 ; CHECK: %1 = load atomic i32* %ptr seq_cst, align 4 |
250 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6) | 271 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6) |
251 ret i32 %1 | 272 ret i32 %1 |
252 } | 273 } |
253 | 274 |
254 ; CHECK: @test_atomic_store_i32 | 275 ; CHECK-LABEL: @test_atomic_store_i32 |
255 define void @test_atomic_store_i32(i32* %ptr, i32 %value) { | 276 define void @test_atomic_store_i32(i32* %ptr, i32 %value) { |
256 ; CHECK: store atomic i32 %value, i32* %ptr seq_cst, align 4 | 277 ; CHECK: store atomic i32 %value, i32* %ptr seq_cst, align 4 |
257 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 6) | 278 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 6) |
258 ret void | 279 ret void |
259 } | 280 } |
260 | 281 |
261 ; CHECK: @test_atomic_load_i64 | 282 ; CHECK-LABEL: @test_atomic_load_i64 |
262 define i64 @test_atomic_load_i64(i64* %ptr) { | 283 define i64 @test_atomic_load_i64(i64* %ptr) { |
263 ; CHECK: %1 = load atomic i64* %ptr seq_cst, align 8 | 284 ; CHECK: %1 = load atomic i64* %ptr seq_cst, align 8 |
264 %1 = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 6) | 285 %1 = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 6) |
265 ret i64 %1 | 286 ret i64 %1 |
266 } | 287 } |
267 | 288 |
268 ; CHECK: @test_atomic_store_i64 | 289 ; CHECK-LABEL: @test_atomic_store_i64 |
269 define void @test_atomic_store_i64(i64* %ptr, i64 %value) { | 290 define void @test_atomic_store_i64(i64* %ptr, i64 %value) { |
270 ; CHECK: store atomic i64 %value, i64* %ptr seq_cst, align 8 | 291 ; CHECK: store atomic i64 %value, i64* %ptr seq_cst, align 8 |
271 call void @llvm.nacl.atomic.store.i64(i64 %value, i64* %ptr, i32 6) | 292 call void @llvm.nacl.atomic.store.i64(i64 %value, i64* %ptr, i32 6) |
272 ret void | 293 ret void |
273 } | 294 } |
274 | 295 |
275 ; CHECK: attributes [[RETURNS_TWICE]] = { returns_twice } | 296 ; CHECK: attributes [[RETURNS_TWICE]] = { returns_twice } |