OLD | NEW |
1 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s -check-prefix=CLEANED | 1 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s \ |
| 2 ; RUN: -check-prefix=CLEANED |
2 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s | 3 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s |
3 | 4 |
4 ; CLEANED-NOT: call i32 @llvm.nacl.setjmp | 5 ; CLEANED-NOT: call i32 @llvm.nacl.setjmp |
5 ; CLEANED-NOT: call void @llvm.nacl.longjmp | 6 ; CLEANED-NOT: call void @llvm.nacl.longjmp |
6 ; CLEANED-NOT: call {{.*}} @llvm.nacl.atomic | 7 ; CLEANED-NOT: call {{.*}} @llvm.nacl.atomic |
7 | 8 |
8 declare i32 @llvm.nacl.setjmp(i8*) | 9 declare i32 @llvm.nacl.setjmp(i8*) |
9 declare void @llvm.nacl.longjmp(i8*, i32) | 10 declare void @llvm.nacl.longjmp(i8*, i32) |
10 | 11 |
11 ; Intrinsic name mangling is based on overloaded parameters only, | 12 ; Intrinsic name mangling is based on overloaded parameters only, |
(...skipping 34 matching lines...)
46 } | 47 } |
47 | 48 |
48 define void @call_longjmp(i8* %arg, i32 %num) { | 49 define void @call_longjmp(i8* %arg, i32 %num) { |
49 call void @llvm.nacl.longjmp(i8* %arg, i32 %num) | 50 call void @llvm.nacl.longjmp(i8* %arg, i32 %num) |
50 ; CHECK: call void @longjmp(i8* %arg, i32 %num){{$}} | 51 ; CHECK: call void @longjmp(i8* %arg, i32 %num){{$}} |
51 ret void | 52 ret void |
52 } | 53 } |
53 | 54 |
54 ; atomics. | 55 ; atomics. |
55 | 56 |
56 ; CHECK: @test_fetch_and_add_i32 | 57 ; CHECK-LABEL: @test_atomic_acquire |
| 58 define i32 @test_atomic_acquire(i32* %ptr) { |
| 59 ; CHECK: %1 = load atomic i32* %ptr acquire, align 4 |
| 60 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 3) |
| 61 ret i32 %1 |
| 62 } |
| 63 |
| 64 ; CHECK-LABEL: @test_atomic_release |
| 65 define void @test_atomic_release(i32* %ptr, i32 %value) { |
| 66 ; CHECK: store atomic i32 %value, i32* %ptr release, align 4 |
| 67 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 4) |
| 68 ret void |
| 69 } |
| 70 |
| 71 ; CHECK-LABEL: @test_atomic_acquire_release |
| 72 define i32 @test_atomic_acquire_release(i32* %ptr, i32 %value) { |
| 73 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value acq_rel |
| 74 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 5) |
| 75 ret i32 %1 |
| 76 } |
| 77 |
| 78 ; CHECK-LABEL: @test_fetch_and_add_i32 |
57 define i32 @test_fetch_and_add_i32(i32* %ptr, i32 %value) { | 79 define i32 @test_fetch_and_add_i32(i32* %ptr, i32 %value) { |
58 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value seq_cst | 80 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value seq_cst |
59 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 6) | 81 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 6) |
60 ret i32 %1 | 82 ret i32 %1 |
61 } | 83 } |
62 | 84 |
63 ; CHECK: @test_fetch_and_sub_i32 | 85 ; CHECK-LABEL: @test_fetch_and_sub_i32 |
64 define i32 @test_fetch_and_sub_i32(i32* %ptr, i32 %value) { | 86 define i32 @test_fetch_and_sub_i32(i32* %ptr, i32 %value) { |
65 ; CHECK: %1 = atomicrmw sub i32* %ptr, i32 %value seq_cst | 87 ; CHECK: %1 = atomicrmw sub i32* %ptr, i32 %value seq_cst |
66 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 2, i32* %ptr, i32 %value, i32 6) | 88 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 2, i32* %ptr, i32 %value, i32 6) |
67 ret i32 %1 | 89 ret i32 %1 |
68 } | 90 } |
69 | 91 |
70 ; CHECK: @test_fetch_and_or_i32 | 92 ; CHECK-LABEL: @test_fetch_and_or_i32 |
71 define i32 @test_fetch_and_or_i32(i32* %ptr, i32 %value) { | 93 define i32 @test_fetch_and_or_i32(i32* %ptr, i32 %value) { |
72 ; CHECK: %1 = atomicrmw or i32* %ptr, i32 %value seq_cst | 94 ; CHECK: %1 = atomicrmw or i32* %ptr, i32 %value seq_cst |
73 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 3, i32* %ptr, i32 %value, i32 6) | 95 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 3, i32* %ptr, i32 %value, i32 6) |
74 ret i32 %1 | 96 ret i32 %1 |
75 } | 97 } |
76 | 98 |
77 ; CHECK: @test_fetch_and_and_i32 | 99 ; CHECK-LABEL: @test_fetch_and_and_i32 |
78 define i32 @test_fetch_and_and_i32(i32* %ptr, i32 %value) { | 100 define i32 @test_fetch_and_and_i32(i32* %ptr, i32 %value) { |
79 ; CHECK: %1 = atomicrmw and i32* %ptr, i32 %value seq_cst | 101 ; CHECK: %1 = atomicrmw and i32* %ptr, i32 %value seq_cst |
80 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 4, i32* %ptr, i32 %value, i32 6) | 102 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 4, i32* %ptr, i32 %value, i32 6) |
81 ret i32 %1 | 103 ret i32 %1 |
82 } | 104 } |
83 | 105 |
84 ; CHECK: @test_fetch_and_xor_i32 | 106 ; CHECK-LABEL: @test_fetch_and_xor_i32 |
85 define i32 @test_fetch_and_xor_i32(i32* %ptr, i32 %value) { | 107 define i32 @test_fetch_and_xor_i32(i32* %ptr, i32 %value) { |
86 ; CHECK: %1 = atomicrmw xor i32* %ptr, i32 %value seq_cst | 108 ; CHECK: %1 = atomicrmw xor i32* %ptr, i32 %value seq_cst |
87 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 5, i32* %ptr, i32 %value, i32 6) | 109 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 5, i32* %ptr, i32 %value, i32 6) |
88 ret i32 %1 | 110 ret i32 %1 |
89 } | 111 } |
90 | 112 |
91 ; CHECK: @test_val_compare_and_swap_i32 | 113 ; Test different compare-and-swap patterns that commonly occur and are a bit |
| 114 ; tricky because the PNaCl intrinsic only returns the value whereas the LLVM |
| 115 ; intrinsic also returns the success flag (equivalent to comparing the oldval |
| 116 ; with what was just loaded). |
| 117 |
| 118 ; CHECK-LABEL: @test_val_compare_and_swap_i32 |
92 define i32 @test_val_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 119 define i32 @test_val_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
93 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 120 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
94 ; CHECK-NEXT: %2 = extractvalue { i32, i1 } %1, 0 | 121 ; CHECK-NEXT: %2 = extractvalue { i32, i1 } %1, 0 |
95 ; CHECK-NEXT: ret i32 %2 | 122 ; CHECK-NEXT: ret i32 %2 |
96 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 123 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
97 ret i32 %1 | 124 ret i32 %1 |
98 } | 125 } |
99 | 126 |
100 ; CHECK: @test_val_compare_and_swap_i32_new | 127 ; CHECK-LABEL: @test_val_compare_and_swap_i32_new |
101 define i32 @test_val_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { | 128 define i32 @test_val_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { |
102 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 129 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
103 ; CHECK-NEXT: %res2 = extractvalue { i32, i1 } %1, 0 | 130 ; CHECK-NEXT: %res2 = extractvalue { i32, i1 } %1, 0 |
104 ; CHECK-NEXT: ret i32 %res2 | 131 ; CHECK-NEXT: ret i32 %res2 |
105 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 132 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
106 %success = icmp eq i32 %res, %oldval | 133 %success = icmp eq i32 %res, %oldval |
107 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 | 134 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 |
108 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 | 135 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 |
109 %val = extractvalue { i32, i1 } %res.insert.success, 0 | 136 %val = extractvalue { i32, i1 } %res.insert.success, 0 |
110 ret i32 %val | 137 ret i32 %val |
111 } | 138 } |
112 | 139 |
113 ; CHECK: @test_bool_compare_and_swap_i32 | 140 ; CHECK-LABEL: @test_bool_compare_and_swap_i32 |
114 define i1 @test_bool_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 141 define i1 @test_bool_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
115 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 142 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
116 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 | 143 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 |
117 ; CHECK-NEXT: ret i1 %success | 144 ; CHECK-NEXT: ret i1 %success |
118 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 145 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
119 %2 = icmp eq i32 %1, %oldval | 146 %2 = icmp eq i32 %1, %oldval |
120 ret i1 %2 | 147 ret i1 %2 |
121 } | 148 } |
122 | 149 |
123 ; CHECK: @test_bool_compare_and_swap_i32_new | 150 ; CHECK-LABEL: @test_bool_compare_and_swap_i32_new |
124 define i1 @test_bool_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { | 151 define i1 @test_bool_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { |
125 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 152 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
126 ; CHECK-NEXT: %suc = extractvalue { i32, i1 } %1, 1 | 153 ; CHECK-NEXT: %suc = extractvalue { i32, i1 } %1, 1 |
127 ; CHECK-NEXT: ret i1 %suc | 154 ; CHECK-NEXT: ret i1 %suc |
128 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 155 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
129 %success = icmp eq i32 %res, %oldval | 156 %success = icmp eq i32 %res, %oldval |
130 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 | 157 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 |
131 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 | 158 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 |
132 %suc = extractvalue { i32, i1 } %res.insert.success, 1 | 159 %suc = extractvalue { i32, i1 } %res.insert.success, 1 |
133 ret i1 %suc | 160 ret i1 %suc |
134 } | 161 } |
135 | 162 |
136 ; CHECK: @test_bool_compare_and_swap_i32_reordered | 163 ; CHECK-LABEL: @test_bool_compare_and_swap_i32_reordered |
137 define i1 @test_bool_compare_and_swap_i32_reordered(i32* %ptr, i32 %oldval, i32 %newval) { | 164 define i1 @test_bool_compare_and_swap_i32_reordered(i32* %ptr, i32 %oldval, i32 %newval) { |
138 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 165 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
139 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 | 166 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 |
140 ; CHECK-NEXT: ret i1 %success | 167 ; CHECK-NEXT: ret i1 %success |
141 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 168 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
142 %2 = icmp eq i32 %oldval, %1 ; Note operands are swapped from above. | 169 %2 = icmp eq i32 %oldval, %1 ; Note operands are swapped from above. |
143 ret i1 %2 | 170 ret i1 %2 |
144 } | 171 } |
145 | 172 |
146 ; CHECK: @test_struct_compare_and_swap_i32 | 173 ; CHECK-LABEL: @test_struct_compare_and_swap_i32 |
147 define { i32, i1 } @test_struct_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 174 define { i32, i1 } @test_struct_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
148 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 175 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
149 ; CHECK-NEXT: ret { i32, i1 } %1 | 176 ; CHECK-NEXT: ret { i32, i1 } %1 |
150 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 177 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
151 %2 = icmp eq i32 %1, %oldval | 178 %2 = icmp eq i32 %1, %oldval |
152 %3 = insertvalue { i32, i1 } undef, i32 %1, 0 | 179 %3 = insertvalue { i32, i1 } undef, i32 %1, 0 |
153 %4 = insertvalue { i32, i1 } %3, i1 %2, 1 | 180 %4 = insertvalue { i32, i1 } %3, i1 %2, 1 |
154 ret { i32, i1 } %4 | 181 ret { i32, i1 } %4 |
155 } | 182 } |
156 | 183 |
157 ; CHECK: @test_c11_fence | 184 ; Test all allowed cmpxchg success/failure memory orderings. |
| 185 |
| 186 ; CHECK-LABEL: @test_cmpxchg_seqcst_seqcst |
| 187 define i32 @test_cmpxchg_seqcst_seqcst(i32* %ptr, i32 %oldval, i32 %newval) { |
| 188 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
| 189 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
| 190 ret i32 %1 |
| 191 } |
| 192 |
| 193 ; CHECK-LABEL: @test_cmpxchg_seqcst_acquire |
| 194 define i32 @test_cmpxchg_seqcst_acquire(i32* %ptr, i32 %oldval, i32 %newval) { |
| 195 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst acquire |
| 196 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 3) |
| 197 ret i32 %1 |
| 198 } |
| 199 |
| 200 ; CHECK-LABEL: @test_cmpxchg_acquire_acquire |
| 201 define i32 @test_cmpxchg_acquire_acquire(i32* %ptr, i32 %oldval, i32 %newval) { |
| 202 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval acquire acquire |
| 203 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 3) |
| 204 ret i32 %1 |
| 205 } |
| 206 |
| 207 ; CHECK-LABEL: @test_c11_fence |
158 define void @test_c11_fence() { | 208 define void @test_c11_fence() { |
159 ; CHECK: fence seq_cst | 209 ; CHECK: fence seq_cst |
160 call void @llvm.nacl.atomic.fence(i32 6) | 210 call void @llvm.nacl.atomic.fence(i32 6) |
161 ret void | 211 ret void |
162 } | 212 } |
163 | 213 |
164 ; CHECK: @test_synchronize | 214 ; CHECK-LABEL: @test_synchronize |
165 define void @test_synchronize() { | 215 define void @test_synchronize() { |
166 ; CHECK: call void asm sideeffect "", "~{memory}"() | 216 ; CHECK: call void asm sideeffect "", "~{memory}"() |
167 ; CHECK: fence seq_cst | 217 ; CHECK: fence seq_cst |
168 ; CHECK: call void asm sideeffect "", "~{memory}"() | 218 ; CHECK: call void asm sideeffect "", "~{memory}"() |
169 call void @llvm.nacl.atomic.fence.all() | 219 call void @llvm.nacl.atomic.fence.all() |
170 ret void | 220 ret void |
171 } | 221 } |
172 | 222 |
173 ; CHECK: @test_is_lock_free_1 | 223 ; CHECK-LABEL: @test_is_lock_free_1 |
174 define i1 @test_is_lock_free_1(i8* %ptr) { | 224 define i1 @test_is_lock_free_1(i8* %ptr) { |
175 ; CHECK: ret i1 {{true|false}} | 225 ; CHECK: ret i1 {{true|false}} |
176 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 1, i8* %ptr) | 226 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 1, i8* %ptr) |
177 ret i1 %res | 227 ret i1 %res |
178 } | 228 } |
179 | 229 |
180 ; CHECK: @test_is_lock_free_2 | 230 ; CHECK-LABEL: @test_is_lock_free_2 |
181 define i1 @test_is_lock_free_2(i16* %ptr) { | 231 define i1 @test_is_lock_free_2(i16* %ptr) { |
182 ; CHECK: ret i1 {{true|false}} | 232 ; CHECK: ret i1 {{true|false}} |
183 %ptr2 = bitcast i16* %ptr to i8* | 233 %ptr2 = bitcast i16* %ptr to i8* |
184 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 2, i8* %ptr2) | 234 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 2, i8* %ptr2) |
185 ret i1 %res | 235 ret i1 %res |
186 } | 236 } |
187 | 237 |
188 ; CHECK: @test_is_lock_free_4 | 238 ; CHECK-LABEL: @test_is_lock_free_4 |
189 define i1 @test_is_lock_free_4(i32* %ptr) { | 239 define i1 @test_is_lock_free_4(i32* %ptr) { |
190 ; CHECK: ret i1 {{true|false}} | 240 ; CHECK: ret i1 {{true|false}} |
191 %ptr2 = bitcast i32* %ptr to i8* | 241 %ptr2 = bitcast i32* %ptr to i8* |
192 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr2) | 242 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr2) |
193 ret i1 %res | 243 ret i1 %res |
194 } | 244 } |
195 | 245 |
196 ; CHECK: @test_is_lock_free_8 | 246 ; CHECK-LABEL: @test_is_lock_free_8 |
197 define i1 @test_is_lock_free_8(i64* %ptr) { | 247 define i1 @test_is_lock_free_8(i64* %ptr) { |
198 ; CHECK: ret i1 {{true|false}} | 248 ; CHECK: ret i1 {{true|false}} |
199 %ptr2 = bitcast i64* %ptr to i8* | 249 %ptr2 = bitcast i64* %ptr to i8* |
200 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 8, i8* %ptr2) | 250 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 8, i8* %ptr2) |
201 ret i1 %res | 251 ret i1 %res |
202 } | 252 } |
203 | 253 |
204 ; CHECK: @test_lock_test_and_set_i32 | 254 ; CHECK-LABEL: @test_lock_test_and_set_i32 |
205 define i32 @test_lock_test_and_set_i32(i32* %ptr, i32 %value) { | 255 define i32 @test_lock_test_and_set_i32(i32* %ptr, i32 %value) { |
206 ; CHECK: %1 = atomicrmw xchg i32* %ptr, i32 %value seq_cst | 256 ; CHECK: %1 = atomicrmw xchg i32* %ptr, i32 %value seq_cst |
207 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 6, i32* %ptr, i32 %value, i32 6) | 257 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 6, i32* %ptr, i32 %value, i32 6) |
208 ret i32 %1 | 258 ret i32 %1 |
209 } | 259 } |
210 | 260 |
211 ; CHECK: @test_lock_release_i32 | 261 ; CHECK-LABEL: @test_lock_release_i32 |
212 define void @test_lock_release_i32(i32* %ptr) { | 262 define void @test_lock_release_i32(i32* %ptr) { |
213 ; Note that the 'release' was changed to a 'seq_cst'. | 263 ; Note that the 'release' was changed to a 'seq_cst'. |
214 ; CHECK: store atomic i32 0, i32* %ptr seq_cst, align 4 | 264 ; CHECK: store atomic i32 0, i32* %ptr seq_cst, align 4 |
215 call void @llvm.nacl.atomic.store.i32(i32 0, i32* %ptr, i32 6) | 265 call void @llvm.nacl.atomic.store.i32(i32 0, i32* %ptr, i32 6) |
216 ret void | 266 ret void |
217 } | 267 } |
218 | 268 |
219 ; CHECK: @test_atomic_load_i8 | 269 ; CHECK-LABEL: @test_atomic_load_i8 |
220 define zeroext i8 @test_atomic_load_i8(i8* %ptr) { | 270 define zeroext i8 @test_atomic_load_i8(i8* %ptr) { |
221 ; CHECK: %1 = load atomic i8* %ptr seq_cst, align 1 | 271 ; CHECK: %1 = load atomic i8* %ptr seq_cst, align 1 |
222 %1 = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) | 272 %1 = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) |
223 ret i8 %1 | 273 ret i8 %1 |
224 } | 274 } |
225 | 275 |
226 ; CHECK: @test_atomic_store_i8 | 276 ; CHECK-LABEL: @test_atomic_store_i8 |
227 define void @test_atomic_store_i8(i8* %ptr, i8 zeroext %value) { | 277 define void @test_atomic_store_i8(i8* %ptr, i8 zeroext %value) { |
228 ; CHECK: store atomic i8 %value, i8* %ptr seq_cst, align 1 | 278 ; CHECK: store atomic i8 %value, i8* %ptr seq_cst, align 1 |
229 call void @llvm.nacl.atomic.store.i8(i8 %value, i8* %ptr, i32 6) | 279 call void @llvm.nacl.atomic.store.i8(i8 %value, i8* %ptr, i32 6) |
230 ret void | 280 ret void |
231 } | 281 } |
232 | 282 |
233 ; CHECK: @test_atomic_load_i16 | 283 ; CHECK-LABEL: @test_atomic_load_i16 |
234 define zeroext i16 @test_atomic_load_i16(i16* %ptr) { | 284 define zeroext i16 @test_atomic_load_i16(i16* %ptr) { |
235 ; CHECK: %1 = load atomic i16* %ptr seq_cst, align 2 | 285 ; CHECK: %1 = load atomic i16* %ptr seq_cst, align 2 |
236 %1 = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 6) | 286 %1 = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 6) |
237 ret i16 %1 | 287 ret i16 %1 |
238 } | 288 } |
239 | 289 |
240 ; CHECK: @test_atomic_store_i16 | 290 ; CHECK-LABEL: @test_atomic_store_i16 |
241 define void @test_atomic_store_i16(i16* %ptr, i16 zeroext %value) { | 291 define void @test_atomic_store_i16(i16* %ptr, i16 zeroext %value) { |
242 ; CHECK: store atomic i16 %value, i16* %ptr seq_cst, align 2 | 292 ; CHECK: store atomic i16 %value, i16* %ptr seq_cst, align 2 |
243 call void @llvm.nacl.atomic.store.i16(i16 %value, i16* %ptr, i32 6) | 293 call void @llvm.nacl.atomic.store.i16(i16 %value, i16* %ptr, i32 6) |
244 ret void | 294 ret void |
245 } | 295 } |
246 | 296 |
247 ; CHECK: @test_atomic_load_i32 | 297 ; CHECK-LABEL: @test_atomic_load_i32 |
248 define i32 @test_atomic_load_i32(i32* %ptr) { | 298 define i32 @test_atomic_load_i32(i32* %ptr) { |
249 ; CHECK: %1 = load atomic i32* %ptr seq_cst, align 4 | 299 ; CHECK: %1 = load atomic i32* %ptr seq_cst, align 4 |
250 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6) | 300 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6) |
251 ret i32 %1 | 301 ret i32 %1 |
252 } | 302 } |
253 | 303 |
254 ; CHECK: @test_atomic_store_i32 | 304 ; CHECK-LABEL: @test_atomic_store_i32 |
255 define void @test_atomic_store_i32(i32* %ptr, i32 %value) { | 305 define void @test_atomic_store_i32(i32* %ptr, i32 %value) { |
256 ; CHECK: store atomic i32 %value, i32* %ptr seq_cst, align 4 | 306 ; CHECK: store atomic i32 %value, i32* %ptr seq_cst, align 4 |
257 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 6) | 307 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 6) |
258 ret void | 308 ret void |
259 } | 309 } |
260 | 310 |
261 ; CHECK: @test_atomic_load_i64 | 311 ; CHECK-LABEL: @test_atomic_load_i64 |
262 define i64 @test_atomic_load_i64(i64* %ptr) { | 312 define i64 @test_atomic_load_i64(i64* %ptr) { |
263 ; CHECK: %1 = load atomic i64* %ptr seq_cst, align 8 | 313 ; CHECK: %1 = load atomic i64* %ptr seq_cst, align 8 |
264 %1 = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 6) | 314 %1 = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 6) |
265 ret i64 %1 | 315 ret i64 %1 |
266 } | 316 } |
267 | 317 |
268 ; CHECK: @test_atomic_store_i64 | 318 ; CHECK-LABEL: @test_atomic_store_i64 |
269 define void @test_atomic_store_i64(i64* %ptr, i64 %value) { | 319 define void @test_atomic_store_i64(i64* %ptr, i64 %value) { |
270 ; CHECK: store atomic i64 %value, i64* %ptr seq_cst, align 8 | 320 ; CHECK: store atomic i64 %value, i64* %ptr seq_cst, align 8 |
271 call void @llvm.nacl.atomic.store.i64(i64 %value, i64* %ptr, i32 6) | 321 call void @llvm.nacl.atomic.store.i64(i64 %value, i64* %ptr, i32 6) |
272 ret void | 322 ret void |
273 } | 323 } |
274 | 324 |
275 ; CHECK: attributes [[RETURNS_TWICE]] = { returns_twice } | 325 ; CHECK: attributes [[RETURNS_TWICE]] = { returns_twice } |