OLD | NEW |
---|---|
1 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s -check-prefix=CLEANED | 1 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s -check-prefix=CLEANED |
Jim Stichnoth
2015/01/08 19:29:25
Since you're touching this file, make this 80-char
JF
2015/01/08 19:36:25
Done.
| |
2 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s | 2 ; RUN: opt < %s -resolve-pnacl-intrinsics -S | FileCheck %s |
3 | 3 |
4 ; CLEANED-NOT: call i32 @llvm.nacl.setjmp | 4 ; CLEANED-NOT: call i32 @llvm.nacl.setjmp |
5 ; CLEANED-NOT: call void @llvm.nacl.longjmp | 5 ; CLEANED-NOT: call void @llvm.nacl.longjmp |
6 ; CLEANED-NOT: call {{.*}} @llvm.nacl.atomic | 6 ; CLEANED-NOT: call {{.*}} @llvm.nacl.atomic |
7 | 7 |
8 declare i32 @llvm.nacl.setjmp(i8*) | 8 declare i32 @llvm.nacl.setjmp(i8*) |
9 declare void @llvm.nacl.longjmp(i8*, i32) | 9 declare void @llvm.nacl.longjmp(i8*, i32) |
10 | 10 |
11 ; Intrinsic name mangling is based on overloaded parameters only, | 11 ; Intrinsic name mangling is based on overloaded parameters only, |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
46 } | 46 } |
47 | 47 |
48 define void @call_longjmp(i8* %arg, i32 %num) { | 48 define void @call_longjmp(i8* %arg, i32 %num) { |
49 call void @llvm.nacl.longjmp(i8* %arg, i32 %num) | 49 call void @llvm.nacl.longjmp(i8* %arg, i32 %num) |
50 ; CHECK: call void @longjmp(i8* %arg, i32 %num){{$}} | 50 ; CHECK: call void @longjmp(i8* %arg, i32 %num){{$}} |
51 ret void | 51 ret void |
52 } | 52 } |
53 | 53 |
54 ; atomics. | 54 ; atomics. |
55 | 55 |
56 ; CHECK: @test_fetch_and_add_i32 | 56 ; CHECK-LABEL: @test_atomic_acquire |
57 define i32 @test_atomic_acquire(i32* %ptr) { | |
58 ; CHECK: %1 = load atomic i32* %ptr acquire, align 4 | |
59 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 3) | |
60 ret i32 %1 | |
61 } | |
62 | |
63 ; CHECK-LABEL: @test_atomic_release | |
64 define void @test_atomic_release(i32* %ptr, i32 %value) { | |
65 ; CHECK: store atomic i32 %value, i32* %ptr release, align 4 | |
66 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 4) | |
67 ret void | |
68 } | |
69 | |
70 ; CHECK-LABEL: @test_atomic_acquire_release | |
71 define i32 @test_atomic_acquire_release(i32* %ptr, i32 %value) { | |
72 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value acq_rel | |
73 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 5) | |
74 ret i32 %1 | |
75 } | |
76 | |
77 ; CHECK-LABEL: @test_fetch_and_add_i32 | |
57 define i32 @test_fetch_and_add_i32(i32* %ptr, i32 %value) { | 78 define i32 @test_fetch_and_add_i32(i32* %ptr, i32 %value) { |
58 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value seq_cst | 79 ; CHECK: %1 = atomicrmw add i32* %ptr, i32 %value seq_cst |
59 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 6) | 80 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 %value, i32 6) |
60 ret i32 %1 | 81 ret i32 %1 |
61 } | 82 } |
62 | 83 |
63 ; CHECK: @test_fetch_and_sub_i32 | 84 ; CHECK-LABEL: @test_fetch_and_sub_i32 |
64 define i32 @test_fetch_and_sub_i32(i32* %ptr, i32 %value) { | 85 define i32 @test_fetch_and_sub_i32(i32* %ptr, i32 %value) { |
65 ; CHECK: %1 = atomicrmw sub i32* %ptr, i32 %value seq_cst | 86 ; CHECK: %1 = atomicrmw sub i32* %ptr, i32 %value seq_cst |
66 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 2, i32* %ptr, i32 %value, i32 6) | 87 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 2, i32* %ptr, i32 %value, i32 6) |
67 ret i32 %1 | 88 ret i32 %1 |
68 } | 89 } |
69 | 90 |
70 ; CHECK: @test_fetch_and_or_i32 | 91 ; CHECK-LABEL: @test_fetch_and_or_i32 |
71 define i32 @test_fetch_and_or_i32(i32* %ptr, i32 %value) { | 92 define i32 @test_fetch_and_or_i32(i32* %ptr, i32 %value) { |
72 ; CHECK: %1 = atomicrmw or i32* %ptr, i32 %value seq_cst | 93 ; CHECK: %1 = atomicrmw or i32* %ptr, i32 %value seq_cst |
73 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 3, i32* %ptr, i32 %value, i32 6) | 94 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 3, i32* %ptr, i32 %value, i32 6) |
74 ret i32 %1 | 95 ret i32 %1 |
75 } | 96 } |
76 | 97 |
77 ; CHECK: @test_fetch_and_and_i32 | 98 ; CHECK-LABEL: @test_fetch_and_and_i32 |
78 define i32 @test_fetch_and_and_i32(i32* %ptr, i32 %value) { | 99 define i32 @test_fetch_and_and_i32(i32* %ptr, i32 %value) { |
79 ; CHECK: %1 = atomicrmw and i32* %ptr, i32 %value seq_cst | 100 ; CHECK: %1 = atomicrmw and i32* %ptr, i32 %value seq_cst |
80 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 4, i32* %ptr, i32 %value, i32 6) | 101 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 4, i32* %ptr, i32 %value, i32 6) |
81 ret i32 %1 | 102 ret i32 %1 |
82 } | 103 } |
83 | 104 |
84 ; CHECK: @test_fetch_and_xor_i32 | 105 ; CHECK-LABEL: @test_fetch_and_xor_i32 |
85 define i32 @test_fetch_and_xor_i32(i32* %ptr, i32 %value) { | 106 define i32 @test_fetch_and_xor_i32(i32* %ptr, i32 %value) { |
86 ; CHECK: %1 = atomicrmw xor i32* %ptr, i32 %value seq_cst | 107 ; CHECK: %1 = atomicrmw xor i32* %ptr, i32 %value seq_cst |
87 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 5, i32* %ptr, i32 %value, i32 6) | 108 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 5, i32* %ptr, i32 %value, i32 6) |
88 ret i32 %1 | 109 ret i32 %1 |
89 } | 110 } |
90 | 111 |
91 ; CHECK: @test_val_compare_and_swap_i32 | 112 ; Test different compare-and-swap patterns that commonly occur and are a bit |
113 ; tricky because the PNaCl intrinsic only returns the value whereas the LLVM | |
114 ; intrinsic also returns the success flag (equivalent to comparing the oldval | |
115 ; with what was just loaded). | |
116 | |
117 ; CHECK-LABEL: @test_val_compare_and_swap_i32 | |
92 define i32 @test_val_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 118 define i32 @test_val_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
93 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 119 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
94 ; CHECK-NEXT: %2 = extractvalue { i32, i1 } %1, 0 | 120 ; CHECK-NEXT: %2 = extractvalue { i32, i1 } %1, 0 |
95 ; CHECK-NEXT: ret i32 %2 | 121 ; CHECK-NEXT: ret i32 %2 |
96 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 122 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
97 ret i32 %1 | 123 ret i32 %1 |
98 } | 124 } |
99 | 125 |
100 ; CHECK: @test_val_compare_and_swap_i32_new | 126 ; CHECK-LABEL: @test_val_compare_and_swap_i32_new |
101 define i32 @test_val_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { | 127 define i32 @test_val_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { |
102 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 128 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
103 ; CHECK-NEXT: %res2 = extractvalue { i32, i1 } %1, 0 | 129 ; CHECK-NEXT: %res2 = extractvalue { i32, i1 } %1, 0 |
104 ; CHECK-NEXT: ret i32 %res2 | 130 ; CHECK-NEXT: ret i32 %res2 |
105 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 131 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
106 %success = icmp eq i32 %res, %oldval | 132 %success = icmp eq i32 %res, %oldval |
107 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 | 133 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 |
108 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 | 134 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 |
109 %val = extractvalue { i32, i1 } %res.insert.success, 0 | 135 %val = extractvalue { i32, i1 } %res.insert.success, 0 |
110 ret i32 %val | 136 ret i32 %val |
111 } | 137 } |
112 | 138 |
113 ; CHECK: @test_bool_compare_and_swap_i32 | 139 ; CHECK-LABEL: @test_bool_compare_and_swap_i32 |
114 define i1 @test_bool_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 140 define i1 @test_bool_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
115 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 141 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
116 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 | 142 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 |
117 ; CHECK-NEXT: ret i1 %success | 143 ; CHECK-NEXT: ret i1 %success |
118 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 144 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
119 %2 = icmp eq i32 %1, %oldval | 145 %2 = icmp eq i32 %1, %oldval |
120 ret i1 %2 | 146 ret i1 %2 |
121 } | 147 } |
122 | 148 |
123 ; CHECK: @test_bool_compare_and_swap_i32_new | 149 ; CHECK-LABEL: @test_bool_compare_and_swap_i32_new |
124 define i1 @test_bool_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { | 150 define i1 @test_bool_compare_and_swap_i32_new(i32* %ptr, i32 %oldval, i32 %newval) { |
125 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 151 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
126 ; CHECK-NEXT: %suc = extractvalue { i32, i1 } %1, 1 | 152 ; CHECK-NEXT: %suc = extractvalue { i32, i1 } %1, 1 |
127 ; CHECK-NEXT: ret i1 %suc | 153 ; CHECK-NEXT: ret i1 %suc |
128 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 154 %res = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
129 %success = icmp eq i32 %res, %oldval | 155 %success = icmp eq i32 %res, %oldval |
130 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 | 156 %res.insert.value = insertvalue { i32, i1 } undef, i32 %res, 0 |
131 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 | 157 %res.insert.success = insertvalue { i32, i1 } %res.insert.value, i1 %success, 1 |
132 %suc = extractvalue { i32, i1 } %res.insert.success, 1 | 158 %suc = extractvalue { i32, i1 } %res.insert.success, 1 |
133 ret i1 %suc | 159 ret i1 %suc |
134 } | 160 } |
135 | 161 |
136 ; CHECK: @test_bool_compare_and_swap_i32_reordered | 162 ; CHECK-LABEL: @test_bool_compare_and_swap_i32_reordered |
137 define i1 @test_bool_compare_and_swap_i32_reordered(i32* %ptr, i32 %oldval, i32 %newval) { | 163 define i1 @test_bool_compare_and_swap_i32_reordered(i32* %ptr, i32 %oldval, i32 %newval) { |
138 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 164 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
139 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 | 165 ; CHECK-NEXT: %success = extractvalue { i32, i1 } %1, 1 |
140 ; CHECK-NEXT: ret i1 %success | 166 ; CHECK-NEXT: ret i1 %success |
141 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 167 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
142 %2 = icmp eq i32 %oldval, %1 ; Note operands are swapped from above. | 168 %2 = icmp eq i32 %oldval, %1 ; Note operands are swapped from above. |
143 ret i1 %2 | 169 ret i1 %2 |
144 } | 170 } |
145 | 171 |
146 ; CHECK: @test_struct_compare_and_swap_i32 | 172 ; CHECK-LABEL: @test_struct_compare_and_swap_i32 |
147 define { i32, i1 } @test_struct_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { | 173 define { i32, i1 } @test_struct_compare_and_swap_i32(i32* %ptr, i32 %oldval, i32 %newval) { |
148 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst | 174 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst |
149 ; CHECK-NEXT: ret { i32, i1 } %1 | 175 ; CHECK-NEXT: ret { i32, i1 } %1 |
150 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) | 176 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
151 %2 = icmp eq i32 %1, %oldval | 177 %2 = icmp eq i32 %1, %oldval |
152 %3 = insertvalue { i32, i1 } undef, i32 %1, 0 | 178 %3 = insertvalue { i32, i1 } undef, i32 %1, 0 |
153 %4 = insertvalue { i32, i1 } %3, i1 %2, 1 | 179 %4 = insertvalue { i32, i1 } %3, i1 %2, 1 |
154 ret { i32, i1 } %4 | 180 ret { i32, i1 } %4 |
155 } | 181 } |
156 | 182 |
157 ; CHECK: @test_c11_fence | 183 ; Test all allowed cmpxchg success/failure memory orderings. |
184 | |
185 ; CHECK-LABEL: @test_cmpxchg_seqcst_seqcst | |
186 define i32 @test_cmpxchg_seqcst_seqcst(i32* %ptr, i32 %oldval, i32 %newval) { | |
187 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst seq_cst | |
188 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6) |
189 ret i32 %1 | |
190 } | |
191 | |
192 ; CHECK-LABEL: @test_cmpxchg_seqcst_acquire | |
193 define i32 @test_cmpxchg_seqcst_acquire(i32* %ptr, i32 %oldval, i32 %newval) { | |
194 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval seq_cst acquire | |
195 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 3) |
196 ret i32 %1 | |
197 } | |
198 | |
199 ; CHECK-LABEL: @test_cmpxchg_acquire_acquire | |
200 define i32 @test_cmpxchg_acquire_acquire(i32* %ptr, i32 %oldval, i32 %newval) { | |
201 ; CHECK: %1 = cmpxchg i32* %ptr, i32 %oldval, i32 %newval acquire acquire | |
202 %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 3) |
203 ret i32 %1 | |
204 } | |
205 | |
206 ; CHECK-LABEL: @test_c11_fence | |
158 define void @test_c11_fence() { | 207 define void @test_c11_fence() { |
159 ; CHECK: fence seq_cst | 208 ; CHECK: fence seq_cst |
160 call void @llvm.nacl.atomic.fence(i32 6) | 209 call void @llvm.nacl.atomic.fence(i32 6) |
161 ret void | 210 ret void |
162 } | 211 } |
163 | 212 |
164 ; CHECK: @test_synchronize | 213 ; CHECK-LABEL: @test_synchronize |
165 define void @test_synchronize() { | 214 define void @test_synchronize() { |
166 ; CHECK: call void asm sideeffect "", "~{memory}"() | 215 ; CHECK: call void asm sideeffect "", "~{memory}"() |
167 ; CHECK: fence seq_cst | 216 ; CHECK: fence seq_cst |
168 ; CHECK: call void asm sideeffect "", "~{memory}"() | 217 ; CHECK: call void asm sideeffect "", "~{memory}"() |
169 call void @llvm.nacl.atomic.fence.all() | 218 call void @llvm.nacl.atomic.fence.all() |
170 ret void | 219 ret void |
171 } | 220 } |
172 | 221 |
173 ; CHECK: @test_is_lock_free_1 | 222 ; CHECK-LABEL: @test_is_lock_free_1 |
174 define i1 @test_is_lock_free_1(i8* %ptr) { | 223 define i1 @test_is_lock_free_1(i8* %ptr) { |
175 ; CHECK: ret i1 {{true|false}} | 224 ; CHECK: ret i1 {{true|false}} |
176 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 1, i8* %ptr) | 225 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 1, i8* %ptr) |
177 ret i1 %res | 226 ret i1 %res |
178 } | 227 } |
179 | 228 |
180 ; CHECK: @test_is_lock_free_2 | 229 ; CHECK-LABEL: @test_is_lock_free_2 |
181 define i1 @test_is_lock_free_2(i16* %ptr) { | 230 define i1 @test_is_lock_free_2(i16* %ptr) { |
182 ; CHECK: ret i1 {{true|false}} | 231 ; CHECK: ret i1 {{true|false}} |
183 %ptr2 = bitcast i16* %ptr to i8* | 232 %ptr2 = bitcast i16* %ptr to i8* |
184 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 2, i8* %ptr2) | 233 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 2, i8* %ptr2) |
185 ret i1 %res | 234 ret i1 %res |
186 } | 235 } |
187 | 236 |
188 ; CHECK: @test_is_lock_free_4 | 237 ; CHECK-LABEL: @test_is_lock_free_4 |
189 define i1 @test_is_lock_free_4(i32* %ptr) { | 238 define i1 @test_is_lock_free_4(i32* %ptr) { |
190 ; CHECK: ret i1 {{true|false}} | 239 ; CHECK: ret i1 {{true|false}} |
191 %ptr2 = bitcast i32* %ptr to i8* | 240 %ptr2 = bitcast i32* %ptr to i8* |
192 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr2) | 241 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 4, i8* %ptr2) |
193 ret i1 %res | 242 ret i1 %res |
194 } | 243 } |
195 | 244 |
196 ; CHECK: @test_is_lock_free_8 | 245 ; CHECK-LABEL: @test_is_lock_free_8 |
197 define i1 @test_is_lock_free_8(i64* %ptr) { | 246 define i1 @test_is_lock_free_8(i64* %ptr) { |
198 ; CHECK: ret i1 {{true|false}} | 247 ; CHECK: ret i1 {{true|false}} |
199 %ptr2 = bitcast i64* %ptr to i8* | 248 %ptr2 = bitcast i64* %ptr to i8* |
200 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 8, i8* %ptr2) | 249 %res = call i1 @llvm.nacl.atomic.is.lock.free(i32 8, i8* %ptr2) |
201 ret i1 %res | 250 ret i1 %res |
202 } | 251 } |
203 | 252 |
204 ; CHECK: @test_lock_test_and_set_i32 | 253 ; CHECK-LABEL: @test_lock_test_and_set_i32 |
205 define i32 @test_lock_test_and_set_i32(i32* %ptr, i32 %value) { | 254 define i32 @test_lock_test_and_set_i32(i32* %ptr, i32 %value) { |
206 ; CHECK: %1 = atomicrmw xchg i32* %ptr, i32 %value seq_cst | 255 ; CHECK: %1 = atomicrmw xchg i32* %ptr, i32 %value seq_cst |
207 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 6, i32* %ptr, i32 %value, i32 6) | 256 %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 6, i32* %ptr, i32 %value, i32 6) |
208 ret i32 %1 | 257 ret i32 %1 |
209 } | 258 } |
210 | 259 |
211 ; CHECK: @test_lock_release_i32 | 260 ; CHECK-LABEL: @test_lock_release_i32 |
212 define void @test_lock_release_i32(i32* %ptr) { | 261 define void @test_lock_release_i32(i32* %ptr) { |
213 ; Note that the 'release' was changed to a 'seq_cst'. | 262 ; Note that the 'release' was changed to a 'seq_cst'. |
214 ; CHECK: store atomic i32 0, i32* %ptr seq_cst, align 4 | 263 ; CHECK: store atomic i32 0, i32* %ptr seq_cst, align 4 |
215 call void @llvm.nacl.atomic.store.i32(i32 0, i32* %ptr, i32 6) | 264 call void @llvm.nacl.atomic.store.i32(i32 0, i32* %ptr, i32 6) |
216 ret void | 265 ret void |
217 } | 266 } |
218 | 267 |
219 ; CHECK: @test_atomic_load_i8 | 268 ; CHECK-LABEL: @test_atomic_load_i8 |
220 define zeroext i8 @test_atomic_load_i8(i8* %ptr) { | 269 define zeroext i8 @test_atomic_load_i8(i8* %ptr) { |
221 ; CHECK: %1 = load atomic i8* %ptr seq_cst, align 1 | 270 ; CHECK: %1 = load atomic i8* %ptr seq_cst, align 1 |
222 %1 = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) | 271 %1 = call i8 @llvm.nacl.atomic.load.i8(i8* %ptr, i32 6) |
223 ret i8 %1 | 272 ret i8 %1 |
224 } | 273 } |
225 | 274 |
226 ; CHECK: @test_atomic_store_i8 | 275 ; CHECK-LABEL: @test_atomic_store_i8 |
227 define void @test_atomic_store_i8(i8* %ptr, i8 zeroext %value) { | 276 define void @test_atomic_store_i8(i8* %ptr, i8 zeroext %value) { |
228 ; CHECK: store atomic i8 %value, i8* %ptr seq_cst, align 1 | 277 ; CHECK: store atomic i8 %value, i8* %ptr seq_cst, align 1 |
229 call void @llvm.nacl.atomic.store.i8(i8 %value, i8* %ptr, i32 6) | 278 call void @llvm.nacl.atomic.store.i8(i8 %value, i8* %ptr, i32 6) |
230 ret void | 279 ret void |
231 } | 280 } |
232 | 281 |
233 ; CHECK: @test_atomic_load_i16 | 282 ; CHECK-LABEL: @test_atomic_load_i16 |
234 define zeroext i16 @test_atomic_load_i16(i16* %ptr) { | 283 define zeroext i16 @test_atomic_load_i16(i16* %ptr) { |
235 ; CHECK: %1 = load atomic i16* %ptr seq_cst, align 2 | 284 ; CHECK: %1 = load atomic i16* %ptr seq_cst, align 2 |
236 %1 = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 6) | 285 %1 = call i16 @llvm.nacl.atomic.load.i16(i16* %ptr, i32 6) |
237 ret i16 %1 | 286 ret i16 %1 |
238 } | 287 } |
239 | 288 |
240 ; CHECK: @test_atomic_store_i16 | 289 ; CHECK-LABEL: @test_atomic_store_i16 |
241 define void @test_atomic_store_i16(i16* %ptr, i16 zeroext %value) { | 290 define void @test_atomic_store_i16(i16* %ptr, i16 zeroext %value) { |
242 ; CHECK: store atomic i16 %value, i16* %ptr seq_cst, align 2 | 291 ; CHECK: store atomic i16 %value, i16* %ptr seq_cst, align 2 |
243 call void @llvm.nacl.atomic.store.i16(i16 %value, i16* %ptr, i32 6) | 292 call void @llvm.nacl.atomic.store.i16(i16 %value, i16* %ptr, i32 6) |
244 ret void | 293 ret void |
245 } | 294 } |
246 | 295 |
247 ; CHECK: @test_atomic_load_i32 | 296 ; CHECK-LABEL: @test_atomic_load_i32 |
248 define i32 @test_atomic_load_i32(i32* %ptr) { | 297 define i32 @test_atomic_load_i32(i32* %ptr) { |
249 ; CHECK: %1 = load atomic i32* %ptr seq_cst, align 4 | 298 ; CHECK: %1 = load atomic i32* %ptr seq_cst, align 4 |
250 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6) | 299 %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6) |
251 ret i32 %1 | 300 ret i32 %1 |
252 } | 301 } |
253 | 302 |
254 ; CHECK: @test_atomic_store_i32 | 303 ; CHECK-LABEL: @test_atomic_store_i32 |
255 define void @test_atomic_store_i32(i32* %ptr, i32 %value) { | 304 define void @test_atomic_store_i32(i32* %ptr, i32 %value) { |
256 ; CHECK: store atomic i32 %value, i32* %ptr seq_cst, align 4 | 305 ; CHECK: store atomic i32 %value, i32* %ptr seq_cst, align 4 |
257 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 6) | 306 call void @llvm.nacl.atomic.store.i32(i32 %value, i32* %ptr, i32 6) |
258 ret void | 307 ret void |
259 } | 308 } |
260 | 309 |
261 ; CHECK: @test_atomic_load_i64 | 310 ; CHECK-LABEL: @test_atomic_load_i64 |
262 define i64 @test_atomic_load_i64(i64* %ptr) { | 311 define i64 @test_atomic_load_i64(i64* %ptr) { |
263 ; CHECK: %1 = load atomic i64* %ptr seq_cst, align 8 | 312 ; CHECK: %1 = load atomic i64* %ptr seq_cst, align 8 |
264 %1 = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 6) | 313 %1 = call i64 @llvm.nacl.atomic.load.i64(i64* %ptr, i32 6) |
265 ret i64 %1 | 314 ret i64 %1 |
266 } | 315 } |
267 | 316 |
268 ; CHECK: @test_atomic_store_i64 | 317 ; CHECK-LABEL: @test_atomic_store_i64 |
269 define void @test_atomic_store_i64(i64* %ptr, i64 %value) { | 318 define void @test_atomic_store_i64(i64* %ptr, i64 %value) { |
270 ; CHECK: store atomic i64 %value, i64* %ptr seq_cst, align 8 | 319 ; CHECK: store atomic i64 %value, i64* %ptr seq_cst, align 8 |
271 call void @llvm.nacl.atomic.store.i64(i64 %value, i64* %ptr, i32 6) | 320 call void @llvm.nacl.atomic.store.i64(i64 %value, i64* %ptr, i32 6) |
272 ret void | 321 ret void |
273 } | 322 } |
274 | 323 |
275 ; CHECK: attributes [[RETURNS_TWICE]] = { returns_twice } | 324 ; CHECK: attributes [[RETURNS_TWICE]] = { returns_twice } |
OLD | NEW |