OLD | NEW |
1 ; This tests the NaCl intrinsics not related to atomic operations. | 1 ; This tests the NaCl intrinsics not related to atomic operations. |
2 | 2 |
3 ; TODO(jvoung): fix extra "CALLTARGETS" run. The llvm-objdump symbolizer | 3 ; TODO(jvoung): fix extra "CALLTARGETS" run. The llvm-objdump symbolizer |
4 ; doesn't know how to symbolize non-section-local functions. | 4 ; doesn't know how to symbolize non-section-local functions. |
5 ; The newer LLVM 3.6 one does work, but watch out for other bugs. | 5 ; The newer LLVM 3.6 one does work, but watch out for other bugs. |
6 | 6 |
7 ; RUN: %llvm2ice -O2 --verbose none %s \ | 7 ; RUN: %llvm2ice -O2 --verbose none %s \ |
8 ; RUN: | FileCheck --check-prefix=CALLTARGETS %s | 8 ; RUN: | FileCheck --check-prefix=CALLTARGETS %s |
9 ; RUN: %llvm2ice -O2 --verbose none -sandbox %s \ | 9 ; RUN: %llvm2ice -O2 --verbose none -sandbox %s \ |
10 ; RUN: | llvm-mc -triple=i686-none-nacl -x86-asm-syntax=intel -filetype=obj \ | 10 ; RUN: | llvm-mc -triple=i686-none-nacl -x86-asm-syntax=intel -filetype=obj \ |
(...skipping 99 matching lines...)
110 ; CHECKO2UNSANDBOXEDREM-LABEL: test_nacl_read_tp_dead | 110 ; CHECKO2UNSANDBOXEDREM-LABEL: test_nacl_read_tp_dead |
111 ; CHECKO2UNSANDBOXEDREM-NOT: call -4 | 111 ; CHECKO2UNSANDBOXEDREM-NOT: call -4 |
112 ; CALLTARGETS-LABEL: test_nacl_read_tp_dead | 112 ; CALLTARGETS-LABEL: test_nacl_read_tp_dead |
113 ; CALLTARGETS-NOT: call __nacl_read_tp | 113 ; CALLTARGETS-NOT: call __nacl_read_tp |
114 | 114 |
115 define void @test_memcpy(i32 %iptr_dst, i32 %iptr_src, i32 %len) { | 115 define void @test_memcpy(i32 %iptr_dst, i32 %iptr_src, i32 %len) { |
116 entry: | 116 entry: |
117 %dst = inttoptr i32 %iptr_dst to i8* | 117 %dst = inttoptr i32 %iptr_dst to i8* |
118 %src = inttoptr i32 %iptr_src to i8* | 118 %src = inttoptr i32 %iptr_src to i8* |
119 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dst, i8* %src, | 119 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dst, i8* %src, |
120 i32 %len, i32 1, i1 0) | 120 i32 %len, i32 1, i1 false) |
121 ret void | 121 ret void |
122 } | 122 } |
123 ; CHECK-LABEL: test_memcpy | 123 ; CHECK-LABEL: test_memcpy |
124 ; CHECK: call -4 | 124 ; CHECK: call -4 |
125 ; CALLTARGETS-LABEL: test_memcpy | 125 ; CALLTARGETS-LABEL: test_memcpy |
126 ; CALLTARGETS: call memcpy | 126 ; CALLTARGETS: call memcpy |
127 ; CHECKO2REM-LABEL: test_memcpy | 127 ; CHECKO2REM-LABEL: test_memcpy |
128 ; CHECKO2UNSANDBOXEDREM-LABEL: test_memcpy | 128 ; CHECKO2UNSANDBOXEDREM-LABEL: test_memcpy |
129 | 129 |
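
Note: these memcpy/memmove/memset tests all expect the intrinsic to be lowered to an ordinary libc call (the sandboxed "call -4" is presumably just the not-yet-relocated call displacement). A minimal C sketch of that mapping, dropping the alignment and is-volatile arguments; the lower_* helper names are illustrative only and are not Subzero APIs:

  #include <stddef.h>
  #include <string.h>

  /* llvm.memcpy.p0i8.p0i8.i32(dst, src, len, align, isvolatile) -> memcpy */
  static void lower_memcpy(void *dst, const void *src, size_t len) {
    memcpy(dst, src, len);            /* the "call memcpy" expected by CALLTARGETS */
  }

  /* llvm.memmove and llvm.memset follow the same pattern. */
  static void lower_memmove(void *dst, const void *src, size_t len) {
    memmove(dst, src, len);
  }

  static void lower_memset(void *dst, int val, size_t len) {
    memset(dst, val, len);            /* val carries the zero-extended i8 value */
  }
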
130 ; TODO(jvoung) -- if we want to be clever, we can do this and the memmove, | 130 ; TODO(jvoung) -- if we want to be clever, we can do this and the memmove, |
131 ; memset without a function call. | 131 ; memset without a function call. |
132 define void @test_memcpy_const_len_align(i32 %iptr_dst, i32 %iptr_src) { | 132 define void @test_memcpy_const_len_align(i32 %iptr_dst, i32 %iptr_src) { |
133 entry: | 133 entry: |
134 %dst = inttoptr i32 %iptr_dst to i8* | 134 %dst = inttoptr i32 %iptr_dst to i8* |
135 %src = inttoptr i32 %iptr_src to i8* | 135 %src = inttoptr i32 %iptr_src to i8* |
136 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dst, i8* %src, | 136 call void @llvm.memcpy.p0i8.p0i8.i32(i8* %dst, i8* %src, |
137 i32 8, i32 1, i1 0) | 137 i32 8, i32 1, i1 false) |
138 ret void | 138 ret void |
139 } | 139 } |
140 ; CHECK-LABEL: test_memcpy_const_len_align | 140 ; CHECK-LABEL: test_memcpy_const_len_align |
141 ; CHECK: call -4 | 141 ; CHECK: call -4 |
142 ; CALLTARGETS-LABEL: test_memcpy_const_len_align | 142 ; CALLTARGETS-LABEL: test_memcpy_const_len_align |
143 ; CALLTARGETS: call memcpy | 143 ; CALLTARGETS: call memcpy |
144 | 144 |
145 define void @test_memmove(i32 %iptr_dst, i32 %iptr_src, i32 %len) { | 145 define void @test_memmove(i32 %iptr_dst, i32 %iptr_src, i32 %len) { |
146 entry: | 146 entry: |
147 %dst = inttoptr i32 %iptr_dst to i8* | 147 %dst = inttoptr i32 %iptr_dst to i8* |
148 %src = inttoptr i32 %iptr_src to i8* | 148 %src = inttoptr i32 %iptr_src to i8* |
149 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dst, i8* %src, | 149 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dst, i8* %src, |
150 i32 %len, i32 1, i1 0) | 150 i32 %len, i32 1, i1 false) |
151 ret void | 151 ret void |
152 } | 152 } |
153 ; CHECK-LABEL: test_memmove | 153 ; CHECK-LABEL: test_memmove |
154 ; CHECK: call -4 | 154 ; CHECK: call -4 |
155 ; CALLTARGETS-LABEL: test_memmove | 155 ; CALLTARGETS-LABEL: test_memmove |
156 ; CALLTARGETS: call memmove | 156 ; CALLTARGETS: call memmove |
157 | 157 |
158 define void @test_memmove_const_len_align(i32 %iptr_dst, i32 %iptr_src) { | 158 define void @test_memmove_const_len_align(i32 %iptr_dst, i32 %iptr_src) { |
159 entry: | 159 entry: |
160 %dst = inttoptr i32 %iptr_dst to i8* | 160 %dst = inttoptr i32 %iptr_dst to i8* |
161 %src = inttoptr i32 %iptr_src to i8* | 161 %src = inttoptr i32 %iptr_src to i8* |
162 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dst, i8* %src, | 162 call void @llvm.memmove.p0i8.p0i8.i32(i8* %dst, i8* %src, |
163 i32 8, i32 1, i1 0) | 163 i32 8, i32 1, i1 false) |
164 ret void | 164 ret void |
165 } | 165 } |
166 ; CHECK-LABEL: test_memmove_const_len_align | 166 ; CHECK-LABEL: test_memmove_const_len_align |
167 ; CHECK: call -4 | 167 ; CHECK: call -4 |
168 ; CALLTARGETS-LABEL: test_memmove_const_len_align | 168 ; CALLTARGETS-LABEL: test_memmove_const_len_align |
169 ; CALLTARGETS: call memmove | 169 ; CALLTARGETS: call memmove |
170 | 170 |
171 define void @test_memset(i32 %iptr_dst, i32 %wide_val, i32 %len) { | 171 define void @test_memset(i32 %iptr_dst, i32 %wide_val, i32 %len) { |
172 entry: | 172 entry: |
173 %val = trunc i32 %wide_val to i8 | 173 %val = trunc i32 %wide_val to i8 |
174 %dst = inttoptr i32 %iptr_dst to i8* | 174 %dst = inttoptr i32 %iptr_dst to i8* |
175 call void @llvm.memset.p0i8.i32(i8* %dst, i8 %val, | 175 call void @llvm.memset.p0i8.i32(i8* %dst, i8 %val, |
176 i32 %len, i32 1, i1 0) | 176 i32 %len, i32 1, i1 false) |
177 ret void | 177 ret void |
178 } | 178 } |
179 ; CHECK-LABEL: test_memset | 179 ; CHECK-LABEL: test_memset |
180 ; CHECK: movzx | 180 ; CHECK: movzx |
181 ; CHECK: call -4 | 181 ; CHECK: call -4 |
182 ; CALLTARGETS-LABEL: test_memset | 182 ; CALLTARGETS-LABEL: test_memset |
183 ; CALLTARGETS: call memset | 183 ; CALLTARGETS: call memset |
184 | 184 |
185 define void @test_memset_const_len_align(i32 %iptr_dst, i32 %wide_val) { | 185 define void @test_memset_const_len_align(i32 %iptr_dst, i32 %wide_val) { |
186 entry: | 186 entry: |
187 %val = trunc i32 %wide_val to i8 | 187 %val = trunc i32 %wide_val to i8 |
188 %dst = inttoptr i32 %iptr_dst to i8* | 188 %dst = inttoptr i32 %iptr_dst to i8* |
189 call void @llvm.memset.p0i8.i32(i8* %dst, i8 %val, | 189 call void @llvm.memset.p0i8.i32(i8* %dst, i8 %val, |
190 i32 8, i32 1, i1 0) | 190 i32 8, i32 1, i1 false) |
191 ret void | 191 ret void |
192 } | 192 } |
193 ; CHECK-LABEL: test_memset_const_len_align | 193 ; CHECK-LABEL: test_memset_const_len_align |
194 ; CHECK: movzx | 194 ; CHECK: movzx |
195 ; CHECK: call -4 | 195 ; CHECK: call -4 |
196 ; CALLTARGETS-LABEL: test_memset_const_len_align | 196 ; CALLTARGETS-LABEL: test_memset_const_len_align |
197 ; CALLTARGETS: call memset | 197 ; CALLTARGETS: call memset |
198 | 198 |
199 define void @test_memset_const_val(i32 %iptr_dst, i32 %len) { | 199 define void @test_memset_const_val(i32 %iptr_dst, i32 %len) { |
200 entry: | 200 entry: |
201 %dst = inttoptr i32 %iptr_dst to i8* | 201 %dst = inttoptr i32 %iptr_dst to i8* |
202 call void @llvm.memset.p0i8.i32(i8* %dst, i8 0, i32 %len, i32 1, i1 0) | 202 call void @llvm.memset.p0i8.i32(i8* %dst, i8 0, i32 %len, i32 1, i1 false) |
203 ret void | 203 ret void |
204 } | 204 } |
205 ; CHECK-LABEL: test_memset_const_val | 205 ; CHECK-LABEL: test_memset_const_val |
206 ; Make sure the argument is legalized (can't movzx reg, 0). | 206 ; Make sure the argument is legalized (can't movzx reg, 0). |
207 ; CHECK: movzx {{.*}}, {{[^0]}} | 207 ; CHECK: movzx {{.*}}, {{[^0]}} |
208 ; CHECK: call -4 | 208 ; CHECK: call -4 |
209 ; CALLTARGETS-LABEL: test_memset_const_val | 209 ; CALLTARGETS-LABEL: test_memset_const_val |
210 ; CALLTARGETS: call memset | 210 ; CALLTARGETS: call memset |
211 | 211 |
212 | 212 |
(...skipping 130 matching lines...)
343 entry: | 343 entry: |
344 %r = call i64 @llvm.bswap.i64(i64 %x) | 344 %r = call i64 @llvm.bswap.i64(i64 %x) |
345 ret i64 %r | 345 ret i64 %r |
346 } | 346 } |
347 ; CHECK-LABEL: test_bswap_64 | 347 ; CHECK-LABEL: test_bswap_64 |
348 ; CHECK: bswap e{{.*}} | 348 ; CHECK: bswap e{{.*}} |
349 ; CHECK: bswap e{{.*}} | 349 ; CHECK: bswap e{{.*}} |
350 | 350 |
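
Note: the two expected bswap instructions come from splitting the i64 into 32-bit halves, byte-swapping each half, and exchanging them. A rough C sketch of that shape (illustrative only, not Subzero source; bswap32 stands in for the single x86 bswap instruction):

  #include <stdint.h>

  static uint32_t bswap32(uint32_t x) {     /* one x86 bswap on a 32-bit register */
    return (x >> 24) | ((x >> 8) & 0x0000FF00u) |
           ((x << 8) & 0x00FF0000u) | (x << 24);
  }

  static uint64_t bswap64(uint64_t x) {
    uint32_t lo = (uint32_t)x;
    uint32_t hi = (uint32_t)(x >> 32);
    /* Swap bytes within each half, then swap the halves. */
    return ((uint64_t)bswap32(lo) << 32) | bswap32(hi);
  }
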
351 define i32 @test_ctlz_32(i32 %x) { | 351 define i32 @test_ctlz_32(i32 %x) { |
352 entry: | 352 entry: |
353 %r = call i32 @llvm.ctlz.i32(i32 %x, i1 0) | 353 %r = call i32 @llvm.ctlz.i32(i32 %x, i1 false) |
354 ret i32 %r | 354 ret i32 %r |
355 } | 355 } |
356 ; CHECK-LABEL: test_ctlz_32 | 356 ; CHECK-LABEL: test_ctlz_32 |
357 ; TODO(jvoung): If we detect that LZCNT is supported, then use that | 357 ; TODO(jvoung): If we detect that LZCNT is supported, then use that |
358 ; and avoid the need to do the cmovne and xor stuff to guarantee that | 358 ; and avoid the need to do the cmovne and xor stuff to guarantee that |
359 ; the result is well-defined w/ input == 0. | 359 ; the result is well-defined w/ input == 0. |
360 ; CHECK: bsr [[REG_TMP:e.*]], {{.*}} | 360 ; CHECK: bsr [[REG_TMP:e.*]], {{.*}} |
361 ; CHECK: mov [[REG_RES:e.*]], 63 | 361 ; CHECK: mov [[REG_RES:e.*]], 63 |
362 ; CHECK: cmovne [[REG_RES]], [[REG_TMP]] | 362 ; CHECK: cmovne [[REG_RES]], [[REG_TMP]] |
363 ; CHECK: xor [[REG_RES]], 31 | 363 ; CHECK: xor [[REG_RES]], 31 |
364 | 364 |
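
Note: the bsr/cmovne/xor sequence above is what makes ctlz well-defined for a zero input. bsr leaves its destination unspecified when the source is 0, so 63 is used as the fallback, and xor with 31 turns a highest-set-bit index into a leading-zero count (and 63 xor 31 yields 32 for the zero case). A rough C sketch, illustrative only; bsr32 stands in for the bsr instruction:

  #include <stdint.h>

  static int bsr32(uint32_t x) {     /* index of the highest set bit; x != 0 */
    int i = 31;
    while (!(x & (1u << i)))
      --i;
    return i;
  }

  static uint32_t ctlz32(uint32_t x) {
    uint32_t res = 63;               /* mov REG_RES, 63                     */
    if (x != 0)
      res = (uint32_t)bsr32(x);      /* cmovne REG_RES, REG_TMP             */
    return res ^ 31;                 /* xor 31: 31 - bsr, or 32 when x == 0 */
  }
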
365 define i32 @test_ctlz_32_const() { | 365 define i32 @test_ctlz_32_const() { |
366 entry: | 366 entry: |
367 %r = call i32 @llvm.ctlz.i32(i32 123456, i1 0) | 367 %r = call i32 @llvm.ctlz.i32(i32 123456, i1 false) |
368 ret i32 %r | 368 ret i32 %r |
369 } | 369 } |
370 ; Could potentially constant fold this, but the front-end should have done that. | 370 ; Could potentially constant fold this, but the front-end should have done that. |
371 ; The dest operand must be a register and the source operand must be a register | 371 ; The dest operand must be a register and the source operand must be a register |
372 ; or memory. | 372 ; or memory. |
373 ; CHECK-LABEL: test_ctlz_32_const | 373 ; CHECK-LABEL: test_ctlz_32_const |
374 ; CHECK: bsr e{{.*}}, {{.*}}e{{.*}} | 374 ; CHECK: bsr e{{.*}}, {{.*}}e{{.*}} |
375 | 375 |
376 define i32 @test_ctlz_32_ignored(i32 %x) { | 376 define i32 @test_ctlz_32_ignored(i32 %x) { |
377 entry: | 377 entry: |
378 %ignored = call i32 @llvm.ctlz.i32(i32 %x, i1 0) | 378 %ignored = call i32 @llvm.ctlz.i32(i32 %x, i1 false) |
379 ret i32 1 | 379 ret i32 1 |
380 } | 380 } |
381 ; CHECKO2REM-LABEL: test_ctlz_32_ignored | 381 ; CHECKO2REM-LABEL: test_ctlz_32_ignored |
382 ; CHECKO2REM-NOT: bsr | 382 ; CHECKO2REM-NOT: bsr |
383 | 383 |
384 define i64 @test_ctlz_64(i64 %x) { | 384 define i64 @test_ctlz_64(i64 %x) { |
385 entry: | 385 entry: |
386 %r = call i64 @llvm.ctlz.i64(i64 %x, i1 0) | 386 %r = call i64 @llvm.ctlz.i64(i64 %x, i1 false) |
387 ret i64 %r | 387 ret i64 %r |
388 } | 388 } |
389 ; CHECKO2REM-LABEL: test_ctlz_64 | 389 ; CHECKO2REM-LABEL: test_ctlz_64 |
390 ; CHECK-LABEL: test_ctlz_64 | 390 ; CHECK-LABEL: test_ctlz_64 |
391 ; CHECK: bsr [[REG_TMP1:e.*]], {{.*}} | 391 ; CHECK: bsr [[REG_TMP1:e.*]], {{.*}} |
392 ; CHECK: mov [[REG_RES1:e.*]], 63 | 392 ; CHECK: mov [[REG_RES1:e.*]], 63 |
393 ; CHECK: cmovne [[REG_RES1]], [[REG_TMP1]] | 393 ; CHECK: cmovne [[REG_RES1]], [[REG_TMP1]] |
394 ; CHECK: xor [[REG_RES1]], 31 | 394 ; CHECK: xor [[REG_RES1]], 31 |
395 ; CHECK: add [[REG_RES1]], 32 | 395 ; CHECK: add [[REG_RES1]], 32 |
396 ; CHECK: bsr [[REG_RES2:e.*]], {{.*}} | 396 ; CHECK: bsr [[REG_RES2:e.*]], {{.*}} |
397 ; CHECK: xor [[REG_RES2]], 31 | 397 ; CHECK: xor [[REG_RES2]], 31 |
398 ; CHECK: test [[REG_UPPER:.*]], [[REG_UPPER]] | 398 ; CHECK: test [[REG_UPPER:.*]], [[REG_UPPER]] |
399 ; CHECK: cmove [[REG_RES2]], [[REG_RES1]] | 399 ; CHECK: cmove [[REG_RES2]], [[REG_RES1]] |
400 ; CHECK: mov {{.*}}, 0 | 400 ; CHECK: mov {{.*}}, 0 |
401 | 401 |
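
Note: the 64-bit version combines two of the 32-bit sequences. The first bsr/xor/add computes 32 + ctlz(low half) as the fallback, the second handles the high half, and the test/cmove picks the high-half result whenever the high half is nonzero; the trailing "mov {{.*}}, 0" clears the always-zero upper half of the i64 result. A rough C sketch, illustrative only, assuming a GCC/Clang-style __builtin_clz as a stand-in for the bsr/xor-31 pair (it is undefined for zero input, hence the guards):

  #include <stdint.h>

  static uint64_t ctlz64(uint64_t x) {
    uint32_t lo = (uint32_t)x;
    uint32_t hi = (uint32_t)(x >> 32);
    /* Low-half path: bsr/cmovne/xor as in test_ctlz_32, then add 32. */
    uint32_t res = (lo != 0 ? (uint32_t)__builtin_clz(lo) : 32u) + 32u;
    if (hi != 0)                           /* test hi, hi; cmove             */
      res = (uint32_t)__builtin_clz(hi);   /* bsr hi; xor 31                 */
    return res;                            /* upper 32 bits of the i64 are 0 */
  }
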
402 define i32 @test_ctlz_64_const(i64 %x) { | 402 define i32 @test_ctlz_64_const(i64 %x) { |
403 entry: | 403 entry: |
404 %r = call i64 @llvm.ctlz.i64(i64 123456789012, i1 0) | 404 %r = call i64 @llvm.ctlz.i64(i64 123456789012, i1 false) |
405 %r2 = trunc i64 %r to i32 | 405 %r2 = trunc i64 %r to i32 |
406 ret i32 %r2 | 406 ret i32 %r2 |
407 } | 407 } |
408 ; CHECK-LABEL: test_ctlz_64_const | 408 ; CHECK-LABEL: test_ctlz_64_const |
409 ; CHECK: bsr e{{.*}}, {{.*}}e{{.*}} | 409 ; CHECK: bsr e{{.*}}, {{.*}}e{{.*}} |
410 ; CHECK: bsr e{{.*}}, {{.*}}e{{.*}} | 410 ; CHECK: bsr e{{.*}}, {{.*}}e{{.*}} |
411 | 411 |
412 | 412 |
413 define i32 @test_ctlz_64_ignored(i64 %x) { | 413 define i32 @test_ctlz_64_ignored(i64 %x) { |
414 entry: | 414 entry: |
415 %ignored = call i64 @llvm.ctlz.i64(i64 1234567890, i1 0) | 415 %ignored = call i64 @llvm.ctlz.i64(i64 1234567890, i1 false) |
416 ret i32 2 | 416 ret i32 2 |
417 } | 417 } |
418 ; CHECKO2REM-LABEL: test_ctlz_64_ignored | 418 ; CHECKO2REM-LABEL: test_ctlz_64_ignored |
419 ; CHECKO2REM-NOT: bsr | 419 ; CHECKO2REM-NOT: bsr |
420 | 420 |
421 define i32 @test_cttz_32(i32 %x) { | 421 define i32 @test_cttz_32(i32 %x) { |
422 entry: | 422 entry: |
423 %r = call i32 @llvm.cttz.i32(i32 %x, i1 0) | 423 %r = call i32 @llvm.cttz.i32(i32 %x, i1 false) |
424 ret i32 %r | 424 ret i32 %r |
425 } | 425 } |
426 ; CHECK-LABEL: test_cttz_32 | 426 ; CHECK-LABEL: test_cttz_32 |
427 ; CHECK: bsf [[REG_IF_NOTZERO:e.*]], {{.*}} | 427 ; CHECK: bsf [[REG_IF_NOTZERO:e.*]], {{.*}} |
428 ; CHECK: mov [[REG_IF_ZERO:e.*]], 32 | 428 ; CHECK: mov [[REG_IF_ZERO:e.*]], 32 |
429 ; CHECK: cmovne [[REG_IF_ZERO]], [[REG_IF_NOTZERO]] | 429 ; CHECK: cmovne [[REG_IF_ZERO]], [[REG_IF_NOTZERO]] |
430 | 430 |
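
Note: cttz mirrors ctlz but needs no xor, since bsf already returns the trailing-zero count for nonzero input; only the zero case needs the 32 fallback selected by cmovne. A rough C sketch, illustrative only; the bit-scan loop stands in for bsf:

  #include <stdint.h>

  static uint32_t cttz32(uint32_t x) {
    uint32_t res = 32;              /* mov REG_IF_ZERO, 32                    */
    if (x != 0) {                   /* cmovne REG_IF_ZERO, REG_IF_NOTZERO     */
      uint32_t i = 0;               /* bsf: index of the lowest set bit       */
      while (!(x & (1u << i)))
        ++i;
      res = i;
    }
    return res;
  }
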
431 define i64 @test_cttz_64(i64 %x) { | 431 define i64 @test_cttz_64(i64 %x) { |
432 entry: | 432 entry: |
433 %r = call i64 @llvm.cttz.i64(i64 %x, i1 0) | 433 %r = call i64 @llvm.cttz.i64(i64 %x, i1 false) |
434 ret i64 %r | 434 ret i64 %r |
435 } | 435 } |
436 ; CHECK-LABEL: test_cttz_64 | 436 ; CHECK-LABEL: test_cttz_64 |
437 ; CHECK: bsf [[REG_IF_NOTZERO:e.*]], {{.*}} | 437 ; CHECK: bsf [[REG_IF_NOTZERO:e.*]], {{.*}} |
438 ; CHECK: mov [[REG_RES1:e.*]], 32 | 438 ; CHECK: mov [[REG_RES1:e.*]], 32 |
439 ; CHECK: cmovne [[REG_RES1]], [[REG_IF_NOTZERO]] | 439 ; CHECK: cmovne [[REG_RES1]], [[REG_IF_NOTZERO]] |
440 ; CHECK: add [[REG_RES1]], 32 | 440 ; CHECK: add [[REG_RES1]], 32 |
441 ; CHECK: bsf [[REG_RES2:e.*]], [[REG_LOWER:.*]] | 441 ; CHECK: bsf [[REG_RES2:e.*]], [[REG_LOWER:.*]] |
442 ; CHECK: test [[REG_LOWER]], [[REG_LOWER]] | 442 ; CHECK: test [[REG_LOWER]], [[REG_LOWER]] |
443 ; CHECK: cmove [[REG_RES2]], [[REG_RES1]] | 443 ; CHECK: cmove [[REG_RES2]], [[REG_RES1]] |
(...skipping 76 matching lines...)
520 } | 520 } |
521 ; CHECK-LABEL: test_stacksave_multiple | 521 ; CHECK-LABEL: test_stacksave_multiple |
522 ; At least 3 copies of esp, but probably more from having to do the allocas. | 522 ; At least 3 copies of esp, but probably more from having to do the allocas. |
523 ; CHECK: mov {{.*}}, esp | 523 ; CHECK: mov {{.*}}, esp |
524 ; CHECK: mov {{.*}}, esp | 524 ; CHECK: mov {{.*}}, esp |
525 ; CHECK: mov {{.*}}, esp | 525 ; CHECK: mov {{.*}}, esp |
526 ; CHECK: mov esp, {{.*}} | 526 ; CHECK: mov esp, {{.*}} |
527 | 527 |
528 ; ERRORS-NOT: ICE translation error | 528 ; ERRORS-NOT: ICE translation error |
529 ; DUMP-NOT: SZ | 529 ; DUMP-NOT: SZ |