OLD | NEW |
1 // Copyright 2007-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2007-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 404 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// Checks the ARM disassembler's output for VFP (floating-point)
// instructions: each COMPARE pairs an assembled instruction with the
// expected hex encoding and disassembly text.
// NOTE(review): reconstructed from a diff view — whitespace inside the
// expected strings was collapsed by the scrape; confirm exact padding
// against the committed file.
TEST(Vfp) {
  SETUP();

  // VFP instructions are only assembled and checked when the host
  // supports VFP3; otherwise the test body is skipped entirely.
  if (CpuFeatures::IsSupported(VFP3)) {
    CpuFeatures::Scope scope(VFP3);

    // Double-precision register-to-register moves, unconditional and
    // condition-code (eq) variants.
    COMPARE(vmov(d0, d1),
            "eeb00b41 vmov.f64 d0, d1");
    COMPARE(vmov(d3, d3, eq),
            "0eb03b43 vmov.f64eq d3, d3");

    // Single-precision moves, including transfers between core (r*)
    // registers and VFP single-precision (s*) registers, exercising the
    // low (s0/r0) and high (s31/r10) ends of the register ranges.
    COMPARE(vmov(s0, s31),
            "eeb00a6f vmov.f32 s0, s31");
    COMPARE(vmov(s31, s0),
            "eef0fa40 vmov.f32 s31, s0");
    COMPARE(vmov(r0, s0),
            "ee100a10 vmov r0, s0");
    COMPARE(vmov(r10, s31),
            "ee1faa90 vmov r10, s31");
    COMPARE(vmov(s0, r0),
            "ee000a10 vmov s0, r0");
    COMPARE(vmov(s31, r10),
            "ee0faa90 vmov s31, r10");

    // Double-precision arithmetic: plain and conditional forms of
    // vadd/vsub/vmul/vdiv/vsqrt.
    COMPARE(vadd(d0, d1, d2),
            "ee310b02 vadd.f64 d0, d1, d2");
    COMPARE(vadd(d3, d4, d5, mi),
            "4e343b05 vadd.f64mi d3, d4, d5");

    COMPARE(vsub(d0, d1, d2),
            "ee310b42 vsub.f64 d0, d1, d2");
    COMPARE(vsub(d3, d4, d5, ne),
            "1e343b45 vsub.f64ne d3, d4, d5");

    COMPARE(vmul(d2, d1, d0),
            "ee212b00 vmul.f64 d2, d1, d0");
    COMPARE(vmul(d6, d4, d5, cc),
            "3e246b05 vmul.f64cc d6, d4, d5");

    COMPARE(vdiv(d2, d2, d2),
            "ee822b02 vdiv.f64 d2, d2, d2");
    COMPARE(vdiv(d6, d7, d7, hi),
            "8e876b07 vdiv.f64hi d6, d7, d7");

    COMPARE(vsqrt(d0, d0),
            "eeb10bc0 vsqrt.f64 d0, d0");
    COMPARE(vsqrt(d2, d3, ne),
            "1eb12bc3 vsqrt.f64ne d2, d3");

    // Immediate moves into double registers (VFP3 encoded-immediate form).
    COMPARE(vmov(d0, 1.0),
            "eeb70b00 vmov.f64 d0, #1");
    COMPARE(vmov(d2, -13.0),
            "eeba2b0a vmov.f64 d2, #-13");

    // Single-precision loads: the third argument is a byte offset, which
    // the disassembler prints as "4*<word offset>"; cases cover both
    // halves of the s-register file (D bit clear/set) and the maximum
    // encodable offset (1020 = 4*255).
    COMPARE(vldr(s0, r0, 0),
            "ed900a00 vldr s0, [r0 + 4*0]");
    COMPARE(vldr(s1, r1, 4),
            "edd10a01 vldr s1, [r1 + 4*1]");
    COMPARE(vldr(s15, r4, 16),
            "edd47a04 vldr s15, [r4 + 4*4]");
    COMPARE(vldr(s16, r5, 20),
            "ed958a05 vldr s16, [r5 + 4*5]");
    COMPARE(vldr(s31, r10, 1020),
            "eddafaff vldr s31, [r10 + 4*255]");

    // Single-precision stores, mirroring the load cases above.
    COMPARE(vstr(s0, r0, 0),
            "ed800a00 vstr s0, [r0 + 4*0]");
    COMPARE(vstr(s1, r1, 4),
            "edc10a01 vstr s1, [r1 + 4*1]");
    COMPARE(vstr(s15, r8, 8),
            "edc87a02 vstr s15, [r8 + 4*2]");
    COMPARE(vstr(s16, r9, 12),
            "ed898a03 vstr s16, [r9 + 4*3]");
    COMPARE(vstr(s31, r10, 1020),
            "edcafaff vstr s31, [r10 + 4*255]");

    // Double-precision loads and stores, including the maximum offset.
    COMPARE(vldr(d0, r0, 0),
            "ed900b00 vldr d0, [r0 + 4*0]");
    COMPARE(vldr(d1, r1, 4),
            "ed911b01 vldr d1, [r1 + 4*1]");
    COMPARE(vldr(d15, r10, 1020),
            "ed9afbff vldr d15, [r10 + 4*255]");
    COMPARE(vstr(d0, r0, 0),
            "ed800b00 vstr d0, [r0 + 4*0]");
    COMPARE(vstr(d1, r1, 4),
            "ed811b01 vstr d1, [r1 + 4*1]");
    COMPARE(vstr(d15, r10, 1020),
            "ed8afbff vstr d15, [r10 + 4*255]");
  }

  VERIFY_RUN();
}
OLD | NEW |