OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1432 matching lines...)
1443 __ vmovq(rdx, xmm7); | 1443 __ vmovq(rdx, xmm7); |
1444 __ movq(rcx, V8_UINT64_C(0x00123456789abcde)); | 1444 __ movq(rcx, V8_UINT64_C(0x00123456789abcde)); |
1445 __ cmpq(rdx, rcx); | 1445 __ cmpq(rdx, rcx); |
1446 __ j(not_equal, &exit); | 1446 __ j(not_equal, &exit); |
1447 __ vpsllq(xmm7, xmm6, 12); | 1447 __ vpsllq(xmm7, xmm6, 12); |
1448 __ vmovq(rdx, xmm7); | 1448 __ vmovq(rdx, xmm7); |
1449 __ movq(rcx, V8_UINT64_C(0x3456789abcdef000)); | 1449 __ movq(rcx, V8_UINT64_C(0x3456789abcdef000)); |
1450 __ cmpq(rdx, rcx); | 1450 __ cmpq(rdx, rcx); |
1451 __ j(not_equal, &exit); | 1451 __ j(not_equal, &exit); |
1452 | 1452 |
| 1453 // Test vandpd, vorpd, vxorpd |
| 1454 __ movl(rax, Immediate(14)); |
| 1455 __ movl(rdx, Immediate(0x00ff00ff)); |
| 1456 __ movl(rcx, Immediate(0x0f0f0f0f)); |
| 1457 __ vmovd(xmm4, rdx); |
| 1458 __ vmovd(xmm5, rcx); |
| 1459 __ vandpd(xmm6, xmm4, xmm5); |
| 1460 __ vmovd(rdx, xmm6); |
| 1461 __ cmpl(rdx, Immediate(0x000f000f)); |
| 1462 __ j(not_equal, &exit); |
| 1463 __ vorpd(xmm6, xmm4, xmm5); |
| 1464 __ vmovd(rdx, xmm6); |
| 1465 __ cmpl(rdx, Immediate(0x0fff0fff)); |
| 1466 __ j(not_equal, &exit); |
| 1467 __ vxorpd(xmm6, xmm4, xmm5); |
| 1468 __ vmovd(rdx, xmm6); |
| 1469 __ cmpl(rdx, Immediate(0x0ff00ff0)); |
| 1470 __ j(not_equal, &exit); |
| 1471 |
1453 __ movl(rdx, Immediate(6)); | 1472 __ movl(rdx, Immediate(6)); |
1454 __ vcvtlsi2sd(xmm6, xmm6, rdx); | 1473 __ vcvtlsi2sd(xmm6, xmm6, rdx); |
1455 __ movl(Operand(rsp, 0), Immediate(5)); | 1474 __ movl(Operand(rsp, 0), Immediate(5)); |
1456 __ vcvtlsi2sd(xmm7, xmm7, Operand(rsp, 0)); | 1475 __ vcvtlsi2sd(xmm7, xmm7, Operand(rsp, 0)); |
1457 __ vsubsd(xmm7, xmm6, xmm7); // xmm7 is 1.0 | 1476 __ vsubsd(xmm7, xmm6, xmm7); // xmm7 is 1.0 |
1458 __ vmulsd(xmm1, xmm1, xmm7); | 1477 __ vmulsd(xmm1, xmm1, xmm7); |
1459 | 1478 |
1460 __ movq(rdx, V8_INT64_C(0x3ff0000000000000)); // 1.0 | 1479 __ movq(rdx, V8_INT64_C(0x3ff0000000000000)); // 1.0 |
1461 __ vmovq(xmm7, rdx); | 1480 __ vmovq(xmm7, rdx); |
1462 __ vmulsd(xmm1, xmm1, xmm7); | 1481 __ vmulsd(xmm1, xmm1, xmm7); |
(...skipping 717 matching lines...)
2180 | 2199 |
2181 F1 f = FUNCTION_CAST<F1>(code->entry()); | 2200 F1 f = FUNCTION_CAST<F1>(code->entry()); |
2182 for (int i = 0; i < kNumCases; ++i) { | 2201 for (int i = 0; i < kNumCases; ++i) { |
2183 int res = f(i); | 2202 int res = f(i); |
2184 PrintF("f(%d) = %d\n", i, res); | 2203 PrintF("f(%d) = %d\n", i, res); |
2185 CHECK_EQ(values[i], res); | 2204 CHECK_EQ(values[i], res); |
2186 } | 2205 } |
2187 } | 2206 } |
2188 | 2207 |
2189 #undef __ | 2208 #undef __ |
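
For reference, the expected constants in the checks above are plain bit arithmetic: vpsllq shifts each 64-bit lane, and vandpd/vorpd/vxorpd act bitwise on the packed bits moved in via vmovd. A minimal standalone C++ sketch (not part of this test file; it assumes the packed source value is 0x0123456789abcdef, which is consistent with both expected shift results) that reproduces the constants:

// Standalone sanity check for the expected constants used in the new checks.
// Assumption: the 64-bit source value in xmm6 is 0x0123456789abcdef; both
// expected shift results in the patch are consistent with that value.
#include <cassert>
#include <cstdint>

int main() {
  const uint64_t v = 0x0123456789abcdefULL;
  assert((v >> 4) == 0x00123456789abcdeULL);   // first vmovq/cmpq expectation
  assert((v << 12) == 0x3456789abcdef000ULL);  // vpsllq-by-12 expectation

  // vandpd/vorpd/vxorpd are bitwise, so the low 32 bits match integer AND/OR/XOR.
  const uint32_t a = 0x00ff00ff;
  const uint32_t b = 0x0f0f0f0f;
  assert((a & b) == 0x000f000fu);  // vandpd check
  assert((a | b) == 0x0fff0fffu);  // vorpd check
  assert((a ^ b) == 0x0ff00ff0u);  // vxorpd check
  return 0;
}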