OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1220 (bit_cast<int64_t>(t.div_result) >> 32) & 0x7fffffff); | 1220 (bit_cast<int64_t>(t.div_result) >> 32) & 0x7fffffff); |
1221 CHECK_EQ(kArmNanLower32, bit_cast<int64_t>(t.div_result) & 0xffffffffu); | 1221 CHECK_EQ(kArmNanLower32, bit_cast<int64_t>(t.div_result) & 0xffffffffu); |
1222 } | 1222 } |
1223 | 1223 |
1224 #define CHECK_EQ_SPLAT(field, ex) \ | 1224 #define CHECK_EQ_SPLAT(field, ex) \ |
1225 CHECK_EQ(ex, t.field[0]); \ | 1225 CHECK_EQ(ex, t.field[0]); \ |
1226 CHECK_EQ(ex, t.field[1]); \ | 1226 CHECK_EQ(ex, t.field[1]); \ |
1227 CHECK_EQ(ex, t.field[2]); \ | 1227 CHECK_EQ(ex, t.field[2]); \ |
1228 CHECK_EQ(ex, t.field[3]); | 1228 CHECK_EQ(ex, t.field[3]); |
1229 | 1229 |
| 1230 #define CHECK_EQ_32X2(field, ex0, ex1) \ |
| 1231 CHECK_EQ(ex0, t.field[0]); \ |
| 1232 CHECK_EQ(ex1, t.field[1]); |
| 1233 |
1230 #define CHECK_EQ_32X4(field, ex0, ex1, ex2, ex3) \ | 1234 #define CHECK_EQ_32X4(field, ex0, ex1, ex2, ex3) \ |
1231 CHECK_EQ(ex0, t.field[0]); \ | 1235 CHECK_EQ(ex0, t.field[0]); \ |
1232 CHECK_EQ(ex1, t.field[1]); \ | 1236 CHECK_EQ(ex1, t.field[1]); \ |
1233 CHECK_EQ(ex2, t.field[2]); \ | 1237 CHECK_EQ(ex2, t.field[2]); \ |
1234 CHECK_EQ(ex3, t.field[3]); | 1238 CHECK_EQ(ex3, t.field[3]); |
1235 | 1239 |
1236 #define CHECK_ESTIMATE(expected, tolerance, value) \ | 1240 #define CHECK_ESTIMATE(expected, tolerance, value) \ |
1237 CHECK_LT((expected) - (tolerance), value); \ | 1241 CHECK_LT((expected) - (tolerance), value); \ |
1238 CHECK_GT((expected) + (tolerance), value); | 1242 CHECK_GT((expected) + (tolerance), value); |
1239 | 1243 |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1291 uint32_t vcvt_u32_f32[4]; | 1295 uint32_t vcvt_u32_f32[4]; |
1292 float vcvt_f32_s32[4], vcvt_f32_u32[4]; | 1296 float vcvt_f32_s32[4], vcvt_f32_u32[4]; |
1293 uint32_t vdup8[4], vdup16[4], vdup32[4]; | 1297 uint32_t vdup8[4], vdup16[4], vdup32[4]; |
1294 float vabsf[4], vnegf[4]; | 1298 float vabsf[4], vnegf[4]; |
1295 uint32_t vabs_s8[4], vabs_s16[4], vabs_s32[4]; | 1299 uint32_t vabs_s8[4], vabs_s16[4], vabs_s32[4]; |
1296 uint32_t vneg_s8[4], vneg_s16[4], vneg_s32[4]; | 1300 uint32_t vneg_s8[4], vneg_s16[4], vneg_s32[4]; |
1297 uint32_t veor[4], vand[4], vorr[4]; | 1301 uint32_t veor[4], vand[4], vorr[4]; |
1298 float vdupf[4], vaddf[4], vsubf[4], vmulf[4]; | 1302 float vdupf[4], vaddf[4], vsubf[4], vmulf[4]; |
1299 uint32_t vmin_s8[4], vmin_u16[4], vmin_s32[4]; | 1303 uint32_t vmin_s8[4], vmin_u16[4], vmin_s32[4]; |
1300 uint32_t vmax_s8[4], vmax_u16[4], vmax_s32[4]; | 1304 uint32_t vmax_s8[4], vmax_u16[4], vmax_s32[4]; |
| 1305 uint32_t vpmin_s8[2], vpmin_u16[2], vpmin_s32[2]; |
| 1306 uint32_t vpmax_s8[2], vpmax_u16[2], vpmax_s32[2]; |
1301 uint32_t vadd8[4], vadd16[4], vadd32[4]; | 1307 uint32_t vadd8[4], vadd16[4], vadd32[4]; |
1302 uint32_t vqadd_s8[4], vqadd_u16[4], vqadd_s32[4]; | 1308 uint32_t vqadd_s8[4], vqadd_u16[4], vqadd_s32[4]; |
1303 uint32_t vsub8[4], vsub16[4], vsub32[4]; | 1309 uint32_t vsub8[4], vsub16[4], vsub32[4]; |
1304 uint32_t vqsub_u8[4], vqsub_s16[4], vqsub_u32[4]; | 1310 uint32_t vqsub_u8[4], vqsub_s16[4], vqsub_u32[4]; |
1305 uint32_t vmul8[4], vmul16[4], vmul32[4]; | 1311 uint32_t vmul8[4], vmul16[4], vmul32[4]; |
1306 uint32_t vshl8[4], vshl16[4], vshl32[5]; | 1312 uint32_t vshl8[4], vshl16[4], vshl32[5]; |
1307 uint32_t vshr_s8[4], vshr_u16[4], vshr_s32[5]; | 1313 uint32_t vshr_s8[4], vshr_u16[4], vshr_s32[5]; |
1308 uint32_t vceq[4], vceqf[4], vcgef[4], vcgtf[4]; | 1314 uint32_t vceq[4], vceqf[4], vcgef[4], vcgtf[4]; |
1309 uint32_t vcge_s8[4], vcge_u16[4], vcge_s32[4]; | 1315 uint32_t vcge_s8[4], vcge_u16[4], vcge_s32[4]; |
1310 uint32_t vcgt_s8[4], vcgt_u16[4], vcgt_s32[4]; | 1316 uint32_t vcgt_s8[4], vcgt_u16[4], vcgt_s32[4]; |
(...skipping 294 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1605 __ mov(r4, Operand(0xff)); | 1611 __ mov(r4, Operand(0xff)); |
1606 __ vdup(Neon32, q0, r4); | 1612 __ vdup(Neon32, q0, r4); |
1607 __ vdup(Neon8, q1, r4); | 1613 __ vdup(Neon8, q1, r4); |
1608 __ vmin(NeonS32, q2, q0, q1); | 1614 __ vmin(NeonS32, q2, q0, q1); |
1609 __ add(r4, r0, Operand(static_cast<int32_t>(offsetof(T, vmin_s32)))); | 1615 __ add(r4, r0, Operand(static_cast<int32_t>(offsetof(T, vmin_s32)))); |
1610 __ vst1(Neon8, NeonListOperand(q2), NeonMemOperand(r4)); | 1616 __ vst1(Neon8, NeonListOperand(q2), NeonMemOperand(r4)); |
1611 __ vmax(NeonS32, q2, q0, q1); | 1617 __ vmax(NeonS32, q2, q0, q1); |
1612 __ add(r4, r0, Operand(static_cast<int32_t>(offsetof(T, vmax_s32)))); | 1618 __ add(r4, r0, Operand(static_cast<int32_t>(offsetof(T, vmax_s32)))); |
1613 __ vst1(Neon8, NeonListOperand(q2), NeonMemOperand(r4)); | 1619 __ vst1(Neon8, NeonListOperand(q2), NeonMemOperand(r4)); |
1614 | 1620 |
| 1621 // vpmin/vpmax integer. |
| 1622 __ mov(r4, Operand(0x03)); |
| 1623 __ vdup(Neon16, q0, r4); |
| 1624 __ vdup(Neon8, q1, r4); |
| 1625 __ vpmin(NeonS8, d4, d0, d2); |
| 1626 __ vstr(d4, r0, offsetof(T, vpmin_s8)); |
| 1627 __ vpmax(NeonS8, d4, d0, d2); |
| 1628 __ vstr(d4, r0, offsetof(T, vpmax_s8)); |
| 1629 __ mov(r4, Operand(0xffff)); |
| 1630 __ vdup(Neon32, q0, r4); |
| 1631 __ vdup(Neon16, q1, r4); |
| 1632 __ vpmin(NeonU16, d4, d0, d2); |
| 1633 __ vstr(d4, r0, offsetof(T, vpmin_u16)); |
| 1634 __ vpmax(NeonU16, d4, d0, d2); |
| 1635 __ vstr(d4, r0, offsetof(T, vpmax_u16)); |
| 1636 __ mov(r4, Operand(0xff)); |
| 1637 __ veor(q0, q0, q0); |
| 1638 __ vmov(s0, r4); |
| 1639 __ vdup(Neon8, q1, r4); |
| 1640 __ vpmin(NeonS32, d4, d0, d2); |
| 1641 __ vstr(d4, r0, offsetof(T, vpmin_s32)); |
| 1642 __ vpmax(NeonS32, d4, d0, d2); |
| 1643 __ vstr(d4, r0, offsetof(T, vpmax_s32)); |
| 1644 |
1615 // vadd (integer). | 1645 // vadd (integer). |
1616 __ mov(r4, Operand(0x81)); | 1646 __ mov(r4, Operand(0x81)); |
1617 __ vdup(Neon8, q0, r4); | 1647 __ vdup(Neon8, q0, r4); |
1618 __ mov(r4, Operand(0x82)); | 1648 __ mov(r4, Operand(0x82)); |
1619 __ vdup(Neon8, q1, r4); | 1649 __ vdup(Neon8, q1, r4); |
1620 __ vadd(Neon8, q1, q1, q0); | 1650 __ vadd(Neon8, q1, q1, q0); |
1621 __ add(r4, r0, Operand(static_cast<int32_t>(offsetof(T, vadd8)))); | 1651 __ add(r4, r0, Operand(static_cast<int32_t>(offsetof(T, vadd8)))); |
1622 __ vst1(Neon8, NeonListOperand(q1), NeonMemOperand(r4)); | 1652 __ vst1(Neon8, NeonListOperand(q1), NeonMemOperand(r4)); |
1623 __ mov(r4, Operand(0x8001)); | 1653 __ mov(r4, Operand(0x8001)); |
1624 __ vdup(Neon16, q0, r4); | 1654 __ vdup(Neon16, q0, r4); |
(...skipping 360 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1985 CHECK_EQ_32X4(vcgtf, 0u, 0xffffffffu, 0u, 0u); | 2015 CHECK_EQ_32X4(vcgtf, 0u, 0xffffffffu, 0u, 0u); |
1986 // [0, 3, 0, 3, ...] and [3, 3, 3, 3, ...] | 2016 // [0, 3, 0, 3, ...] and [3, 3, 3, 3, ...] |
1987 CHECK_EQ_SPLAT(vmin_s8, 0x00030003u); | 2017 CHECK_EQ_SPLAT(vmin_s8, 0x00030003u); |
1988 CHECK_EQ_SPLAT(vmax_s8, 0x03030303u); | 2018 CHECK_EQ_SPLAT(vmax_s8, 0x03030303u); |
1989 // [0x00ff, 0x00ff, ...] and [0xffff, 0xffff, ...] | 2019 // [0x00ff, 0x00ff, ...] and [0xffff, 0xffff, ...] |
1990 CHECK_EQ_SPLAT(vmin_u16, 0x00ff00ffu); | 2020 CHECK_EQ_SPLAT(vmin_u16, 0x00ff00ffu); |
1991 CHECK_EQ_SPLAT(vmax_u16, 0xffffffffu); | 2021 CHECK_EQ_SPLAT(vmax_u16, 0xffffffffu); |
1992 // [0x000000ff, 0x000000ff, ...] and [0xffffffff, 0xffffffff, ...] | 2022 // [0x000000ff, 0x000000ff, ...] and [0xffffffff, 0xffffffff, ...] |
1993 CHECK_EQ_SPLAT(vmin_s32, 0xffffffffu); | 2023 CHECK_EQ_SPLAT(vmin_s32, 0xffffffffu); |
1994 CHECK_EQ_SPLAT(vmax_s32, 0xffu); | 2024 CHECK_EQ_SPLAT(vmax_s32, 0xffu); |
| 2025 // [0, 3, 0, 3, ...] and [3, 3, 3, 3, ...] |
| 2026 CHECK_EQ_32X2(vpmin_s8, 0x00000000u, 0x03030303u); |
| 2027 CHECK_EQ_32X2(vpmax_s8, 0x03030303u, 0x03030303u); |
 | 2028 // [0, ffff, 0, ffff] and [ffff, ffff, ffff, ffff] |
| 2029 CHECK_EQ_32X2(vpmin_u16, 0x00000000u, 0xffffffffu); |
| 2030 CHECK_EQ_32X2(vpmax_u16, 0xffffffffu, 0xffffffffu); |
 | 2031 // [0x000000ff, 0x00000000] and [0xffffffff, 0xffffffff] |
| 2032 CHECK_EQ_32X2(vpmin_s32, 0x00u, 0xffffffffu); |
| 2033 CHECK_EQ_32X2(vpmax_s32, 0xffu, 0xffffffffu); |
1995 CHECK_EQ_SPLAT(vadd8, 0x03030303u); | 2034 CHECK_EQ_SPLAT(vadd8, 0x03030303u); |
1996 CHECK_EQ_SPLAT(vadd16, 0x00030003u); | 2035 CHECK_EQ_SPLAT(vadd16, 0x00030003u); |
1997 CHECK_EQ_SPLAT(vadd32, 0x00000003u); | 2036 CHECK_EQ_SPLAT(vadd32, 0x00000003u); |
1998 CHECK_EQ_SPLAT(vqadd_s8, 0x80808080u); | 2037 CHECK_EQ_SPLAT(vqadd_s8, 0x80808080u); |
1999 CHECK_EQ_SPLAT(vqadd_u16, 0xffffffffu); | 2038 CHECK_EQ_SPLAT(vqadd_u16, 0xffffffffu); |
2000 CHECK_EQ_SPLAT(vqadd_s32, 0x80000000u); | 2039 CHECK_EQ_SPLAT(vqadd_s32, 0x80000000u); |
2001 CHECK_EQ_SPLAT(vqsub_u8, 0x00000000u); | 2040 CHECK_EQ_SPLAT(vqsub_u8, 0x00000000u); |
2002 CHECK_EQ_SPLAT(vqsub_s16, 0x7fff7fffu); | 2041 CHECK_EQ_SPLAT(vqsub_s16, 0x7fff7fffu); |
2003 CHECK_EQ_SPLAT(vqsub_u32, 0x00000000u); | 2042 CHECK_EQ_SPLAT(vqsub_u32, 0x00000000u); |
2004 CHECK_EQ_SPLAT(vsub8, 0xfefefefeu); | 2043 CHECK_EQ_SPLAT(vsub8, 0xfefefefeu); |
(...skipping 1720 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3725 HandleScope scope(isolate); | 3764 HandleScope scope(isolate); |
3726 | 3765 |
3727 Assembler assm(isolate, NULL, 0); | 3766 Assembler assm(isolate, NULL, 0); |
3728 __ mov(r0, Operand(isolate->factory()->infinity_value())); | 3767 __ mov(r0, Operand(isolate->factory()->infinity_value())); |
3729 __ BlockConstPoolFor(1019); | 3768 __ BlockConstPoolFor(1019); |
3730 for (int i = 0; i < 1019; ++i) __ nop(); | 3769 for (int i = 0; i < 1019; ++i) __ nop(); |
3731 __ vldr(d0, MemOperand(r0, 0)); | 3770 __ vldr(d0, MemOperand(r0, 0)); |
3732 } | 3771 } |
3733 | 3772 |
3734 #undef __ | 3773 #undef __ |
OLD | NEW |