| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1037 matching lines...) |
| 1048 COMPARE(vmul(Neon16, q1, q2, q8), | 1048 COMPARE(vmul(Neon16, q1, q2, q8), |
| 1049 "f2142970 vmul.i16 q1, q2, q8"); | 1049 "f2142970 vmul.i16 q1, q2, q8"); |
| 1050 COMPARE(vmul(Neon32, q15, q0, q8), | 1050 COMPARE(vmul(Neon32, q15, q0, q8), |
| 1051 "f260e970 vmul.i32 q15, q0, q8"); | 1051 "f260e970 vmul.i32 q15, q0, q8"); |
| 1052 COMPARE(vtst(Neon8, q0, q1, q2), | 1052 COMPARE(vtst(Neon8, q0, q1, q2), |
| 1053 "f2020854 vtst.i8 q0, q1, q2"); | 1053 "f2020854 vtst.i8 q0, q1, q2"); |
| 1054 COMPARE(vtst(Neon16, q1, q2, q8), | 1054 COMPARE(vtst(Neon16, q1, q2, q8), |
| 1055 "f2142870 vtst.i16 q1, q2, q8"); | 1055 "f2142870 vtst.i16 q1, q2, q8"); |
| 1056 COMPARE(vtst(Neon32, q15, q0, q8), | 1056 COMPARE(vtst(Neon32, q15, q0, q8), |
| 1057 "f260e870 vtst.i32 q15, q0, q8"); | 1057 "f260e870 vtst.i32 q15, q0, q8"); |
| 1058 COMPARE(vceq(q0, q1, q2), |
| 1059 "f2020e44 vceq.f32 q0, q1, q2"); |
| 1058 COMPARE(vceq(Neon8, q0, q1, q2), | 1060 COMPARE(vceq(Neon8, q0, q1, q2), |
| 1059 "f3020854 vceq.i8 q0, q1, q2"); | 1061 "f3020854 vceq.i8 q0, q1, q2"); |
| 1060 COMPARE(vceq(Neon16, q1, q2, q8), | 1062 COMPARE(vceq(Neon16, q1, q2, q8), |
| 1061 "f3142870 vceq.i16 q1, q2, q8"); | 1063 "f3142870 vceq.i16 q1, q2, q8"); |
| 1062 COMPARE(vceq(Neon32, q15, q0, q8), | 1064 COMPARE(vceq(Neon32, q15, q0, q8), |
| 1063 "f360e870 vceq.i32 q15, q0, q8"); | 1065 "f360e870 vceq.i32 q15, q0, q8"); |
| 1064 COMPARE(vbsl(q0, q1, q2), | 1066 COMPARE(vbsl(q0, q1, q2), |
| 1065 "f3120154 vbsl q0, q1, q2"); | 1067 "f3120154 vbsl q0, q1, q2"); |
| 1066 COMPARE(vbsl(q15, q0, q8), | 1068 COMPARE(vbsl(q15, q0, q8), |
| 1067 "f350e170 vbsl q15, q0, q8"); | 1069 "f350e170 vbsl q15, q0, q8"); |
| (...skipping 302 matching lines...) |
| 1370 | 1372 |
| 1371 COMPARE(ldrexb(r0, r1), "e1d10f9f ldrexb r0, [r1]"); | 1373 COMPARE(ldrexb(r0, r1), "e1d10f9f ldrexb r0, [r1]"); |
| 1372 COMPARE(strexb(r0, r1, r2), "e1c20f91 strexb r0, r1, [r2]"); | 1374 COMPARE(strexb(r0, r1, r2), "e1c20f91 strexb r0, r1, [r2]"); |
| 1373 COMPARE(ldrexh(r0, r1), "e1f10f9f ldrexh r0, [r1]"); | 1375 COMPARE(ldrexh(r0, r1), "e1f10f9f ldrexh r0, [r1]"); |
| 1374 COMPARE(strexh(r0, r1, r2), "e1e20f91 strexh r0, r1, [r2]"); | 1376 COMPARE(strexh(r0, r1, r2), "e1e20f91 strexh r0, r1, [r2]"); |
| 1375 COMPARE(ldrex(r0, r1), "e1910f9f ldrex r0, [r1]"); | 1377 COMPARE(ldrex(r0, r1), "e1910f9f ldrex r0, [r1]"); |
| 1376 COMPARE(strex(r0, r1, r2), "e1820f91 strex r0, r1, [r2]"); | 1378 COMPARE(strex(r0, r1, r2), "e1820f91 strex r0, r1, [r2]"); |
| 1377 | 1379 |
| 1378 VERIFY_RUN(); | 1380 VERIFY_RUN(); |
| 1379 } | 1381 } |
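For context (not part of the patch): the expected word f2020e44 in the new vceq.f32 test case can be reconstructed from the Advanced SIMD "three registers of the same length" field layout in the ARM ARM. The sketch below is illustrative only, written under that assumption; the helper name and field handling are hypothetical and are not V8 identifiers.

// Minimal sketch (assumption: ARM ARM "three registers of the same length"
// layout for VCEQ.F32 with Q-register operands). Not part of the patch.
#include <cstdint>
#include <cstdio>

// Encode VCEQ.F32 Qd, Qn, Qm. Each Q register aliases an even D register
// (Qn -> D2n); the low four bits of that D index go in the Vd/Vn/Vm fields
// and the top bit goes in the D/N/M bits.
uint32_t EncodeVceqF32(int qd, int qn, int qm) {
  int d = qd * 2, n = qn * 2, m = qm * 2;
  uint32_t base = 0xF2000E40;  // U=0, sz=0 (F32), opcode=1110, Q=1
  return base | ((d >> 4) << 22) | ((n & 0xF) << 16) | ((d & 0xF) << 12) |
         ((n >> 4) << 7) | ((m >> 4) << 5) | (m & 0xF);
}

int main() {
  // Prints "f2020e44", matching the expected disassembly string for
  // vceq.f32 q0, q1, q2 in the added COMPARE line.
  std::printf("%08x\n", EncodeVceqF32(0, 1, 2));
  return 0;
}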