| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 1307 matching lines...) |
| 1318 } | 1318 } |
| 1319 void vmaxsd(XMMRegister dst, XMMRegister src1, const Operand& src2) { | 1319 void vmaxsd(XMMRegister dst, XMMRegister src1, const Operand& src2) { |
| 1320 vsd(0x5f, dst, src1, src2); | 1320 vsd(0x5f, dst, src1, src2); |
| 1321 } | 1321 } |
| 1322 void vminsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) { | 1322 void vminsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) { |
| 1323 vsd(0x5d, dst, src1, src2); | 1323 vsd(0x5d, dst, src1, src2); |
| 1324 } | 1324 } |
| 1325 void vminsd(XMMRegister dst, XMMRegister src1, const Operand& src2) { | 1325 void vminsd(XMMRegister dst, XMMRegister src1, const Operand& src2) { |
| 1326 vsd(0x5d, dst, src1, src2); | 1326 vsd(0x5d, dst, src1, src2); |
| 1327 } | 1327 } |
| | 1328 void vcvtss2sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) { |
| | 1329 vsd(0x5a, dst, src1, src2, kF3, k0F, kWIG); |
| | 1330 } |
| | 1331 void vcvtss2sd(XMMRegister dst, XMMRegister src1, const Operand& src2) { |
| | 1332 vsd(0x5a, dst, src1, src2, kF3, k0F, kWIG); |
| | 1333 } |
| | 1334 void vcvtsd2ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) { |
| | 1335 vsd(0x5a, dst, src1, src2); |
| | 1336 } |
| | 1337 void vcvtsd2ss(XMMRegister dst, XMMRegister src1, const Operand& src2) { |
| | 1338 vsd(0x5a, dst, src1, src2); |
| | 1339 } |
| 1328 void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, Register src2) { | 1340 void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, Register src2) { |
| 1329 XMMRegister isrc2 = {src2.code()}; | 1341 XMMRegister isrc2 = {src2.code()}; |
| 1330 vsd(0x2a, dst, src1, isrc2, kF2, k0F, kW0); | 1342 vsd(0x2a, dst, src1, isrc2, kF2, k0F, kW0); |
| 1331 } | 1343 } |
| 1332 void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, const Operand& src2) { | 1344 void vcvtlsi2sd(XMMRegister dst, XMMRegister src1, const Operand& src2) { |
| 1333 vsd(0x2a, dst, src1, src2, kF2, k0F, kW0); | 1345 vsd(0x2a, dst, src1, src2, kF2, k0F, kW0); |
| 1334 } | 1346 } |
| 1335 void vcvttsd2si(Register dst, XMMRegister src) { | 1347 void vcvttsd2si(Register dst, XMMRegister src) { |
| 1336 XMMRegister idst = {dst.code()}; | 1348 XMMRegister idst = {dst.code()}; |
| 1337 vsd(0x2c, idst, xmm0, src, kF2, k0F, kW0); | 1349 vsd(0x2c, idst, xmm0, src, kF2, k0F, kW0); |
| (...skipping 819 matching lines...) |
| 2157 Assembler* assembler_; | 2169 Assembler* assembler_; |
| 2158 #ifdef DEBUG | 2170 #ifdef DEBUG |
| 2159 int space_before_; | 2171 int space_before_; |
| 2160 #endif | 2172 #endif |
| 2161 }; | 2173 }; |
| 2162 | 2174 |
| 2163 } // namespace internal | 2175 } // namespace internal |
| 2164 } // namespace v8 | 2176 } // namespace v8 |
| 2165 | 2177 |
| 2166 #endif // V8_X64_ASSEMBLER_X64_H_ | 2178 #endif // V8_X64_ASSEMBLER_X64_H_ |
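
For reference, here is a minimal sketch of how the newly added scalar conversion helpers might be used from a code generator. The helper name `EmitFloat32ToFloat64` and the SSE2 fallback path are illustrative assumptions, not part of this change; only the `vcvtss2sd` declaration itself comes from the diff above.

```cpp
// Illustrative sketch only: assumes the Assembler, XMMRegister, CpuFeatures,
// and CpuFeatureScope declarations provided by this header and src/assembler.h.
#include "src/x64/assembler-x64.h"

namespace v8 {
namespace internal {

// Hypothetical helper (not from this CL): widen a float32 in |src| to a
// float64 in |dst|, preferring the AVX encoding added in this header.
void EmitFloat32ToFloat64(Assembler* assm, XMMRegister dst, XMMRegister src) {
  if (CpuFeatures::IsSupported(AVX)) {
    CpuFeatureScope avx_scope(assm, AVX);
    // VEX.F3.0F 5A: vcvtss2sd dst, src1, src2 (src1 supplies the upper bits).
    assm->vcvtss2sd(dst, dst, src);
  } else {
    // Legacy SSE2 encoding (F3 0F 5A) as the non-AVX fallback.
    assm->cvtss2sd(dst, src);
  }
}

}  // namespace internal
}  // namespace v8
```

Guarding the VEX-encoded form behind `CpuFeatures::IsSupported(AVX)` with a `CpuFeatureScope` is a common pattern in V8's code generators; the sketch simply applies that pattern to the new conversion instructions.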