OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 148 matching lines...)
159 | 159 |
160 | 160 |
161 // ----------------------------------------------------------------------------- | 161 // ----------------------------------------------------------------------------- |
162 // Implementation of RelocInfo | 162 // Implementation of RelocInfo |
163 | 163 |
164 // Patch the code at the current PC with a call to the target address. | 164 // Patch the code at the current PC with a call to the target address. |
165 // Additional guard int3 instructions can be added if required. | 165 // Additional guard int3 instructions can be added if required. |
166 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) { | 166 void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) { |
167 // Loading a register with a 64-bit immediate and calling through that | 167 // Loading a register with a 64-bit immediate and calling through that |
168 // register takes 13 bytes and int3 takes one byte. | 168 // register takes 13 bytes (9 on x32) and int3 takes one byte. |
| 169 #ifndef V8_TARGET_ARCH_X32 |
169 static const int kCallCodeSize = 13; | 170 static const int kCallCodeSize = 13; |
| 171 #else |
| 172 static const int kCallCodeSize = 9; |
| 173 #endif |
170 int code_size = kCallCodeSize + guard_bytes; | 174 int code_size = kCallCodeSize + guard_bytes; |
171 | 175 |
172 // Create a code patcher. | 176 // Create a code patcher. |
173 CodePatcher patcher(pc_, code_size); | 177 CodePatcher patcher(pc_, code_size); |
174 | 178 |
175 // Add a label for checking the size of the code used for returning. | 179 // Add a label for checking the size of the code used for returning. |
176 #ifdef DEBUG | 180 #ifdef DEBUG |
177 Label check_codesize; | 181 Label check_codesize; |
178 patcher.masm()->bind(&check_codesize); | 182 patcher.masm()->bind(&check_codesize); |
179 #endif | 183 #endif |
180 | 184 |
181 // Patch the code. | 185 // Patch the code. |
| 186 #ifndef V8_TARGET_ARCH_X32 |
182 patcher.masm()->movq(r10, target, RelocInfo::NONE64); | 187 patcher.masm()->movq(r10, target, RelocInfo::NONE64); |
| 188 #else |
| 189 patcher.masm()->movl(r10, target, RelocInfo::NONE32); |
| 190 #endif |
183 patcher.masm()->call(r10); | 191 patcher.masm()->call(r10); |
184 | 192 |
185 // Check that the size of the code generated is as expected. | 193 // Check that the size of the code generated is as expected. |
186 ASSERT_EQ(kCallCodeSize, | 194 ASSERT_EQ(kCallCodeSize, |
187 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize)); | 195 patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize)); |
188 | 196 |
189 // Add the requested number of int3 instructions after the call. | 197 // Add the requested number of int3 instructions after the call. |
190 for (int i = 0; i < guard_bytes; i++) { | 198 for (int i = 0; i < guard_bytes; i++) { |
191 patcher.masm()->int3(); | 199 patcher.masm()->int3(); |
192 } | 200 } |
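Note: the two kCallCodeSize values fall straight out of the instruction encodings. A rough byte-count sketch, assuming the standard x86-64 forms of MOV r, imm and CALL r/m (the constants below are illustrative, not part of the patch):

  // x64: movq r10, imm64 -> 49 BA <imm64> = 10 bytes (REX.W+B, 0xB8|r, imm64)
  //      call r10        -> 41 FF D2      =  3 bytes (REX.B, 0xFF /2)
  //                                         => 13 bytes total
  // x32: movl r10, imm32 -> 41 BA <imm32> =  6 bytes (REX.B, 0xB8|r, imm32)
  //      call r10        -> 41 FF D2      =  3 bytes
  //                                         =>  9 bytes total
  static const int kMovImm64Size = 1 + 1 + 8;  // REX, opcode, immediate
  static const int kMovImm32Size = 1 + 1 + 4;
  static const int kCallRegSize  = 1 + 2;      // REX, 0xFF /2 with ModRM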
(...skipping 1173 matching lines...)
1366 void Assembler::leal(Register dst, const Operand& src) { | 1374 void Assembler::leal(Register dst, const Operand& src) { |
1367 EnsureSpace ensure_space(this); | 1375 EnsureSpace ensure_space(this); |
1368 emit_optional_rex_32(dst, src); | 1376 emit_optional_rex_32(dst, src); |
1369 emit(0x8D); | 1377 emit(0x8D); |
1370 emit_operand(dst, src); | 1378 emit_operand(dst, src); |
1371 } | 1379 } |
1372 | 1380 |
1373 | 1381 |
1374 void Assembler::load_rax(void* value, RelocInfo::Mode mode) { | 1382 void Assembler::load_rax(void* value, RelocInfo::Mode mode) { |
1375 EnsureSpace ensure_space(this); | 1383 EnsureSpace ensure_space(this); |
| 1384 #ifndef V8_TARGET_ARCH_X32 |
1376 emit(0x48); // REX.W | 1385 emit(0x48); // REX.W |
| 1386 #endif |
1377 emit(0xA1); | 1387 emit(0xA1); |
| 1388 #ifndef V8_TARGET_ARCH_X32 |
1378 emitq(reinterpret_cast<uintptr_t>(value), mode); | 1389 emitq(reinterpret_cast<uintptr_t>(value), mode); |
| 1390 #else |
| 1391 // In 64-bit mode, the moffs operand must be zero-extended to 8 bytes. |
| 1392 // See Section 2.2.1.4 of the Intel 64 and IA-32 Architectures Software |
| 1393 // Developer's Manual, Volume 2. |
| 1394 emitl(reinterpret_cast<uintptr_t>(value), mode); |
| 1395 emitl(0); |
| 1396 #endif |
1379 } | 1397 } |
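Note: opcode 0xA1 (MOV rAX, moffs) is the asymmetric case here because the moffs field is always 8 bytes wide in 64-bit mode, whatever the operand size, so the x32 build pads rather than shortens. A minimal sketch of the emitted bytes under that rule (EmitLoadRaxX32 is a hypothetical stand-in for the assembler internals):

  #include <cstdint>
  #include <cstring>

  inline uint8_t* EmitLoadRaxX32(uint8_t* out, uint32_t address) {
    *out++ = 0xA1;                  // MOV eax, moffs; no REX.W on x32
    std::memcpy(out, &address, 4);  // low half: the 32-bit x32 address
    out += 4;
    std::memset(out, 0, 4);         // high half: zero-extended, per the
    return out + 4;                 // SDM rule cited in the comment above
  }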
1380 | 1398 |
1381 | 1399 |
1382 void Assembler::load_rax(ExternalReference ref) { | 1400 void Assembler::load_rax(ExternalReference ref) { |
1383 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE); | 1401 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE); |
1384 } | 1402 } |
1385 | 1403 |
1386 | 1404 |
1387 void Assembler::leave() { | 1405 void Assembler::leave() { |
1388 EnsureSpace ensure_space(this); | 1406 EnsureSpace ensure_space(this); |
(...skipping 85 matching lines...)
1474 | 1492 |
1475 | 1493 |
1476 void Assembler::movl(Register dst, Immediate value) { | 1494 void Assembler::movl(Register dst, Immediate value) { |
1477 EnsureSpace ensure_space(this); | 1495 EnsureSpace ensure_space(this); |
1478 emit_optional_rex_32(dst); | 1496 emit_optional_rex_32(dst); |
1479 emit(0xB8 + dst.low_bits()); | 1497 emit(0xB8 + dst.low_bits()); |
1480 emit(value); | 1498 emit(value); |
1481 } | 1499 } |
1482 | 1500 |
1483 | 1501 |
| 1502 #ifdef V8_TARGET_ARCH_X32 |
| 1503 void Assembler::movl(Register dst, int32_t value, RelocInfo::Mode rmode) { |
| 1504 // Non-relocatable values can be emitted as plain 32-bit immediates. |
| 1505 if (RelocInfo::IsNone(rmode)) { |
| 1506 movl(dst, Immediate(value)); |
| 1507 return; |
| 1508 } |
| 1509 EnsureSpace ensure_space(this); |
| 1510 emit_optional_rex_32(dst); |
| 1511 emit(0xB8 | dst.low_bits()); |
| 1512 emitl(value, rmode); |
| 1513 } |
| 1514 #endif |
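Note: the IsNone early-out mirrors the movq optimization further down; the bytes emitted are the same either way (optional REX, 0xB8|r, imm32), and the only difference is whether a RelocInfo entry pointing at the immediate is recorded so it can be patched later. A usage sketch, assuming an Assembler masm in scope and a purely illustrative address:

  masm.movl(rbx, Immediate(0x1234));  // BB 34 12 00 00, no reloc entry
  masm.movl(rbx, 0x08049000, RelocInfo::EXTERNAL_REFERENCE);
                                      // BB 00 90 04 08, plus a reloc entry
                                      // pointing at the imm32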
| 1515 |
| 1516 |
1484 void Assembler::movq(Register dst, const Operand& src) { | 1517 void Assembler::movq(Register dst, const Operand& src) { |
1485 EnsureSpace ensure_space(this); | 1518 EnsureSpace ensure_space(this); |
1486 emit_rex_64(dst, src); | 1519 emit_rex_64(dst, src); |
1487 emit(0x8B); | 1520 emit(0x8B); |
1488 emit_operand(dst, src); | 1521 emit_operand(dst, src); |
1489 } | 1522 } |
1490 | 1523 |
1491 | 1524 |
1492 void Assembler::movq(Register dst, Register src) { | 1525 void Assembler::movq(Register dst, Register src) { |
1493 EnsureSpace ensure_space(this); | 1526 EnsureSpace ensure_space(this); |
(...skipping 19 matching lines...)
1513 | 1546 |
1514 | 1547 |
1515 void Assembler::movq(const Operand& dst, Register src) { | 1548 void Assembler::movq(const Operand& dst, Register src) { |
1516 EnsureSpace ensure_space(this); | 1549 EnsureSpace ensure_space(this); |
1517 emit_rex_64(src, dst); | 1550 emit_rex_64(src, dst); |
1518 emit(0x89); | 1551 emit(0x89); |
1519 emit_operand(src, dst); | 1552 emit_operand(src, dst); |
1520 } | 1553 } |
1521 | 1554 |
1522 | 1555 |
| 1556 #ifndef V8_TARGET_ARCH_X32 |
1523 void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) { | 1557 void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) { |
1524 // This method must not be used with heap object references. The stored | 1558 // This method must not be used with heap object references. The stored |
1525 // address is not GC safe. Use the handle version instead. | 1559 // address is not GC safe. Use the handle version instead. |
1526 ASSERT(rmode > RelocInfo::LAST_GCED_ENUM); | 1560 ASSERT(rmode > RelocInfo::LAST_GCED_ENUM); |
1527 EnsureSpace ensure_space(this); | 1561 EnsureSpace ensure_space(this); |
1528 emit_rex_64(dst); | 1562 emit_rex_64(dst); |
1529 emit(0xB8 | dst.low_bits()); | 1563 emit(0xB8 | dst.low_bits()); |
1530 emitq(reinterpret_cast<uintptr_t>(value), rmode); | 1564 emitq(reinterpret_cast<uintptr_t>(value), rmode); |
1531 } | 1565 } |
| 1566 #else |
| 1567 void Assembler::movl(Register dst, void* value, RelocInfo::Mode rmode) { |
| 1568 // This method must not be used with heap object references. The stored |
| 1569 // address is not GC safe. Use the handle version instead. |
| 1570 ASSERT(rmode > RelocInfo::LAST_GCED_ENUM); |
| 1571 EnsureSpace ensure_space(this); |
| 1572 emit_optional_rex_32(dst); |
| 1573 emit(0xB8 | dst.low_bits()); |
| 1574 emitl(reinterpret_cast<uintptr_t>(value), rmode); |
| 1575 } |
| 1576 #endif |
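Note: this pairing is where the pointer-width saving shows up. A byte-level sketch, assuming standard encodings:

  // x64: movq rdx, value -> 48 BA <imm64> = 10 bytes (REX.W, 0xB8|r, imm64)
  // x32: movl rdx, value ->    BA <imm32> =  5 bytes (REX only for r8-r15)
  // Every x32 address fits in 32 bits, so the short form still reaches the
  // whole address space.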
1532 | 1577 |
1533 | 1578 |
1534 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) { | 1579 void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) { |
| 1580 #ifndef V8_TARGET_ARCH_X32 |
1535 // Non-relocatable values might not need a 64-bit representation. | 1581 // Non-relocatable values might not need a 64-bit representation. |
1536 if (RelocInfo::IsNone(rmode)) { | 1582 if (RelocInfo::IsNone(rmode)) { |
1537 if (is_uint32(value)) { | 1583 if (is_uint32(value)) { |
1538 movl(dst, Immediate(static_cast<int32_t>(value))); | 1584 movl(dst, Immediate(static_cast<int32_t>(value))); |
1539 return; | 1585 return; |
1540 } else if (is_int32(value)) { | 1586 } else if (is_int32(value)) { |
1541 movq(dst, Immediate(static_cast<int32_t>(value))); | 1587 movq(dst, Immediate(static_cast<int32_t>(value))); |
1542 return; | 1588 return; |
1543 } | 1589 } |
1544 // Value cannot be represented by 32 bits, so do a full 64-bit immediate | 1590 // Value cannot be represented by 32 bits, so do a full 64-bit immediate |
1545 // value. | 1591 // value. |
1546 } | 1592 } |
| 1593 #else |
| 1594 ASSERT(RelocInfo::IsNone(rmode)); |
| 1595 if (is_uint32(value)) { |
| 1596 movl(dst, Immediate(static_cast<int32_t>(value))); |
| 1597 return; |
| 1598 } else if (is_int32(value)) { |
| 1599 movq(dst, Immediate(static_cast<int32_t>(value))); |
| 1600 return; |
| 1601 } |
| 1602 // Value cannot be represented by 32 bits, so do a full 64-bit immediate |
| 1603 // value. |
| 1604 #endif |
1547 EnsureSpace ensure_space(this); | 1605 EnsureSpace ensure_space(this); |
1548 emit_rex_64(dst); | 1606 emit_rex_64(dst); |
1549 emit(0xB8 | dst.low_bits()); | 1607 emit(0xB8 | dst.low_bits()); |
1550 emitq(value, rmode); | 1608 emitq(value, rmode); |
1551 } | 1609 } |
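Note: the three-way split picks the cheapest encoding that preserves the value: movl zero-extends a uint32 into the full register, the movq(dst, Immediate) form sign-extends an int32 via REX.W 0xC7 /0, and only values needing all 64 bits pay for the 10-byte form. A worked sketch of which path a few values take, under those standard semantics:

  // value               is_uint32  is_int32  chosen form           size
  // 0x00000000CAFEBABE  yes        no        movl (0xB8|r imm32)   5-6
  // -2 (0xFF...FE)      no         yes       movq (0xC7 /0 imm32)    7
  // 0x0000CAFE00000000  no         no        movq (0xB8|r imm64)    10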
1552 | 1610 |
1553 | 1611 |
| 1612 #ifndef V8_TARGET_ARCH_X32 |
1554 void Assembler::movq(Register dst, ExternalReference ref) { | 1613 void Assembler::movq(Register dst, ExternalReference ref) { |
1555 int64_t value = reinterpret_cast<int64_t>(ref.address()); | 1614 int64_t value = reinterpret_cast<int64_t>(ref.address()); |
1556 movq(dst, value, RelocInfo::EXTERNAL_REFERENCE); | 1615 movq(dst, value, RelocInfo::EXTERNAL_REFERENCE); |
1557 } | 1616 } |
| 1617 #else |
| 1618 void Assembler::movl(Register dst, ExternalReference ref) { |
| 1619 int32_t value = reinterpret_cast<int32_t>(ref.address()); |
| 1620 movl(dst, value, RelocInfo::EXTERNAL_REFERENCE); |
| 1621 } |
| 1622 #endif |
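Note: the x32 overload's reinterpret_cast<int32_t> of an address is only sound because the x32 ABI keeps the entire address space below 4 GB. A hypothetical compile-time guard, not part of the patch, that makes the assumption explicit:

  #ifdef V8_TARGET_ARCH_X32
  static_assert(sizeof(void*) == 4,
                "x32 external reference addresses must fit in int32_t");
  #endif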
1558 | 1623 |
1559 | 1624 |
1560 void Assembler::movq(const Operand& dst, Immediate value) { | 1625 void Assembler::movq(const Operand& dst, Immediate value) { |
1561 EnsureSpace ensure_space(this); | 1626 EnsureSpace ensure_space(this); |
1562 emit_rex_64(dst); | 1627 emit_rex_64(dst); |
1563 emit(0xC7); | 1628 emit(0xC7); |
1564 emit_operand(0, dst); | 1629 emit_operand(0, dst); |
1565 emit(value); | 1630 emit(value); |
1566 } | 1631 } |
1567 | 1632 |
(...skipping 14 matching lines...)
1582 src->link_to(pc_offset() - sizeof(int32_t)); | 1647 src->link_to(pc_offset() - sizeof(int32_t)); |
1583 } else { | 1648 } else { |
1584 ASSERT(src->is_unused()); | 1649 ASSERT(src->is_unused()); |
1585 int32_t current = pc_offset(); | 1650 int32_t current = pc_offset(); |
1586 emitl(current); | 1651 emitl(current); |
1587 src->link_to(current); | 1652 src->link_to(current); |
1588 } | 1653 } |
1589 } | 1654 } |
1590 | 1655 |
1591 | 1656 |
| 1657 #ifndef V8_TARGET_ARCH_X32 |
1592 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) { | 1658 void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) { |
1593 AllowDeferredHandleDereference using_raw_address; | 1659 AllowDeferredHandleDereference using_raw_address; |
1594 // If there is no relocation info, emit the value of the handle efficiently | 1660 // If there is no relocation info, emit the value of the handle efficiently |
1595 // (possibly using less than 8 bytes for the value). | 1661 // (possibly using less than 8 bytes for the value). |
1596 if (RelocInfo::IsNone(mode)) { | 1662 if (RelocInfo::IsNone(mode)) { |
1597 // There is no possible reason to store a heap pointer without relocation | 1663 // There is no possible reason to store a heap pointer without relocation |
1598 // info, so it must be a smi. | 1664 // info, so it must be a smi. |
1599 ASSERT(value->IsSmi()); | 1665 ASSERT(value->IsSmi()); |
1600 movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE64); | 1666 movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE64); |
1601 } else { | 1667 } else { |
1602 EnsureSpace ensure_space(this); | 1668 EnsureSpace ensure_space(this); |
1603 ASSERT(value->IsHeapObject()); | 1669 ASSERT(value->IsHeapObject()); |
1604 ASSERT(!HEAP->InNewSpace(*value)); | 1670 ASSERT(!HEAP->InNewSpace(*value)); |
1605 emit_rex_64(dst); | 1671 emit_rex_64(dst); |
1606 emit(0xB8 | dst.low_bits()); | 1672 emit(0xB8 | dst.low_bits()); |
1607 emitq(reinterpret_cast<uintptr_t>(value.location()), mode); | 1673 emitq(reinterpret_cast<uintptr_t>(value.location()), mode); |
1608 } | 1674 } |
1609 } | 1675 } |
| 1676 #else |
| 1677 void Assembler::movl(Register dst, Handle<Object> value, RelocInfo::Mode mode) { |
| 1678 AllowDeferredHandleDereference using_raw_address; |
| 1679 // If there is no relocation info, emit the value of the handle efficiently |
| 1680 // (without recording relocation info for the 32-bit value). |
| 1681 if (RelocInfo::IsNone(mode)) { |
| 1682 // There is no possible reason to store a heap pointer without relocation |
| 1683 // info, so it must be a smi. |
| 1684 ASSERT(value->IsSmi()); |
| 1685 movl(dst, reinterpret_cast<int32_t>(*value), RelocInfo::NONE32); |
| 1686 } else { |
| 1687 EnsureSpace ensure_space(this); |
| 1688 ASSERT(value->IsHeapObject()); |
| 1689 ASSERT(!HEAP->InNewSpace(*value)); |
| 1690 emit_optional_rex_32(dst); |
| 1691 emit(0xB8 | dst.low_bits()); |
| 1692 emitl(reinterpret_cast<uintptr_t>(value.location()), mode); |
| 1693 } |
| 1694 } |
| 1695 |
| 1696 #endif |
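Note: the ASSERT(value->IsSmi()) in both variants leans on V8's pointer tagging: a smi is an immediate integer carried in the tagged word itself, so the GC never moves it and no relocation entry is needed, whereas a heap object pointer must be emitted through the handle cell with reloc info. A minimal sketch of the tag check, assuming the usual kSmiTag == 0 scheme of this era:

  // Tagged word layout (assumed):
  //   smi:         ...value bits...0   (low bit clear)
  //   heap object: ...address bits...1 (low bit set)
  static inline bool IsSmiWord(intptr_t tagged) {
    return (tagged & 1) == 0;  // kSmiTagMask == 1, kSmiTag == 0
  }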
1610 | 1697 |
1611 | 1698 |
1612 void Assembler::movsxbq(Register dst, const Operand& src) { | 1699 void Assembler::movsxbq(Register dst, const Operand& src) { |
1613 EnsureSpace ensure_space(this); | 1700 EnsureSpace ensure_space(this); |
1614 emit_rex_64(dst, src); | 1701 emit_rex_64(dst, src); |
1615 emit(0x0F); | 1702 emit(0x0F); |
1616 emit(0xBE); | 1703 emit(0xBE); |
1617 emit_operand(dst, src); | 1704 emit_operand(dst, src); |
1618 } | 1705 } |
1619 | 1706 |
(...skipping 365 matching lines...)
1985 emit(0x87); | 2072 emit(0x87); |
1986 emit_modrm(dst, src); | 2073 emit_modrm(dst, src); |
1987 } else { | 2074 } else { |
1988 emit_rex_64(src, dst); | 2075 emit_rex_64(src, dst); |
1989 emit(0x87); | 2076 emit(0x87); |
1990 emit_modrm(src, dst); | 2077 emit_modrm(src, dst); |
1991 } | 2078 } |
1992 } | 2079 } |
1993 | 2080 |
1994 | 2081 |
| 2082 #ifdef V8_TARGET_ARCH_X32 |
| 2083 void Assembler::xchgl(Register dst, Register src) { |
| 2084 EnsureSpace ensure_space(this); |
| 2085 if (src.is(rax) || dst.is(rax)) { // Single-byte encoding |
| 2086 Register other = src.is(rax) ? dst : src; |
| 2087 emit_optional_rex_32(other); |
| 2088 emit(0x90 | other.low_bits()); |
| 2089 } else if (dst.low_bits() == 4) { |
| 2090 emit_optional_rex_32(dst, src); |
| 2091 emit(0x87); |
| 2092 emit_modrm(dst, src); |
| 2093 } else { |
| 2094 emit_optional_rex_32(src, dst); |
| 2095 emit(0x87); |
| 2096 emit_modrm(src, dst); |
| 2097 } |
| 2098 } |
| 2099 #endif |
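Note: the rax special case uses the one-byte 0x90|r accumulator form of XCHG; the degenerate xchgl(rax, rax) therefore emits plain 0x90, which is architecturally NOP and still a correct no-op exchange. Everything else takes 0x87 with a ModRM byte. Illustrative encodings, assuming the standard forms:

  // xchgl(rax, rbx) -> 93     (0x90 | rbx.low_bits(), one byte)
  // xchgl(rcx, rdx) -> 87 D1  (0x87, register-form ModRM)
  // xchgl(rax, rax) -> 90     (NOP; the exchange is vacuous anyway)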
| 2100 |
| 2101 |
1995 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) { | 2102 void Assembler::store_rax(void* dst, RelocInfo::Mode mode) { |
1996 EnsureSpace ensure_space(this); | 2103 EnsureSpace ensure_space(this); |
| 2104 #ifndef V8_TARGET_ARCH_X32 |
1997 emit(0x48); // REX.W | 2105 emit(0x48); // REX.W |
| 2106 #endif |
1998 emit(0xA3); | 2107 emit(0xA3); |
| 2108 #ifndef V8_TARGET_ARCH_X32 |
1999 emitq(reinterpret_cast<uintptr_t>(dst), mode); | 2109 emitq(reinterpret_cast<uintptr_t>(dst), mode); |
| 2110 #else |
| 2111 emitl(reinterpret_cast<uintptr_t>(dst), mode); |
| 2112 emitl(0); |
| 2113 #endif |
2000 } | 2114 } |
2001 | 2115 |
2002 | 2116 |
2003 void Assembler::store_rax(ExternalReference ref) { | 2117 void Assembler::store_rax(ExternalReference ref) { |
2004 store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE); | 2118 store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE); |
2005 } | 2119 } |
2006 | 2120 |
2007 | 2121 |
2008 void Assembler::testb(Register dst, Register src) { | 2122 void Assembler::testb(Register dst, Register src) { |
2009 EnsureSpace ensure_space(this); | 2123 EnsureSpace ensure_space(this); |
(...skipping 93 matching lines...)
2103 return; | 2217 return; |
2104 } | 2218 } |
2105 EnsureSpace ensure_space(this); | 2219 EnsureSpace ensure_space(this); |
2106 emit_optional_rex_32(rax, op); | 2220 emit_optional_rex_32(rax, op); |
2107 emit(0xF7); | 2221 emit(0xF7); |
2108 emit_operand(rax, op); // Operation code 0 | 2222 emit_operand(rax, op); // Operation code 0 |
2109 emit(mask); | 2223 emit(mask); |
2110 } | 2224 } |
2111 | 2225 |
2112 | 2226 |
| 2227 #ifdef V8_TARGET_ARCH_X32 |
| 2228 void Assembler::testl(const Operand& op, Register reg) { |
| 2229 EnsureSpace ensure_space(this); |
| 2230 emit_optional_rex_32(reg, op); |
| 2231 emit(0x85); |
| 2232 emit_operand(reg, op); |
| 2233 } |
| 2234 #endif |
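Note: this testl overload is the 32-bit sibling of the testq directly below; both use opcode 0x85 with the register in the ModRM reg field, and the only difference is the mandatory REX.W on the 64-bit form versus an optional REX (needed only for r8-r15) on this one. A usage sketch with illustrative operands:

  masm.testl(Operand(rbx, 0x10), rcx);  // 85 4B 10     no REX needed
  masm.testq(Operand(rbx, 0x10), rcx);  // 48 85 4B 10  REX.W for 64-bit
  masm.testl(Operand(rbx, 0x10), r9);   // 44 85 4B 10  REX.R for r9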
| 2235 |
| 2236 |
2113 void Assembler::testq(const Operand& op, Register reg) { | 2237 void Assembler::testq(const Operand& op, Register reg) { |
2114 EnsureSpace ensure_space(this); | 2238 EnsureSpace ensure_space(this); |
2115 emit_rex_64(reg, op); | 2239 emit_rex_64(reg, op); |
2116 emit(0x85); | 2240 emit(0x85); |
2117 emit_operand(reg, op); | 2241 emit_operand(reg, op); |
2118 } | 2242 } |
2119 | 2243 |
2120 | 2244 |
2121 void Assembler::testq(Register dst, Register src) { | 2245 void Assembler::testq(Register dst, Register src) { |
2122 EnsureSpace ensure_space(this); | 2246 EnsureSpace ensure_space(this); |
(...skipping 895 matching lines...)
3018 | 3142 |
3019 void Assembler::movmskps(Register dst, XMMRegister src) { | 3143 void Assembler::movmskps(Register dst, XMMRegister src) { |
3020 EnsureSpace ensure_space(this); | 3144 EnsureSpace ensure_space(this); |
3021 emit_optional_rex_32(dst, src); | 3145 emit_optional_rex_32(dst, src); |
3022 emit(0x0f); | 3146 emit(0x0f); |
3023 emit(0x50); | 3147 emit(0x50); |
3024 emit_sse_operand(dst, src); | 3148 emit_sse_operand(dst, src); |
3025 } | 3149 } |
3026 | 3150 |
3027 | 3151 |
| 3152 #ifdef V8_TARGET_ARCH_X32 |
| 3153 void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) { |
| 3154 EnsureSpace ensure_space(this); |
| 3155 emit(0x66); |
| 3156 emit(0x0f); |
| 3157 emit(0x76); |
| 3158 emit_sse_operand(dst, src); |
| 3159 } |
| 3160 #endif |
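Note: the 0x66 prefix is what selects the XMM form: 0F 76 alone is the legacy MMX pcmpeqd, and 66 0F 76 is the SSE2 one. The usual reason to want this instruction is the self-compare idiom, sketched below under standard SSE2 semantics; note also that as written the helper emits no REX prefix, so it can only address xmm0-xmm7.

  // pcmpeqd xmm0, xmm0 -> 66 0F 76 C0
  // Every 32-bit lane compares equal with itself, so xmm0 becomes all ones:
  // a one-instruction way to materialize a ~0 mask without a memory load.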
| 3161 |
| 3162 |
3028 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) { | 3163 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) { |
3029 Register ireg = { reg.code() }; | 3164 Register ireg = { reg.code() }; |
3030 emit_operand(ireg, adr); | 3165 emit_operand(ireg, adr); |
3031 } | 3166 } |
3032 | 3167 |
3033 | 3168 |
3034 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) { | 3169 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) { |
3035 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); | 3170 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); |
3036 } | 3171 } |
3037 | 3172 |
(...skipping 68 matching lines...)
3106 bool RelocInfo::IsCodedSpecially() { | 3241 bool RelocInfo::IsCodedSpecially() { |
3107 // The deserializer needs to know whether a pointer is specially coded. Being | 3242 // The deserializer needs to know whether a pointer is specially coded. Being |
3108 // specially coded on x64 means that it is a relative 32-bit address, as used | 3243 // specially coded on x64 means that it is a relative 32-bit address, as used |
3109 // by branch instructions. | 3244 // by branch instructions. |
3110 return (1 << rmode_) & kApplyMask; | 3245 return (1 << rmode_) & kApplyMask; |
3111 } | 3246 } |
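Note: IsCodedSpecially reduces to a set-membership test: kApplyMask is a bitset of the RelocInfo modes whose targets are pc-relative, and (1 << rmode_) & kApplyMask asks whether this entry's mode is in that set. The same pattern in miniature, with a hypothetical mask for illustration:

  #include <cstdint>

  enum Mode { CODE_TARGET = 0, EMBEDDED_OBJECT = 1, RUNTIME_ENTRY = 2 };
  static const uint32_t kMask = (1u << CODE_TARGET) | (1u << RUNTIME_ENTRY);

  static inline bool IsCodedSpecially(Mode m) {
    return ((1u << m) & kMask) != 0;
  }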
3112 | 3247 |
3113 } } // namespace v8::internal | 3248 } } // namespace v8::internal |
3114 | 3249 |
3115 #endif // V8_TARGET_ARCH_X64 | 3250 #endif // V8_TARGET_ARCH_X64 |