| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // | 2 // |
| 3 // Redistribution and use in source and binary forms, with or without | 3 // Redistribution and use in source and binary forms, with or without |
| 4 // modification, are permitted provided that the following conditions are | 4 // modification, are permitted provided that the following conditions are |
| 5 // met: | 5 // met: |
| 6 // | 6 // |
| 7 // * Redistributions of source code must retain the above copyright | 7 // * Redistributions of source code must retain the above copyright |
| 8 // notice, this list of conditions and the following disclaimer. | 8 // notice, this list of conditions and the following disclaimer. |
| 9 // * Redistributions in binary form must reproduce the above | 9 // * Redistributions in binary form must reproduce the above |
| 10 // copyright notice, this list of conditions and the following | 10 // copyright notice, this list of conditions and the following |
| (...skipping 1609 matching lines...) |
| 1620 addrmodeop = LoadStorePairPreIndexFixed; | 1620 addrmodeop = LoadStorePairPreIndexFixed; |
| 1621 } else { | 1621 } else { |
| 1622 DCHECK(addr.IsPostIndex()); | 1622 DCHECK(addr.IsPostIndex()); |
| 1623 addrmodeop = LoadStorePairPostIndexFixed; | 1623 addrmodeop = LoadStorePairPostIndexFixed; |
| 1624 } | 1624 } |
| 1625 } | 1625 } |
| 1626 Emit(addrmodeop | memop); | 1626 Emit(addrmodeop | memop); |
| 1627 } | 1627 } |
| 1628 | 1628 |
| 1629 | 1629 |
| 1630 void Assembler::ldnp(const CPURegister& rt, | |
| 1631 const CPURegister& rt2, | |
| 1632 const MemOperand& src) { | |
| 1633 LoadStorePairNonTemporal(rt, rt2, src, | |
| 1634 LoadPairNonTemporalOpFor(rt, rt2)); | |
| 1635 } | |
| 1636 | |
| 1637 | |
| 1638 void Assembler::stnp(const CPURegister& rt, | |
| 1639 const CPURegister& rt2, | |
| 1640 const MemOperand& dst) { | |
| 1641 LoadStorePairNonTemporal(rt, rt2, dst, | |
| 1642 StorePairNonTemporalOpFor(rt, rt2)); | |
| 1643 } | |
| 1644 | |
| 1645 | |
| 1646 void Assembler::LoadStorePairNonTemporal(const CPURegister& rt, | |
| 1647 const CPURegister& rt2, | |
| 1648 const MemOperand& addr, | |
| 1649 LoadStorePairNonTemporalOp op) { | |
| 1650 DCHECK(!rt.Is(rt2)); | |
| 1651 DCHECK(AreSameSizeAndType(rt, rt2)); | |
| 1652 DCHECK(addr.IsImmediateOffset()); | |
| 1653 LSDataSize size = CalcLSPairDataSize( | |
| 1654 static_cast<LoadStorePairOp>(op & LoadStorePairMask)); | |
| 1655 DCHECK(IsImmLSPair(addr.offset(), size)); | |
| 1656 int offset = static_cast<int>(addr.offset()); | |
| 1657 Emit(op | Rt(rt) | Rt2(rt2) | RnSP(addr.base()) | ImmLSPair(offset, size)); | |
| 1658 } | |
| 1659 | |
| 1660 | |
| 1661 // Memory instructions. | 1630 // Memory instructions. |
| 1662 void Assembler::ldrb(const Register& rt, const MemOperand& src) { | 1631 void Assembler::ldrb(const Register& rt, const MemOperand& src) { |
| 1663 LoadStore(rt, src, LDRB_w); | 1632 LoadStore(rt, src, LDRB_w); |
| 1664 } | 1633 } |
| 1665 | 1634 |
| 1666 | 1635 |
| 1667 void Assembler::strb(const Register& rt, const MemOperand& dst) { | 1636 void Assembler::strb(const Register& rt, const MemOperand& dst) { |
| 1668 LoadStore(rt, dst, STRB_w); | 1637 LoadStore(rt, dst, STRB_w); |
| 1669 } | 1638 } |
| 1670 | 1639 |
| (...skipping 1498 matching lines...) |
| 3169 movk(scratch, (target_offset >> 32) & 0xFFFF, 32); | 3138 movk(scratch, (target_offset >> 32) & 0xFFFF, 32); |
| 3170 DCHECK((target_offset >> 48) == 0); | 3139 DCHECK((target_offset >> 48) == 0); |
| 3171 add(rd, rd, scratch); | 3140 add(rd, rd, scratch); |
| 3172 } | 3141 } |
| 3173 | 3142 |
| 3174 | 3143 |
| 3175 } // namespace internal | 3144 } // namespace internal |
| 3176 } // namespace v8 | 3145 } // namespace v8 |
| 3177 | 3146 |
| 3178 #endif // V8_TARGET_ARCH_ARM64 | 3147 #endif // V8_TARGET_ARCH_ARM64 |
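
For context on what this CL deletes: a minimal sketch of how the removed ldnp/stnp pair emitters were typically driven. Only the Assembler::ldnp/stnp signatures and MemOperand come from the diff above; the wrapper name, the register choice (x0-x3), and the include path are illustrative assumptions, not taken from V8.

    // Sketch only, under the assumptions stated above.
    #include "src/arm64/assembler-arm64.h"

    namespace v8 {
    namespace internal {

    // Hypothetical helper: copies a 64-bit register pair using the
    // non-temporal pair instructions removed by this CL.
    void EmitNonTemporalCopy(Assembler* assm) {
      // Load a 64-bit pair from [x2] with a non-temporal hint, i.e.
      // advising the CPU not to keep the line in cache.
      assm->ldnp(x0, x1, MemOperand(x2));
      // Store the pair to [x3 + 16]; only an immediate-offset
      // MemOperand is legal here, and the offset must be pair-scaled
      // (a multiple of 8 for X registers) so IsImmLSPair() holds.
      assm->stnp(x0, x1, MemOperand(x3, 16));
    }

    }  // namespace internal
    }  // namespace v8

Note that, per the DCHECKs in the removed LoadStorePairNonTemporal, rt and rt2 must differ, must match in size and type, and pre/post-index addressing modes are rejected.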