OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 570 matching lines...)
581 } else if (addr.IsPreIndex() && !IsImmLSUnscaled(offset)) { | 581 } else if (addr.IsPreIndex() && !IsImmLSUnscaled(offset)) { |
582 // Pre-index beyond unscaled addressing range. | 582 // Pre-index beyond unscaled addressing range. |
583 add(addr.base(), addr.base(), offset); | 583 add(addr.base(), addr.base(), offset); |
584 LoadStore(rt, MemOperand(addr.base()), op); | 584 LoadStore(rt, MemOperand(addr.base()), op); |
585 } else { | 585 } else { |
586 // Encodable in one load/store instruction. | 586 // Encodable in one load/store instruction. |
587 LoadStore(rt, addr, op); | 587 LoadStore(rt, addr, op); |
588 } | 588 } |
589 } | 589 } |
590 | 590 |
| 591 void MacroAssembler::LoadStorePairMacro(const CPURegister& rt, |
| 592 const CPURegister& rt2, |
| 593 const MemOperand& addr, |
| 594 LoadStorePairOp op) { |
| 595 // TODO(all): Should we support register offset for load-store-pair? |
| 596 DCHECK(!addr.IsRegisterOffset()); |
| 597 |
| 598 int64_t offset = addr.offset(); |
| 599 LSDataSize size = CalcLSPairDataSize(op); |
| 600 |
| 601 // Check if the offset fits in the immediate field of the appropriate |
| 602 // instruction. If not, emit two instructions to perform the operation. |
| 603 if (IsImmLSPair(offset, size)) { |
| 604 // Encodable in one load/store pair instruction. |
| 605 LoadStorePair(rt, rt2, addr, op); |
| 606 } else { |
| 607 Register base = addr.base(); |
| 608 if (addr.IsImmediateOffset()) { |
| 609 UseScratchRegisterScope temps(this); |
| 610 Register temp = temps.AcquireSameSizeAs(base); |
| 611 Add(temp, base, offset); |
| 612 LoadStorePair(rt, rt2, MemOperand(temp), op); |
| 613 } else if (addr.IsPostIndex()) { |
| 614 LoadStorePair(rt, rt2, MemOperand(base), op); |
| 615 Add(base, base, offset); |
| 616 } else { |
| 617 DCHECK(addr.IsPreIndex()); |
| 618 Add(base, base, offset); |
| 619 LoadStorePair(rt, rt2, MemOperand(base), op); |
| 620 } |
| 621 } |
| 622 } |
| 623 |
591 | 624 |
592 void MacroAssembler::Load(const Register& rt, | 625 void MacroAssembler::Load(const Register& rt, |
593 const MemOperand& addr, | 626 const MemOperand& addr, |
594 Representation r) { | 627 Representation r) { |
595 DCHECK(!r.IsDouble()); | 628 DCHECK(!r.IsDouble()); |
596 | 629 |
597 if (r.IsInteger8()) { | 630 if (r.IsInteger8()) { |
598 Ldrsb(rt, addr); | 631 Ldrsb(rt, addr); |
599 } else if (r.IsUInteger8()) { | 632 } else if (r.IsUInteger8()) { |
600 Ldrb(rt, addr); | 633 Ldrb(rt, addr); |
(...skipping 4774 matching lines...)
5375 } | 5408 } |
5376 } | 5409 } |
5377 | 5410 |
5378 | 5411 |
5379 #undef __ | 5412 #undef __ |
5380 | 5413 |
5381 | 5414 |
5382 } } // namespace v8::internal | 5415 } } // namespace v8::internal |
5383 | 5416 |
5384 #endif // V8_TARGET_ARCH_ARM64 | 5417 #endif // V8_TARGET_ARCH_ARM64 |
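
Note on the range check used by the new LoadStorePairMacro (illustrative only, not part of the patch): the single-instruction path is taken when IsImmLSPair(offset, size) holds. Assuming the standard A64 LDP/STP immediate-offset form, that amounts to the offset being a multiple of the access size with the scaled value fitting in a signed 7-bit field. A minimal standalone sketch of such a check, with hypothetical names (not V8's actual IsImmLSPair):

#include <cstdint>

// Hypothetical helper mirroring what an LDP/STP immediate-offset check is
// expected to verify for the A64 encoding.
static bool IsEncodableLSPairOffset(int64_t offset, unsigned size_log2) {
  // The offset must be a multiple of the access size (1 << size_log2 bytes).
  if (offset % (int64_t{1} << size_log2) != 0) return false;
  // The scaled offset must fit in a signed 7-bit immediate: [-64, 63].
  int64_t scaled = offset / (int64_t{1} << size_log2);
  return scaled >= -64 && scaled <= 63;
}

For a pair of X registers (size_log2 == 3) this allows offsets in [-512, 504] in steps of 8; anything outside that range falls through to the Add + LoadStorePair fallback paths added in this patch.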