OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 629 matching lines...)
640 } | 640 } |
641 | 641 |
642 | 642 |
643 void MacroAssembler::Ldrd(Register dst1, Register dst2, | 643 void MacroAssembler::Ldrd(Register dst1, Register dst2, |
644 const MemOperand& src, Condition cond) { | 644 const MemOperand& src, Condition cond) { |
645 ASSERT(src.rm().is(no_reg)); | 645 ASSERT(src.rm().is(no_reg)); |
646 ASSERT(!dst1.is(lr)); // r14. | 646 ASSERT(!dst1.is(lr)); // r14. |
647 ASSERT_EQ(0, dst1.code() % 2); | 647 ASSERT_EQ(0, dst1.code() % 2); |
648 ASSERT_EQ(dst1.code() + 1, dst2.code()); | 648 ASSERT_EQ(dst1.code() + 1, dst2.code()); |
649 | 649 |
| 650 // V8 does not use this addressing mode, so the fallback code |
| 651 // below doesn't support it yet. |
| 652 ASSERT((src.am() != PreIndex) && (src.am() != NegPreIndex)); |
| 653 |
650 // Generate two ldr instructions if ldrd is not available. | 654 // Generate two ldr instructions if ldrd is not available. |
651 if (CpuFeatures::IsSupported(ARMv7)) { | 655 if (CpuFeatures::IsSupported(ARMv7)) { |
652 CpuFeatures::Scope scope(ARMv7); | 656 CpuFeatures::Scope scope(ARMv7); |
653 ldrd(dst1, dst2, src, cond); | 657 ldrd(dst1, dst2, src, cond); |
654 } else { | 658 } else { |
655 MemOperand src2(src); | 659 if ((src.am() == Offset) || (src.am() == NegOffset)) { |
656 src2.set_offset(src2.offset() + 4); | 660 MemOperand src2(src); |
657 if (dst1.is(src.rn())) { | 661 src2.set_offset(src2.offset() + 4); |
658 ldr(dst2, src2, cond); | 662 if (dst1.is(src.rn())) { |
659 ldr(dst1, src, cond); | 663 ldr(dst2, src2, cond); |
660 } else { | 664 ldr(dst1, src, cond); |
661 ldr(dst1, src, cond); | 665 } else { |
662 ldr(dst2, src2, cond); | 666 ldr(dst1, src, cond); |
| 667 ldr(dst2, src2, cond); |
| 668 } |
| 669 } else { // PostIndex or NegPostIndex. |
| 670 ASSERT((src.am() == PostIndex) || (src.am() == NegPostIndex)); |
| 671 if (dst1.is(src.rn())) { |
| 672 ldr(dst2, MemOperand(src.rn(), 4, Offset), cond); |
| 673 ldr(dst1, src, cond); |
| 674 } else { |
| 675 MemOperand src2(src); |
| 676 src2.set_offset(src2.offset() - 4); |
| 677 ldr(dst1, MemOperand(src.rn(), 4, PostIndex), cond); |
| 678 ldr(dst2, src2, cond); |
| 679 } |
663 } | 680 } |
664 } | 681 } |
665 } | 682 } |
666 | 683 |
667 | 684 |
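Aside (not part of the patch): in the Offset/NegOffset fallback above, the two ldr instructions are reordered when dst1 aliases the base register, because loading the low word first would clobber the base before the high word could be read. Below is a minimal stand-alone C++ sketch of that hazard, with made-up names (LdrdOffsetFallback, Load32) and plain integers standing in for registers and a byte buffer for memory; it is an illustration, not V8 code.

#include <cassert>
#include <cstdint>
#include <cstring>

static uint32_t Load32(const uint8_t* mem, uint32_t addr) {
  uint32_t v;
  std::memcpy(&v, mem + addr, sizeof(v));
  return v;
}

// Emulates "ldrd dst1, dst2, [rn, #offset]" with two 32-bit loads.
void LdrdOffsetFallback(uint32_t* dst1, uint32_t* dst2,
                        uint32_t* rn, int32_t offset, const uint8_t* mem) {
  if (dst1 == rn) {
    // dst1 is the base register: read the high word while the base is
    // still intact, then let the low-word load overwrite it.
    *dst2 = Load32(mem, *rn + offset + 4);
    *dst1 = Load32(mem, *rn + offset);
  } else {
    // No aliasing: natural low-then-high order.
    *dst1 = Load32(mem, *rn + offset);
    *dst2 = Load32(mem, *rn + offset + 4);
  }
}

int main() {
  uint8_t mem[16] = {};
  const uint32_t lo = 0x11223344, hi = 0x55667788;
  std::memcpy(mem + 8, &lo, 4);
  std::memcpy(mem + 12, &hi, 4);
  uint32_t r0 = 8, r1 = 0;            // r0 is both the base and dst1
  LdrdOffsetFallback(&r0, &r1, &r0, 0, mem);
  assert(r0 == lo && r1 == hi);       // both halves survive the aliasing
  return 0;
}

The generated code achieves the same effect purely through the order of the two emitted ldr instructions.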
668 void MacroAssembler::Strd(Register src1, Register src2, | 685 void MacroAssembler::Strd(Register src1, Register src2, |
669 const MemOperand& dst, Condition cond) { | 686 const MemOperand& dst, Condition cond) { |
670 ASSERT(dst.rm().is(no_reg)); | 687 ASSERT(dst.rm().is(no_reg)); |
671 ASSERT(!src1.is(lr)); // r14. | 688 ASSERT(!src1.is(lr)); // r14. |
672 ASSERT_EQ(0, src1.code() % 2); | 689 ASSERT_EQ(0, src1.code() % 2); |
673 ASSERT_EQ(src1.code() + 1, src2.code()); | 690 ASSERT_EQ(src1.code() + 1, src2.code()); |
674 | 691 |
| 692 // V8 does not use this addressing mode, so the fallback code |
| 693 // below doesn't support it yet. |
| 694 ASSERT((dst.am() != PreIndex) && (dst.am() != NegPreIndex)); |
| 695 |
675 // Generate two str instructions if strd is not available. | 696 // Generate two str instructions if strd is not available. |
676 if (CpuFeatures::IsSupported(ARMv7)) { | 697 if (CpuFeatures::IsSupported(ARMv7)) { |
677 CpuFeatures::Scope scope(ARMv7); | 698 CpuFeatures::Scope scope(ARMv7); |
678 strd(src1, src2, dst, cond); | 699 strd(src1, src2, dst, cond); |
679 } else { | 700 } else { |
680 MemOperand dst2(dst); | 701 MemOperand dst2(dst); |
681 dst2.set_offset(dst2.offset() + 4); | 702 if ((dst.am() == Offset) || (dst.am() == NegOffset)) { |
682 str(src1, dst, cond); | 703 dst2.set_offset(dst2.offset() + 4); |
683 str(src2, dst2, cond); | 704 str(src1, dst, cond); |
| 705 str(src2, dst2, cond); |
| 706 } else { // PostIndex or NegPostIndex. |
| 707 ASSERT((dst.am() == PostIndex) || (dst.am() == NegPostIndex)); |
| 708 dst2.set_offset(dst2.offset() - 4); |
| 709 str(src1, MemOperand(dst.rn(), 4, PostIndex), cond); |
| 710 str(src2, dst2, cond); |
| 711 } |
684 } | 712 } |
685 } | 713 } |
686 | 714 |
687 | 715 |
688 void MacroAssembler::ClearFPSCRBits(const uint32_t bits_to_clear, | 716 void MacroAssembler::ClearFPSCRBits(const uint32_t bits_to_clear, |
689 const Register scratch, | 717 const Register scratch, |
690 const Condition cond) { | 718 const Condition cond) { |
691 vmrs(scratch, cond); | 719 vmrs(scratch, cond); |
692 bic(scratch, scratch, Operand(bits_to_clear), LeaveCC, cond); | 720 bic(scratch, scratch, Operand(bits_to_clear), LeaveCC, cond); |
693 vmsr(scratch, cond); | 721 vmsr(scratch, cond); |
(...skipping 2462 matching lines...)
3156 void CodePatcher::EmitCondition(Condition cond) { | 3184 void CodePatcher::EmitCondition(Condition cond) { |
3157 Instr instr = Assembler::instr_at(masm_.pc_); | 3185 Instr instr = Assembler::instr_at(masm_.pc_); |
3158 instr = (instr & ~kCondMask) | cond; | 3186 instr = (instr & ~kCondMask) | cond; |
3159 masm_.emit(instr); | 3187 masm_.emit(instr); |
3160 } | 3188 } |
3161 | 3189 |
3162 | 3190 |
3163 } } // namespace v8::internal | 3191 } } // namespace v8::internal |
3164 | 3192 |
3165 #endif // V8_TARGET_ARCH_ARM | 3193 #endif // V8_TARGET_ARCH_ARM |
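Likewise for the new PostIndex branches: the single write-back of the full offset is split across the two 32-bit accesses (4 for the first, offset - 4 for the second), so the base register still ends up advanced by exactly offset. A small illustration-only sketch of that arithmetic, again with made-up names (StrdPostIndexFallback, Store32) and plain C++ state in place of registers and memory:

#include <cassert>
#include <cstdint>
#include <cstring>

static void Store32(uint8_t* mem, uint32_t addr, uint32_t v) {
  std::memcpy(mem + addr, &v, sizeof(v));
}

// Emulates "strd src1, src2, [rn], #offset" without an strd instruction.
void StrdPostIndexFallback(uint32_t src1, uint32_t src2,
                           uint32_t* rn, int32_t offset, uint8_t* mem) {
  Store32(mem, *rn, src1);   // str src1, [rn], #4
  *rn += 4;
  Store32(mem, *rn, src2);   // str src2, [rn], #(offset - 4)
  *rn += offset - 4;         // net write-back: rn += offset
}

int main() {
  uint8_t mem[32] = {};
  uint32_t base = 8;
  StrdPostIndexFallback(0xAAAAAAAA, 0xBBBBBBBB, &base, 16, mem);
  uint32_t lo, hi;
  std::memcpy(&lo, mem + 8, 4);
  std::memcpy(&hi, mem + 12, 4);
  assert(lo == 0xAAAAAAAA && hi == 0xBBBBBBBB && base == 24);  // 8 + 16
  return 0;
}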