OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 | 6 |
7 #include "src/arm/macro-assembler-arm.h" | 7 #include "src/arm/macro-assembler-arm.h" |
8 #include "src/assembler-inl.h" | 8 #include "src/assembler-inl.h" |
9 #include "src/compilation-info.h" | 9 #include "src/compilation-info.h" |
10 #include "src/compiler/code-generator-impl.h" | 10 #include "src/compiler/code-generator-impl.h" |
(...skipping 2804 matching lines...)
2815 __ vstr(temp, g.ToMemOperand(destination)); | 2815 __ vstr(temp, g.ToMemOperand(destination)); |
2816 } else { | 2816 } else { |
2817 DCHECK_EQ(MachineRepresentation::kSimd128, rep); | 2817 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
2818 MemOperand dst = g.ToMemOperand(destination); | 2818 MemOperand dst = g.ToMemOperand(destination); |
2819 __ add(kScratchReg, src.rn(), Operand(src.offset())); | 2819 __ add(kScratchReg, src.rn(), Operand(src.offset())); |
2820 __ vld1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), | 2820 __ vld1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), |
2821 NeonMemOperand(kScratchReg)); | 2821 NeonMemOperand(kScratchReg)); |
2822 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); | 2822 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); |
2823 __ vst1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), | 2823 __ vst1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), |
2824 NeonMemOperand(kScratchReg)); | 2824 NeonMemOperand(kScratchReg)); |
2825 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); | |
2826 } | 2825 } |
2827 } | 2826 } |
2828 } else { | 2827 } else { |
2829 UNREACHABLE(); | 2828 UNREACHABLE(); |
2830 } | 2829 } |
2831 } | 2830 } |
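
Note on the kSimd128 branch above: the NEW column copies a 128-bit stack slot through kScratchQuadReg alone, so the trailing veor that re-zeroed kDoubleRegZero (OLD line 2825) is no longer needed. A minimal sketch of the same slot-to-slot copy in plain C++, assuming nothing about V8 internals (all names here are illustrative, not V8 API):

    #include <cstdint>
    #include <cstring>

    // Copy one 16-byte (Simd128) stack slot to another through a single
    // scratch buffer, the role kScratchQuadReg plays in the vld1/vst1 pair.
    void MoveSimd128Slot(const uint8_t* src, uint8_t* dst) {
      uint8_t scratch[16];            // stands in for kScratchQuadReg
      std::memcpy(scratch, src, 16);  // vld1 {scratch}, [src]
      std::memcpy(dst, scratch, 16);  // vst1 {scratch}, [dst]
    }

Because only the designated scratch register is written, no architectural register has to be restored afterwards.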
2832 | 2831 |
2833 void CodeGenerator::AssembleSwap(InstructionOperand* source, | 2832 void CodeGenerator::AssembleSwap(InstructionOperand* source, |
2834 InstructionOperand* destination) { | 2833 InstructionOperand* destination) { |
2835 ArmOperandConverter g(this, nullptr); | 2834 ArmOperandConverter g(this, nullptr); |
(...skipping 62 matching lines...)
2898 __ Swap(src, dst); | 2897 __ Swap(src, dst); |
2899 } else { | 2898 } else { |
2900 DCHECK(destination->IsFPStackSlot()); | 2899 DCHECK(destination->IsFPStackSlot()); |
2901 MemOperand dst = g.ToMemOperand(destination); | 2900 MemOperand dst = g.ToMemOperand(destination); |
2902 __ Move(kScratchQuadReg, src); | 2901 __ Move(kScratchQuadReg, src); |
2903 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); | 2902 __ add(kScratchReg, dst.rn(), Operand(dst.offset())); |
2904 __ vld1(Neon8, NeonListOperand(src.low(), 2), | 2903 __ vld1(Neon8, NeonListOperand(src.low(), 2), |
2905 NeonMemOperand(kScratchReg)); | 2904 NeonMemOperand(kScratchReg)); |
2906 __ vst1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), | 2905 __ vst1(Neon8, NeonListOperand(kScratchQuadReg.low(), 2), |
2907 NeonMemOperand(kScratchReg)); | 2906 NeonMemOperand(kScratchReg)); |
2908 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); | |
2909 } | 2907 } |
2910 } | 2908 } |
2911 } else if (source->IsFPStackSlot()) { | 2909 } else if (source->IsFPStackSlot()) { |
2912 DCHECK(destination->IsFPStackSlot()); | 2910 DCHECK(destination->IsFPStackSlot()); |
2913 MemOperand src = g.ToMemOperand(source); | 2911 Register temp_0 = kScratchReg; |
2914 MemOperand dst = g.ToMemOperand(destination); | 2912 LowDwVfpRegister temp_1 = kScratchDoubleReg; |
| 2913 MemOperand src0 = g.ToMemOperand(source); |
| 2914 MemOperand dst0 = g.ToMemOperand(destination); |
2915 MachineRepresentation rep = LocationOperand::cast(source)->representation(); | 2915 MachineRepresentation rep = LocationOperand::cast(source)->representation(); |
2916 if (rep == MachineRepresentation::kFloat64) { | 2916 if (rep == MachineRepresentation::kFloat64) { |
2917 __ vldr(kScratchDoubleReg, dst); | 2917 MemOperand src1(src0.rn(), src0.offset() + kPointerSize); |
2918 __ vldr(kDoubleRegZero, src); | 2918 MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize); |
2919 __ vstr(kScratchDoubleReg, src); | 2919 __ vldr(temp_1, dst0); // Save destination in temp_1. |
2920 __ vstr(kDoubleRegZero, dst); | 2920 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination. |
2921 // Restore the 0 register. | 2921 __ str(temp_0, dst0); |
2922 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); | 2922 __ ldr(temp_0, src1); |
| 2923 __ str(temp_0, dst1); |
| 2924 __ vstr(temp_1, src0); |
2923 } else if (rep == MachineRepresentation::kFloat32) { | 2925 } else if (rep == MachineRepresentation::kFloat32) { |
2924 __ vldr(kScratchDoubleReg.low(), dst); | 2926 __ vldr(temp_1.low(), dst0); // Save destination in temp_1. |
2925 __ vldr(kScratchDoubleReg.high(), src); | 2927 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination. |
2926 __ vstr(kScratchDoubleReg.low(), src); | 2928 __ str(temp_0, dst0); |
2927 __ vstr(kScratchDoubleReg.high(), dst); | 2929 __ vstr(temp_1.low(), src0); |
2928 } else { | 2930 } else { |
2929 DCHECK_EQ(MachineRepresentation::kSimd128, rep); | 2931 DCHECK_EQ(MachineRepresentation::kSimd128, rep); |
2930 __ vldr(kScratchDoubleReg, dst); | 2932 MemOperand src1(src0.rn(), src0.offset() + kDoubleSize); |
2931 __ vldr(kDoubleRegZero, src); | 2933 MemOperand dst1(dst0.rn(), dst0.offset() + kDoubleSize); |
2932 __ vstr(kScratchDoubleReg, src); | 2934 __ vldr(kScratchQuadReg.low(), dst0); |
2933 __ vstr(kDoubleRegZero, dst); | 2935 __ vldr(kScratchQuadReg.high(), src0); |
2934 src.set_offset(src.offset() + kDoubleSize); | 2936 __ vstr(kScratchQuadReg.low(), src0); |
2935 dst.set_offset(dst.offset() + kDoubleSize); | 2937 __ vstr(kScratchQuadReg.high(), dst0); |
2936 __ vldr(kScratchDoubleReg, dst); | 2938 __ vldr(kScratchQuadReg.low(), dst1); |
2937 __ vldr(kDoubleRegZero, src); | 2939 __ vldr(kScratchQuadReg.high(), src1); |
2938 __ vstr(kScratchDoubleReg, src); | 2940 __ vstr(kScratchQuadReg.low(), src1); |
2939 __ vstr(kDoubleRegZero, dst); | 2941 __ vstr(kScratchQuadReg.high(), dst1); |
2940 // Restore the 0 register. | |
2941 __ veor(kDoubleRegZero, kDoubleRegZero, kDoubleRegZero); | |
2942 } | 2942 } |
2943 } else { | 2943 } else { |
2944 // No other combinations are possible. | 2944 // No other combinations are possible. |
2945 UNREACHABLE(); | 2945 UNREACHABLE(); |
2946 } | 2946 } |
2947 } | 2947 } |
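
For reference, the reworked kFloat64 stack-to-stack swap (NEW lines 2917-2924) frees kDoubleRegZero by holding the destination in kScratchDoubleReg and streaming the source word by word through kScratchReg. A minimal standalone sketch of that ordering in plain C++, with illustrative names only (not V8 API):

    #include <cstdint>
    #include <cstring>

    // Swap two 8-byte (kFloat64) stack slots using one 8-byte FP scratch
    // (temp_1) for the destination and one 4-byte core scratch (temp_0)
    // for the word-wise source copy, mirroring the NEW-column ordering.
    void SwapFloat64Slots(uint32_t* src0, uint32_t* dst0) {
      uint64_t temp_1;                            // kScratchDoubleReg
      std::memcpy(&temp_1, dst0, sizeof temp_1);  // vldr temp_1, dst0
      uint32_t temp_0 = src0[0];                  // ldr  temp_0, src0
      dst0[0] = temp_0;                           // str  temp_0, dst0
      temp_0 = src0[1];                           // ldr  temp_0, src1
      dst0[1] = temp_0;                           // str  temp_0, dst1
      std::memcpy(src0, &temp_1, sizeof temp_1);  // vstr temp_1, src0
    }

The same idea, extended by one kDoubleSize offset, covers the kSimd128 branch, which is why both of the old // Restore the 0 register veor instructions disappear in the NEW column.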
2948 | 2948 |
2949 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { | 2949 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { |
2950 // On 32-bit ARM we emit the jump tables inline. | 2950 // On 32-bit ARM we emit the jump tables inline. |
2951 UNREACHABLE(); | 2951 UNREACHABLE(); |
(...skipping 19 matching lines...)
2971 padding_size -= v8::internal::Assembler::kInstrSize; | 2971 padding_size -= v8::internal::Assembler::kInstrSize; |
2972 } | 2972 } |
2973 } | 2973 } |
2974 } | 2974 } |
2975 | 2975 |
2976 #undef __ | 2976 #undef __ |
2977 | 2977 |
2978 } // namespace compiler | 2978 } // namespace compiler |
2979 } // namespace internal | 2979 } // namespace internal |
2980 } // namespace v8 | 2980 } // namespace v8 |