OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 272 matching lines...)
283 } | 283 } |
284 | 284 |
285 | 285 |
286 void MacroAssembler::Move(Register dst, Register src, Condition cond) { | 286 void MacroAssembler::Move(Register dst, Register src, Condition cond) { |
287 if (!dst.is(src)) { | 287 if (!dst.is(src)) { |
288 mov(dst, src, LeaveCC, cond); | 288 mov(dst, src, LeaveCC, cond); |
289 } | 289 } |
290 } | 290 } |
291 | 291 |
292 | 292 |
293 void MacroAssembler::Move(DoubleRegister dst, DoubleRegister src) { | 293 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { |
294 ASSERT(CpuFeatures::IsSupported(VFP2)); | 294 ASSERT(CpuFeatures::IsSupported(VFP2)); |
295 CpuFeatures::Scope scope(VFP2); | 295 CpuFeatures::Scope scope(VFP2); |
296 if (!dst.is(src)) { | 296 if (!dst.is(src)) { |
297 vmov(dst, src); | 297 vmov(dst, src); |
298 } | 298 } |
299 } | 299 } |
300 | 300 |
301 | 301 |
302 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 302 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
303 Condition cond) { | 303 Condition cond) { |
(...skipping 332 matching lines...)
636 | 636 |
637 void MacroAssembler::PopSafepointRegisters() { | 637 void MacroAssembler::PopSafepointRegisters() { |
638 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 638 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
639 ldm(ia_w, sp, kSafepointSavedRegisters); | 639 ldm(ia_w, sp, kSafepointSavedRegisters); |
640 add(sp, sp, Operand(num_unsaved * kPointerSize)); | 640 add(sp, sp, Operand(num_unsaved * kPointerSize)); |
641 } | 641 } |
642 | 642 |
643 | 643 |
644 void MacroAssembler::PushSafepointRegistersAndDoubles() { | 644 void MacroAssembler::PushSafepointRegistersAndDoubles() { |
645 PushSafepointRegisters(); | 645 PushSafepointRegisters(); |
646 sub(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 646 sub(sp, sp, Operand(DwVfpRegister::NumAllocatableRegisters() * |
647 kDoubleSize)); | 647 kDoubleSize)); |
648 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 648 for (int i = 0; i < DwVfpRegister::NumAllocatableRegisters(); i++) { |
649 vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 649 vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
650 } | 650 } |
651 } | 651 } |
652 | 652 |
653 | 653 |
654 void MacroAssembler::PopSafepointRegistersAndDoubles() { | 654 void MacroAssembler::PopSafepointRegistersAndDoubles() { |
655 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 655 for (int i = 0; i < DwVfpRegister::NumAllocatableRegisters(); i++) { |
656 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 656 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
657 } | 657 } |
658 add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 658 add(sp, sp, Operand(DwVfpRegister::NumAllocatableRegisters() * |
659 kDoubleSize)); | 659 kDoubleSize)); |
660 PopSafepointRegisters(); | 660 PopSafepointRegisters(); |
661 } | 661 } |
662 | 662 |
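
A note on the substitution that recurs in this hunk and in several below: the NEW side replaces the compile-time constant DwVfpRegister::kNumAllocatableRegisters with the runtime query NumAllocatableRegisters(), presumably because the number of usable VFP double registers now depends on the CPU the code runs on. A purely hypothetical sketch of that kind of query (not V8's actual definition, and the counts are assumptions):

    // Hypothetical illustration only, not V8's definition: a compile-time
    // constant cannot describe a register file whose usable size is only
    // known once CPU features have been detected at runtime.
    #include <cstdio>

    static bool DetectExtendedVfpRegisterBank() {
      return false;  // stand-in for real CPU feature detection
    }

    static int NumAllocatableDoubleRegisters() {
      // Assumed counts for illustration: 32 D-registers on cores with the
      // extended bank, 16 on the baseline.
      return DetectExtendedVfpRegisterBank() ? 32 : 16;
    }

    int main() {
      printf("allocatable double registers: %d\n", NumAllocatableDoubleRegisters());
      return 0;
    }
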
663 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, | 663 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, |
664 Register dst) { | 664 Register dst) { |
665 str(src, SafepointRegistersAndDoublesSlot(dst)); | 665 str(src, SafepointRegistersAndDoublesSlot(dst)); |
666 } | 666 } |
667 | 667 |
668 | 668 |
(...skipping 15 matching lines...)
684 } | 684 } |
685 | 685 |
686 | 686 |
687 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { | 687 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { |
688 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); | 688 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); |
689 } | 689 } |
690 | 690 |
691 | 691 |
692 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { | 692 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { |
693 // General purpose registers are pushed last on the stack. | 693 // General purpose registers are pushed last on the stack. |
694 int doubles_size = DwVfpRegister::kNumAllocatableRegisters * kDoubleSize; | 694 int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; |
695 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; | 695 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; |
696 return MemOperand(sp, doubles_size + register_offset); | 696 return MemOperand(sp, doubles_size + register_offset); |
697 } | 697 } |
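
For readers following the offset arithmetic here: the doubles occupy the block starting at sp, so a general-purpose register's safepoint slot sits doubles_size bytes above sp plus its own index. A standalone model of that math, assuming 4-byte pointers, 8-byte doubles, and 16 allocatable double registers (in the NEW code the count is a runtime query):

    #include <cstdio>

    int main() {
      const int kPointerSize = 4;              // ARM32 pointer size
      const int kDoubleSize = 8;
      const int num_allocatable_doubles = 16;  // assumption; queried at runtime above
      const int doubles_size = num_allocatable_doubles * kDoubleSize;

      // Slots for the first few general-purpose safepoint registers.
      for (int stack_index = 0; stack_index < 4; ++stack_index) {
        const int register_offset = stack_index * kPointerSize;
        printf("GP slot %d -> [sp + %d]\n", stack_index, doubles_size + register_offset);
      }
      return 0;
    }
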
698 | 698 |
699 | 699 |
700 void MacroAssembler::Ldrd(Register dst1, Register dst2, | 700 void MacroAssembler::Ldrd(Register dst1, Register dst2, |
701 const MemOperand& src, Condition cond) { | 701 const MemOperand& src, Condition cond) { |
702 ASSERT(src.rm().is(no_reg)); | 702 ASSERT(src.rm().is(no_reg)); |
703 ASSERT(!dst1.is(lr)); // r14. | 703 ASSERT(!dst1.is(lr)); // r14. |
704 ASSERT_EQ(0, dst1.code() % 2); | 704 ASSERT_EQ(0, dst1.code() % 2); |
(...skipping 255 matching lines...)
960 #endif | 960 #endif |
961 | 961 |
962 // Tear down the exit frame, pop the arguments, and return. | 962 // Tear down the exit frame, pop the arguments, and return. |
963 mov(sp, Operand(fp)); | 963 mov(sp, Operand(fp)); |
964 ldm(ia_w, sp, fp.bit() | lr.bit()); | 964 ldm(ia_w, sp, fp.bit() | lr.bit()); |
965 if (argument_count.is_valid()) { | 965 if (argument_count.is_valid()) { |
966 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); | 966 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); |
967 } | 967 } |
968 } | 968 } |
969 | 969 |
970 void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) { | 970 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { |
971 ASSERT(CpuFeatures::IsSupported(VFP2)); | 971 ASSERT(CpuFeatures::IsSupported(VFP2)); |
972 if (use_eabi_hardfloat()) { | 972 if (use_eabi_hardfloat()) { |
973 Move(dst, d0); | 973 Move(dst, d0); |
974 } else { | 974 } else { |
975 vmov(dst, r0, r1); | 975 vmov(dst, r0, r1); |
976 } | 976 } |
977 } | 977 } |
978 | 978 |
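
On the softfloat path of GetCFunctionDoubleResult above, the C function returns its double in the core register pair r0/r1 and vmov(dst, r0, r1) reassembles those two words into a VFP register. A small host-side model of that reassembly, assuming little-endian word order with r0 holding the low word:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      const double result = 3.141592653589793;  // value the C function returned

      uint32_t r0, r1;                           // low and high words on little-endian ARM
      std::memcpy(&r0, &result, 4);
      std::memcpy(&r1, reinterpret_cast<const char*>(&result) + 4, 4);

      double dst;                                // what vmov(dst, r0, r1) produces
      const uint32_t words[2] = {r0, r1};
      std::memcpy(&dst, words, 8);
      printf("%.15f\n", dst);                    // 3.141592653589793
      return 0;
    }
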
979 | 979 |
980 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { | 980 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { |
(...skipping 1729 matching lines...)
2710 | 2710 |
2711 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { | 2711 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { |
2712 CallRuntime(Runtime::FunctionForId(fid), num_arguments); | 2712 CallRuntime(Runtime::FunctionForId(fid), num_arguments); |
2713 } | 2713 } |
2714 | 2714 |
2715 | 2715 |
2716 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { | 2716 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
2717 const Runtime::Function* function = Runtime::FunctionForId(id); | 2717 const Runtime::Function* function = Runtime::FunctionForId(id); |
2718 mov(r0, Operand(function->nargs)); | 2718 mov(r0, Operand(function->nargs)); |
2719 mov(r1, Operand(ExternalReference(function, isolate()))); | 2719 mov(r1, Operand(ExternalReference(function, isolate()))); |
2720 CEntryStub stub(1, kSaveFPRegs); | 2720 SaveFPRegsMode mode = CpuFeatures::IsSupported(VFP2) |
| 2721 ? kSaveFPRegs |
| 2722 : kDontSaveFPRegs; |
| 2723 CEntryStub stub(1, mode); |
2721 CallStub(&stub); | 2724 CallStub(&stub); |
2722 } | 2725 } |
2723 | 2726 |
2724 | 2727 |
2725 void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 2728 void MacroAssembler::CallExternalReference(const ExternalReference& ext, |
2726 int num_arguments) { | 2729 int num_arguments) { |
2727 mov(r0, Operand(num_arguments)); | 2730 mov(r0, Operand(num_arguments)); |
2728 mov(r1, Operand(ext)); | 2731 mov(r1, Operand(ext)); |
2729 | 2732 |
2730 CEntryStub stub(1); | 2733 CEntryStub stub(1); |
(...skipping 655 matching lines...)
3386 | 3389 |
3387 static const int kRegisterPassedArguments = 4; | 3390 static const int kRegisterPassedArguments = 4; |
3388 | 3391 |
3389 | 3392 |
3390 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, | 3393 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, |
3391 int num_double_arguments) { | 3394 int num_double_arguments) { |
3392 int stack_passed_words = 0; | 3395 int stack_passed_words = 0; |
3393 if (use_eabi_hardfloat()) { | 3396 if (use_eabi_hardfloat()) { |
3394 // In the hard floating point calling convention, we can use | 3397 // In the hard floating point calling convention, we can use |
3395 // all double registers to pass doubles. | 3398 // all double registers to pass doubles. |
3396 if (num_double_arguments > DoubleRegister::kNumRegisters) { | 3399 if (num_double_arguments > DoubleRegister::NumRegisters()) { |
3397 stack_passed_words += | 3400 stack_passed_words += |
3398 2 * (num_double_arguments - DoubleRegister::kNumRegisters); | 3401 2 * (num_double_arguments - DoubleRegister::NumRegisters()); |
3399 } | 3402 } |
3400 } else { | 3403 } else { |
3401 // In the soft floating point calling convention, every double | 3404 // In the soft floating point calling convention, every double |
3402 // argument is passed using two registers. | 3405 // argument is passed using two registers. |
3403 num_reg_arguments += 2 * num_double_arguments; | 3406 num_reg_arguments += 2 * num_double_arguments; |
3404 } | 3407 } |
3405 // Up to four simple arguments are passed in registers r0..r3. | 3408 // Up to four simple arguments are passed in registers r0..r3. |
3406 if (num_reg_arguments > kRegisterPassedArguments) { | 3409 if (num_reg_arguments > kRegisterPassedArguments) { |
3407 stack_passed_words += num_reg_arguments - kRegisterPassedArguments; | 3410 stack_passed_words += num_reg_arguments - kRegisterPassedArguments; |
3408 } | 3411 } |
(...skipping 20 matching lines...)
3429 } | 3432 } |
3430 } | 3433 } |
3431 | 3434 |
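
As a worked example of the stack-word computation in CalculateStackPassedWords above (a standalone model that mirrors the code's logic, assuming 16 double registers and the 4 core argument registers r0-r3):

    #include <cstdio>

    static int CalculateStackPassedWords(int num_reg_arguments,
                                         int num_double_arguments,
                                         bool hardfloat) {
      const int kRegisterPassedArguments = 4;
      const int kNumDoubleRegisters = 16;  // assumption, standing in for NumRegisters()
      int stack_passed_words = 0;
      if (hardfloat) {
        // Hardfloat EABI: doubles ride in D-registers; overflow doubles take
        // two stack words each.
        if (num_double_arguments > kNumDoubleRegisters) {
          stack_passed_words += 2 * (num_double_arguments - kNumDoubleRegisters);
        }
      } else {
        // Softfloat EABI: every double occupies a core-register pair.
        num_reg_arguments += 2 * num_double_arguments;
      }
      // Up to four simple arguments fit in r0..r3; the rest spill to the stack.
      if (num_reg_arguments > kRegisterPassedArguments) {
        stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
      }
      return stack_passed_words;
    }

    int main() {
      // Softfloat: 2 core args + 2 doubles act like 6 register args, so 2 spill.
      printf("soft: %d\n", CalculateStackPassedWords(2, 2, false));  // 2
      // Hardfloat: the same call needs no stack words at all.
      printf("hard: %d\n", CalculateStackPassedWords(2, 2, true));   // 0
      return 0;
    }
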
3432 | 3435 |
3433 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 3436 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, |
3434 Register scratch) { | 3437 Register scratch) { |
3435 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 3438 PrepareCallCFunction(num_reg_arguments, 0, scratch); |
3436 } | 3439 } |
3437 | 3440 |
3438 | 3441 |
3439 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) { | 3442 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg) { |
3440 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3443 ASSERT(CpuFeatures::IsSupported(VFP2)); |
3441 if (use_eabi_hardfloat()) { | 3444 if (use_eabi_hardfloat()) { |
3442 Move(d0, dreg); | 3445 Move(d0, dreg); |
3443 } else { | 3446 } else { |
3444 vmov(r0, r1, dreg); | 3447 vmov(r0, r1, dreg); |
3445 } | 3448 } |
3446 } | 3449 } |
3447 | 3450 |
3448 | 3451 |
3449 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1, | 3452 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg1, |
3450 DoubleRegister dreg2) { | 3453 DwVfpRegister dreg2) { |
3451 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3454 ASSERT(CpuFeatures::IsSupported(VFP2)); |
3452 if (use_eabi_hardfloat()) { | 3455 if (use_eabi_hardfloat()) { |
3453 if (dreg2.is(d0)) { | 3456 if (dreg2.is(d0)) { |
3454 ASSERT(!dreg1.is(d1)); | 3457 ASSERT(!dreg1.is(d1)); |
3455 Move(d1, dreg2); | 3458 Move(d1, dreg2); |
3456 Move(d0, dreg1); | 3459 Move(d0, dreg1); |
3457 } else { | 3460 } else { |
3458 Move(d0, dreg1); | 3461 Move(d0, dreg1); |
3459 Move(d1, dreg2); | 3462 Move(d1, dreg2); |
3460 } | 3463 } |
3461 } else { | 3464 } else { |
3462 vmov(r0, r1, dreg1); | 3465 vmov(r0, r1, dreg1); |
3463 vmov(r2, r3, dreg2); | 3466 vmov(r2, r3, dreg2); |
3464 } | 3467 } |
3465 } | 3468 } |
3466 | 3469 |
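
The hardfloat branch of the two-register SetCallCDoubleArguments above has to order its moves carefully: if the second argument already lives in d0, writing d0 first would destroy it, and the ASSERT rules out the symmetric hazard of dreg1 being d1. A standalone illustration of that ordering:

    #include <cstdio>

    int main() {
      // d[0], d[1], d[2] stand in for d0, d1, d2.
      double d[3] = {2.5, 0.0, 7.0};
      double* dreg1 = &d[2];  // first double argument, currently in "d2"
      double* dreg2 = &d[0];  // second double argument, currently in "d0"

      // Mirrors the macro assembler: dreg2 already sits in d0, so it is copied
      // into d1 before d0 is overwritten with dreg1's value.
      if (dreg2 == &d[0]) {
        d[1] = *dreg2;  // Move(d1, dreg2) -> d1 = 2.5
        d[0] = *dreg1;  // Move(d0, dreg1) -> d0 = 7.0
      } else {
        d[0] = *dreg1;
        d[1] = *dreg2;
      }
      printf("d0=%g d1=%g\n", d[0], d[1]);  // d0=7 d1=2.5
      return 0;
    }
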
3467 | 3470 |
3468 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg, | 3471 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg, |
3469 Register reg) { | 3472 Register reg) { |
3470 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3473 ASSERT(CpuFeatures::IsSupported(VFP2)); |
3471 if (use_eabi_hardfloat()) { | 3474 if (use_eabi_hardfloat()) { |
3472 Move(d0, dreg); | 3475 Move(d0, dreg); |
3473 Move(r0, reg); | 3476 Move(r0, reg); |
3474 } else { | 3477 } else { |
3475 Move(r2, reg); | 3478 Move(r2, reg); |
3476 vmov(r0, r1, dreg); | 3479 vmov(r0, r1, dreg); |
3477 } | 3480 } |
3478 } | 3481 } |
(...skipping 262 matching lines...)
3741 bind(&done); | 3744 bind(&done); |
3742 } | 3745 } |
3743 | 3746 |
3744 | 3747 |
3745 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { | 3748 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { |
3746 Usat(output_reg, 8, Operand(input_reg)); | 3749 Usat(output_reg, 8, Operand(input_reg)); |
3747 } | 3750 } |
3748 | 3751 |
3749 | 3752 |
3750 void MacroAssembler::ClampDoubleToUint8(Register result_reg, | 3753 void MacroAssembler::ClampDoubleToUint8(Register result_reg, |
3751 DoubleRegister input_reg, | 3754 DwVfpRegister input_reg, |
3752 DoubleRegister temp_double_reg) { | 3755 DwVfpRegister temp_double_reg) { |
3753 Label above_zero; | 3756 Label above_zero; |
3754 Label done; | 3757 Label done; |
3755 Label in_bounds; | 3758 Label in_bounds; |
3756 | 3759 |
3757 Vmov(temp_double_reg, 0.0); | 3760 Vmov(temp_double_reg, 0.0); |
3758 VFPCompareAndSetFlags(input_reg, temp_double_reg); | 3761 VFPCompareAndSetFlags(input_reg, temp_double_reg); |
3759 b(gt, &above_zero); | 3762 b(gt, &above_zero); |
3760 | 3763 |
3761 // Double value is less than zero, NaN or Inf, return 0. | 3764 // Double value is less than zero, NaN or Inf, return 0. |
3762 mov(result_reg, Operand(0)); | 3765 mov(result_reg, Operand(0)); |
(...skipping 138 matching lines...)
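
For context on ClampDoubleToUint8, whose tail is elided above: a rough scalar model of the intended mapping (assumed semantics; the skipped assembly performs the upper-range check and the rounding with VFP instructions, so the exact rounding mode is not reproduced here):

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    static int ClampDoubleToUint8Model(double value) {
      if (!(value > 0.0)) return 0;    // negatives and NaN take the early-out above
      if (value >= 255.0) return 255;  // saturate at the top of the uint8 range
      return static_cast<int>(std::lround(value));  // assumed round-to-nearest
    }

    int main() {
      printf("%d %d %d %d\n",
             ClampDoubleToUint8Model(-3.0),
             ClampDoubleToUint8Model(std::nan("")),
             ClampDoubleToUint8Model(127.6),
             ClampDoubleToUint8Model(300.0));  // -> 0 0 128 255
      return 0;
    }
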
3901 void CodePatcher::EmitCondition(Condition cond) { | 3904 void CodePatcher::EmitCondition(Condition cond) { |
3902 Instr instr = Assembler::instr_at(masm_.pc_); | 3905 Instr instr = Assembler::instr_at(masm_.pc_); |
3903 instr = (instr & ~kCondMask) | cond; | 3906 instr = (instr & ~kCondMask) | cond; |
3904 masm_.emit(instr); | 3907 masm_.emit(instr); |
3905 } | 3908 } |
3906 | 3909 |
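
EmitCondition above rewrites only the condition field of the instruction being patched. A standalone arithmetic check of that masking, assuming the usual ARM encoding with the condition code in bits 31..28:

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kCondMask = 0xF0000000u;  // condition code occupies bits 31..28
      uint32_t instr = 0xE3A00001u;            // mov r0, #1 (AL condition)
      const uint32_t eq = 0x00000000u;         // EQ condition encoding
      instr = (instr & ~kCondMask) | eq;       // patched to moveq r0, #1
      printf("0x%08X\n", instr);               // 0x03A00001
      return 0;
    }
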
3907 | 3910 |
3908 } } // namespace v8::internal | 3911 } } // namespace v8::internal |
3909 | 3912 |
3910 #endif // V8_TARGET_ARCH_ARM | 3913 #endif // V8_TARGET_ARCH_ARM |