| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 272 matching lines...) |
| 283 } | 283 } |
| 284 | 284 |
| 285 | 285 |
| 286 void MacroAssembler::Move(Register dst, Register src, Condition cond) { | 286 void MacroAssembler::Move(Register dst, Register src, Condition cond) { |
| 287 if (!dst.is(src)) { | 287 if (!dst.is(src)) { |
| 288 mov(dst, src, LeaveCC, cond); | 288 mov(dst, src, LeaveCC, cond); |
| 289 } | 289 } |
| 290 } | 290 } |
| 291 | 291 |
| 292 | 292 |
| 293 void MacroAssembler::Move(DoubleRegister dst, DoubleRegister src) { | 293 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { |
| 294 ASSERT(CpuFeatures::IsSupported(VFP2)); | 294 ASSERT(CpuFeatures::IsSupported(VFP2)); |
| 295 CpuFeatures::Scope scope(VFP2); | 295 CpuFeatures::Scope scope(VFP2); |
| 296 if (!dst.is(src)) { | 296 if (!dst.is(src)) { |
| 297 vmov(dst, src); | 297 vmov(dst, src); |
| 298 } | 298 } |
| 299 } | 299 } |
| 300 | 300 |
| 301 | 301 |
| 302 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 302 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
| 303 Condition cond) { | 303 Condition cond) { |
| (...skipping 331 matching lines...) |
| 635 | 635 |
| 636 void MacroAssembler::PopSafepointRegisters() { | 636 void MacroAssembler::PopSafepointRegisters() { |
| 637 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 637 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
| 638 ldm(ia_w, sp, kSafepointSavedRegisters); | 638 ldm(ia_w, sp, kSafepointSavedRegisters); |
| 639 add(sp, sp, Operand(num_unsaved * kPointerSize)); | 639 add(sp, sp, Operand(num_unsaved * kPointerSize)); |
| 640 } | 640 } |
| 641 | 641 |
| 642 | 642 |
| 643 void MacroAssembler::PushSafepointRegistersAndDoubles() { | 643 void MacroAssembler::PushSafepointRegistersAndDoubles() { |
| 644 PushSafepointRegisters(); | 644 PushSafepointRegisters(); |
| 645 sub(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 645 sub(sp, sp, Operand(DwVfpRegister::NumAllocatableRegisters() * |
| 646 kDoubleSize)); | 646 kDoubleSize)); |
| 647 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 647 for (int i = 0; i < DwVfpRegister::NumAllocatableRegisters(); i++) { |
| 648 vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 648 vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
| 649 } | 649 } |
| 650 } | 650 } |
| 651 | 651 |
| 652 | 652 |
| 653 void MacroAssembler::PopSafepointRegistersAndDoubles() { | 653 void MacroAssembler::PopSafepointRegistersAndDoubles() { |
| 654 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 654 for (int i = 0; i < DwVfpRegister::NumAllocatableRegisters(); i++) { |
| 655 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 655 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
| 656 } | 656 } |
| 657 add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 657 add(sp, sp, Operand(DwVfpRegister::NumAllocatableRegisters() * |
| 658 kDoubleSize)); | 658 kDoubleSize)); |
| 659 PopSafepointRegisters(); | 659 PopSafepointRegisters(); |
| 660 } | 660 } |
| 661 | 661 |
| 662 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, | 662 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, |
| 663 Register dst) { | 663 Register dst) { |
| 664 str(src, SafepointRegistersAndDoublesSlot(dst)); | 664 str(src, SafepointRegistersAndDoublesSlot(dst)); |
| 665 } | 665 } |
| 666 | 666 |
| 667 | 667 |
| (...skipping 15 matching lines...) |
| 683 } | 683 } |
| 684 | 684 |
| 685 | 685 |
| 686 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { | 686 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { |
| 687 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); | 687 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); |
| 688 } | 688 } |
| 689 | 689 |
| 690 | 690 |
| 691 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { | 691 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { |
| 692 // General purpose registers are pushed last on the stack. | 692 // General purpose registers are pushed last on the stack. |
| 693 int doubles_size = DwVfpRegister::kNumAllocatableRegisters * kDoubleSize; | 693 int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; |
| 694 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; | 694 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; |
| 695 return MemOperand(sp, doubles_size + register_offset); | 695 return MemOperand(sp, doubles_size + register_offset); |
| 696 } | 696 } |
| 697 | 697 |
| 698 | 698 |
| 699 void MacroAssembler::Ldrd(Register dst1, Register dst2, | 699 void MacroAssembler::Ldrd(Register dst1, Register dst2, |
| 700 const MemOperand& src, Condition cond) { | 700 const MemOperand& src, Condition cond) { |
| 701 ASSERT(src.rm().is(no_reg)); | 701 ASSERT(src.rm().is(no_reg)); |
| 702 ASSERT(!dst1.is(lr)); // r14. | 702 ASSERT(!dst1.is(lr)); // r14. |
| 703 ASSERT_EQ(0, dst1.code() % 2); | 703 ASSERT_EQ(0, dst1.code() % 2); |
| (...skipping 255 matching lines...) |
| 959 #endif | 959 #endif |
| 960 | 960 |
| 961 // Tear down the exit frame, pop the arguments, and return. | 961 // Tear down the exit frame, pop the arguments, and return. |
| 962 mov(sp, Operand(fp)); | 962 mov(sp, Operand(fp)); |
| 963 ldm(ia_w, sp, fp.bit() | lr.bit()); | 963 ldm(ia_w, sp, fp.bit() | lr.bit()); |
| 964 if (argument_count.is_valid()) { | 964 if (argument_count.is_valid()) { |
| 965 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); | 965 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); |
| 966 } | 966 } |
| 967 } | 967 } |
| 968 | 968 |
| 969 void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) { | 969 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { |
| 970 ASSERT(CpuFeatures::IsSupported(VFP2)); | 970 ASSERT(CpuFeatures::IsSupported(VFP2)); |
| 971 if (use_eabi_hardfloat()) { | 971 if (use_eabi_hardfloat()) { |
| 972 Move(dst, d0); | 972 Move(dst, d0); |
| 973 } else { | 973 } else { |
| 974 vmov(dst, r0, r1); | 974 vmov(dst, r0, r1); |
| 975 } | 975 } |
| 976 } | 976 } |
| 977 | 977 |
| 978 | 978 |
| 979 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { | 979 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { |
| (...skipping 1713 matching lines...) |
| 2693 | 2693 |
| 2694 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { | 2694 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { |
| 2695 CallRuntime(Runtime::FunctionForId(fid), num_arguments); | 2695 CallRuntime(Runtime::FunctionForId(fid), num_arguments); |
| 2696 } | 2696 } |
| 2697 | 2697 |
| 2698 | 2698 |
| 2699 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { | 2699 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
| 2700 const Runtime::Function* function = Runtime::FunctionForId(id); | 2700 const Runtime::Function* function = Runtime::FunctionForId(id); |
| 2701 mov(r0, Operand(function->nargs)); | 2701 mov(r0, Operand(function->nargs)); |
| 2702 mov(r1, Operand(ExternalReference(function, isolate()))); | 2702 mov(r1, Operand(ExternalReference(function, isolate()))); |
| 2703 CEntryStub stub(1, kSaveFPRegs); | 2703 SaveFPRegsMode mode = CpuFeatures::IsSupported(VFP2) |
| | 2704 ? kSaveFPRegs |
| | 2705 : kDontSaveFPRegs; |
| | 2706 CEntryStub stub(1, mode); |
| 2704 CallStub(&stub); | 2707 CallStub(&stub); |
| 2705 } | 2708 } |
| 2706 | 2709 |
| 2707 | 2710 |
| 2708 void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 2711 void MacroAssembler::CallExternalReference(const ExternalReference& ext, |
| 2709 int num_arguments) { | 2712 int num_arguments) { |
| 2710 mov(r0, Operand(num_arguments)); | 2713 mov(r0, Operand(num_arguments)); |
| 2711 mov(r1, Operand(ext)); | 2714 mov(r1, Operand(ext)); |
| 2712 | 2715 |
| 2713 CEntryStub stub(1); | 2716 CEntryStub stub(1); |
| (...skipping 655 matching lines...) |
| 3369 | 3372 |
| 3370 static const int kRegisterPassedArguments = 4; | 3373 static const int kRegisterPassedArguments = 4; |
| 3371 | 3374 |
| 3372 | 3375 |
| 3373 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, | 3376 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, |
| 3374 int num_double_arguments) { | 3377 int num_double_arguments) { |
| 3375 int stack_passed_words = 0; | 3378 int stack_passed_words = 0; |
| 3376 if (use_eabi_hardfloat()) { | 3379 if (use_eabi_hardfloat()) { |
| 3377 // In the hard floating point calling convention, we can use | 3380 // In the hard floating point calling convention, we can use |
| 3378 // all double registers to pass doubles. | 3381 // all double registers to pass doubles. |
| 3379 if (num_double_arguments > DoubleRegister::kNumRegisters) { | 3382 if (num_double_arguments > DoubleRegister::NumRegisters()) { |
| 3380 stack_passed_words += | 3383 stack_passed_words += |
| 3381 2 * (num_double_arguments - DoubleRegister::kNumRegisters); | 3384 2 * (num_double_arguments - DoubleRegister::NumRegisters()); |
| 3382 } | 3385 } |
| 3383 } else { | 3386 } else { |
| 3384 // In the soft floating point calling convention, every double | 3387 // In the soft floating point calling convention, every double |
| 3385 // argument is passed using two registers. | 3388 // argument is passed using two registers. |
| 3386 num_reg_arguments += 2 * num_double_arguments; | 3389 num_reg_arguments += 2 * num_double_arguments; |
| 3387 } | 3390 } |
| 3388 // Up to four simple arguments are passed in registers r0..r3. | 3391 // Up to four simple arguments are passed in registers r0..r3. |
| 3389 if (num_reg_arguments > kRegisterPassedArguments) { | 3392 if (num_reg_arguments > kRegisterPassedArguments) { |
| 3390 stack_passed_words += num_reg_arguments - kRegisterPassedArguments; | 3393 stack_passed_words += num_reg_arguments - kRegisterPassedArguments; |
| 3391 } | 3394 } |
| (...skipping 20 matching lines...) |
| 3412 } | 3415 } |
| 3413 } | 3416 } |
| 3414 | 3417 |
| 3415 | 3418 |
| 3416 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 3419 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, |
| 3417 Register scratch) { | 3420 Register scratch) { |
| 3418 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 3421 PrepareCallCFunction(num_reg_arguments, 0, scratch); |
| 3419 } | 3422 } |
| 3420 | 3423 |
| 3421 | 3424 |
| 3422 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) { | 3425 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg) { |
| 3423 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3426 ASSERT(CpuFeatures::IsSupported(VFP2)); |
| 3424 if (use_eabi_hardfloat()) { | 3427 if (use_eabi_hardfloat()) { |
| 3425 Move(d0, dreg); | 3428 Move(d0, dreg); |
| 3426 } else { | 3429 } else { |
| 3427 vmov(r0, r1, dreg); | 3430 vmov(r0, r1, dreg); |
| 3428 } | 3431 } |
| 3429 } | 3432 } |
| 3430 | 3433 |
| 3431 | 3434 |
| 3432 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1, | 3435 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg1, |
| 3433 DoubleRegister dreg2) { | 3436 DwVfpRegister dreg2) { |
| 3434 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3437 ASSERT(CpuFeatures::IsSupported(VFP2)); |
| 3435 if (use_eabi_hardfloat()) { | 3438 if (use_eabi_hardfloat()) { |
| 3436 if (dreg2.is(d0)) { | 3439 if (dreg2.is(d0)) { |
| 3437 ASSERT(!dreg1.is(d1)); | 3440 ASSERT(!dreg1.is(d1)); |
| 3438 Move(d1, dreg2); | 3441 Move(d1, dreg2); |
| 3439 Move(d0, dreg1); | 3442 Move(d0, dreg1); |
| 3440 } else { | 3443 } else { |
| 3441 Move(d0, dreg1); | 3444 Move(d0, dreg1); |
| 3442 Move(d1, dreg2); | 3445 Move(d1, dreg2); |
| 3443 } | 3446 } |
| 3444 } else { | 3447 } else { |
| 3445 vmov(r0, r1, dreg1); | 3448 vmov(r0, r1, dreg1); |
| 3446 vmov(r2, r3, dreg2); | 3449 vmov(r2, r3, dreg2); |
| 3447 } | 3450 } |
| 3448 } | 3451 } |
| 3449 | 3452 |
| 3450 | 3453 |
| 3451 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg, | 3454 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg, |
| 3452 Register reg) { | 3455 Register reg) { |
| 3453 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3456 ASSERT(CpuFeatures::IsSupported(VFP2)); |
| 3454 if (use_eabi_hardfloat()) { | 3457 if (use_eabi_hardfloat()) { |
| 3455 Move(d0, dreg); | 3458 Move(d0, dreg); |
| 3456 Move(r0, reg); | 3459 Move(r0, reg); |
| 3457 } else { | 3460 } else { |
| 3458 Move(r2, reg); | 3461 Move(r2, reg); |
| 3459 vmov(r0, r1, dreg); | 3462 vmov(r0, r1, dreg); |
| 3460 } | 3463 } |
| 3461 } | 3464 } |
| (...skipping 262 matching lines...) |
| 3724 bind(&done); | 3727 bind(&done); |
| 3725 } | 3728 } |
| 3726 | 3729 |
| 3727 | 3730 |
| 3728 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { | 3731 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { |
| 3729 Usat(output_reg, 8, Operand(input_reg)); | 3732 Usat(output_reg, 8, Operand(input_reg)); |
| 3730 } | 3733 } |
| 3731 | 3734 |
| 3732 | 3735 |
| 3733 void MacroAssembler::ClampDoubleToUint8(Register result_reg, | 3736 void MacroAssembler::ClampDoubleToUint8(Register result_reg, |
| 3734 DoubleRegister input_reg, | 3737 DwVfpRegister input_reg, |
| 3735 DoubleRegister temp_double_reg) { | 3738 DwVfpRegister temp_double_reg) { |
| 3736 Label above_zero; | 3739 Label above_zero; |
| 3737 Label done; | 3740 Label done; |
| 3738 Label in_bounds; | 3741 Label in_bounds; |
| 3739 | 3742 |
| 3740 Vmov(temp_double_reg, 0.0); | 3743 Vmov(temp_double_reg, 0.0); |
| 3741 VFPCompareAndSetFlags(input_reg, temp_double_reg); | 3744 VFPCompareAndSetFlags(input_reg, temp_double_reg); |
| 3742 b(gt, &above_zero); | 3745 b(gt, &above_zero); |
| 3743 | 3746 |
| 3744 // Double value is less than zero, NaN or Inf, return 0. | 3747 // Double value is less than zero, NaN or Inf, return 0. |
| 3745 mov(result_reg, Operand(0)); | 3748 mov(result_reg, Operand(0)); |
| (...skipping 138 matching lines...) |
| 3884 void CodePatcher::EmitCondition(Condition cond) { | 3887 void CodePatcher::EmitCondition(Condition cond) { |
| 3885 Instr instr = Assembler::instr_at(masm_.pc_); | 3888 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3886 instr = (instr & ~kCondMask) | cond; | 3889 instr = (instr & ~kCondMask) | cond; |
| 3887 masm_.emit(instr); | 3890 masm_.emit(instr); |
| 3888 } | 3891 } |
| 3889 | 3892 |
| 3890 | 3893 |
| 3891 } } // namespace v8::internal | 3894 } } // namespace v8::internal |
| 3892 | 3895 |
| 3893 #endif // V8_TARGET_ARCH_ARM | 3896 #endif // V8_TARGET_ARCH_ARM |