OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 272 matching lines...)
283 } | 283 } |
284 | 284 |
285 | 285 |
286 void MacroAssembler::Move(Register dst, Register src, Condition cond) { | 286 void MacroAssembler::Move(Register dst, Register src, Condition cond) { |
287 if (!dst.is(src)) { | 287 if (!dst.is(src)) { |
288 mov(dst, src, LeaveCC, cond); | 288 mov(dst, src, LeaveCC, cond); |
289 } | 289 } |
290 } | 290 } |
291 | 291 |
292 | 292 |
293 void MacroAssembler::Move(DoubleRegister dst, DoubleRegister src) { | 293 void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) { |
294 ASSERT(CpuFeatures::IsSupported(VFP2)); | 294 ASSERT(CpuFeatures::IsSupported(VFP2)); |
295 CpuFeatures::Scope scope(VFP2); | 295 CpuFeatures::Scope scope(VFP2); |
296 if (!dst.is(src)) { | 296 if (!dst.is(src)) { |
297 vmov(dst, src); | 297 vmov(dst, src); |
298 } | 298 } |
299 } | 299 } |
300 | 300 |
301 | 301 |
302 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, | 302 void MacroAssembler::And(Register dst, Register src1, const Operand& src2, |
303 Condition cond) { | 303 Condition cond) { |
(...skipping 331 matching lines...)
635 | 635 |
636 void MacroAssembler::PopSafepointRegisters() { | 636 void MacroAssembler::PopSafepointRegisters() { |
637 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 637 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
638 ldm(ia_w, sp, kSafepointSavedRegisters); | 638 ldm(ia_w, sp, kSafepointSavedRegisters); |
639 add(sp, sp, Operand(num_unsaved * kPointerSize)); | 639 add(sp, sp, Operand(num_unsaved * kPointerSize)); |
640 } | 640 } |
641 | 641 |
642 | 642 |
643 void MacroAssembler::PushSafepointRegistersAndDoubles() { | 643 void MacroAssembler::PushSafepointRegistersAndDoubles() { |
644 PushSafepointRegisters(); | 644 PushSafepointRegisters(); |
645 sub(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 645 sub(sp, sp, Operand(DwVfpRegister::NumAllocatableRegisters() * |
646 kDoubleSize)); | 646 kDoubleSize)); |
647 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 647 for (int i = 0; i < DwVfpRegister::NumAllocatableRegisters(); i++) { |
648 vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 648 vstr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
649 } | 649 } |
650 } | 650 } |
651 | 651 |
652 | 652 |
653 void MacroAssembler::PopSafepointRegistersAndDoubles() { | 653 void MacroAssembler::PopSafepointRegistersAndDoubles() { |
654 for (int i = 0; i < DwVfpRegister::kNumAllocatableRegisters; i++) { | 654 for (int i = 0; i < DwVfpRegister::NumAllocatableRegisters(); i++) { |
655 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); | 655 vldr(DwVfpRegister::FromAllocationIndex(i), sp, i * kDoubleSize); |
656 } | 656 } |
657 add(sp, sp, Operand(DwVfpRegister::kNumAllocatableRegisters * | 657 add(sp, sp, Operand(DwVfpRegister::NumAllocatableRegisters() * |
658 kDoubleSize)); | 658 kDoubleSize)); |
659 PopSafepointRegisters(); | 659 PopSafepointRegisters(); |
660 } | 660 } |
661 | 661 |
662 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, | 662 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, |
663 Register dst) { | 663 Register dst) { |
664 str(src, SafepointRegistersAndDoublesSlot(dst)); | 664 str(src, SafepointRegistersAndDoublesSlot(dst)); |
665 } | 665 } |
666 | 666 |
667 | 667 |
(...skipping 15 matching lines...)
683 } | 683 } |
684 | 684 |
685 | 685 |
686 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { | 686 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) { |
687 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); | 687 return MemOperand(sp, SafepointRegisterStackIndex(reg.code()) * kPointerSize); |
688 } | 688 } |
689 | 689 |
690 | 690 |
691 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { | 691 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) { |
692 // General purpose registers are pushed last on the stack. | 692 // General purpose registers are pushed last on the stack. |
693 int doubles_size = DwVfpRegister::kNumAllocatableRegisters * kDoubleSize; | 693 int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize; |
694 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; | 694 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize; |
695 return MemOperand(sp, doubles_size + register_offset); | 695 return MemOperand(sp, doubles_size + register_offset); |
696 } | 696 } |
697 | 697 |
698 | 698 |
699 void MacroAssembler::Ldrd(Register dst1, Register dst2, | 699 void MacroAssembler::Ldrd(Register dst1, Register dst2, |
700 const MemOperand& src, Condition cond) { | 700 const MemOperand& src, Condition cond) { |
701 ASSERT(src.rm().is(no_reg)); | 701 ASSERT(src.rm().is(no_reg)); |
702 ASSERT(!dst1.is(lr)); // r14. | 702 ASSERT(!dst1.is(lr)); // r14. |
703 ASSERT_EQ(0, dst1.code() % 2); | 703 ASSERT_EQ(0, dst1.code() % 2); |
(...skipping 255 matching lines...)
959 #endif | 959 #endif |
960 | 960 |
961 // Tear down the exit frame, pop the arguments, and return. | 961 // Tear down the exit frame, pop the arguments, and return. |
962 mov(sp, Operand(fp)); | 962 mov(sp, Operand(fp)); |
963 ldm(ia_w, sp, fp.bit() | lr.bit()); | 963 ldm(ia_w, sp, fp.bit() | lr.bit()); |
964 if (argument_count.is_valid()) { | 964 if (argument_count.is_valid()) { |
965 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); | 965 add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2)); |
966 } | 966 } |
967 } | 967 } |
968 | 968 |
969 void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) { | 969 void MacroAssembler::GetCFunctionDoubleResult(const DwVfpRegister dst) { |
970 ASSERT(CpuFeatures::IsSupported(VFP2)); | 970 ASSERT(CpuFeatures::IsSupported(VFP2)); |
971 if (use_eabi_hardfloat()) { | 971 if (use_eabi_hardfloat()) { |
972 Move(dst, d0); | 972 Move(dst, d0); |
973 } else { | 973 } else { |
974 vmov(dst, r0, r1); | 974 vmov(dst, r0, r1); |
975 } | 975 } |
976 } | 976 } |
977 | 977 |
978 | 978 |
979 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { | 979 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) { |
(...skipping 1710 matching lines...)
2690 | 2690 |
2691 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { | 2691 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { |
2692 CallRuntime(Runtime::FunctionForId(fid), num_arguments); | 2692 CallRuntime(Runtime::FunctionForId(fid), num_arguments); |
2693 } | 2693 } |
2694 | 2694 |
2695 | 2695 |
2696 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { | 2696 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
2697 const Runtime::Function* function = Runtime::FunctionForId(id); | 2697 const Runtime::Function* function = Runtime::FunctionForId(id); |
2698 mov(r0, Operand(function->nargs)); | 2698 mov(r0, Operand(function->nargs)); |
2699 mov(r1, Operand(ExternalReference(function, isolate()))); | 2699 mov(r1, Operand(ExternalReference(function, isolate()))); |
 2700 CEntryStub stub(1, kSaveFPRegs); | 2700 CEntryStub stub(1, CpuFeatures::IsSupported(VFP2) |
Jakob Kummerow 2012/11/19 12:36:00: nit: formatting
danno 2012/11/26 17:16:18: Done.
 | 2701 ? kSaveFPRegs : kDontSaveFPRegs); |
2701 CallStub(&stub); | 2702 CallStub(&stub); |
2702 } | 2703 } |
2703 | 2704 |
2704 | 2705 |
2705 void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 2706 void MacroAssembler::CallExternalReference(const ExternalReference& ext, |
2706 int num_arguments) { | 2707 int num_arguments) { |
2707 mov(r0, Operand(num_arguments)); | 2708 mov(r0, Operand(num_arguments)); |
2708 mov(r1, Operand(ext)); | 2709 mov(r1, Operand(ext)); |
2709 | 2710 |
2710 CEntryStub stub(1); | 2711 CEntryStub stub(1); |
(...skipping 651 matching lines...)
3362 | 3363 |
3363 static const int kRegisterPassedArguments = 4; | 3364 static const int kRegisterPassedArguments = 4; |
3364 | 3365 |
3365 | 3366 |
3366 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, | 3367 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments, |
3367 int num_double_arguments) { | 3368 int num_double_arguments) { |
3368 int stack_passed_words = 0; | 3369 int stack_passed_words = 0; |
3369 if (use_eabi_hardfloat()) { | 3370 if (use_eabi_hardfloat()) { |
3370 // In the hard floating point calling convention, we can use | 3371 // In the hard floating point calling convention, we can use |
3371 // all double registers to pass doubles. | 3372 // all double registers to pass doubles. |
3372 if (num_double_arguments > DoubleRegister::kNumRegisters) { | 3373 if (num_double_arguments > DoubleRegister::NumRegisters()) { |
3373 stack_passed_words += | 3374 stack_passed_words += |
3374 2 * (num_double_arguments - DoubleRegister::kNumRegisters); | 3375 2 * (num_double_arguments - DoubleRegister::NumRegisters()); |
3375 } | 3376 } |
3376 } else { | 3377 } else { |
3377 // In the soft floating point calling convention, every double | 3378 // In the soft floating point calling convention, every double |
3378 // argument is passed using two registers. | 3379 // argument is passed using two registers. |
3379 num_reg_arguments += 2 * num_double_arguments; | 3380 num_reg_arguments += 2 * num_double_arguments; |
3380 } | 3381 } |
3381 // Up to four simple arguments are passed in registers r0..r3. | 3382 // Up to four simple arguments are passed in registers r0..r3. |
3382 if (num_reg_arguments > kRegisterPassedArguments) { | 3383 if (num_reg_arguments > kRegisterPassedArguments) { |
3383 stack_passed_words += num_reg_arguments - kRegisterPassedArguments; | 3384 stack_passed_words += num_reg_arguments - kRegisterPassedArguments; |
3384 } | 3385 } |
(...skipping 20 matching lines...)
3405 } | 3406 } |
3406 } | 3407 } |
3407 | 3408 |
3408 | 3409 |
3409 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, | 3410 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments, |
3410 Register scratch) { | 3411 Register scratch) { |
3411 PrepareCallCFunction(num_reg_arguments, 0, scratch); | 3412 PrepareCallCFunction(num_reg_arguments, 0, scratch); |
3412 } | 3413 } |
3413 | 3414 |
3414 | 3415 |
3415 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg) { | 3416 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg) { |
3416 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3417 ASSERT(CpuFeatures::IsSupported(VFP2)); |
3417 if (use_eabi_hardfloat()) { | 3418 if (use_eabi_hardfloat()) { |
3418 Move(d0, dreg); | 3419 Move(d0, dreg); |
3419 } else { | 3420 } else { |
3420 vmov(r0, r1, dreg); | 3421 vmov(r0, r1, dreg); |
3421 } | 3422 } |
3422 } | 3423 } |
3423 | 3424 |
3424 | 3425 |
3425 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg1, | 3426 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg1, |
3426 DoubleRegister dreg2) { | 3427 DwVfpRegister dreg2) { |
3427 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3428 ASSERT(CpuFeatures::IsSupported(VFP2)); |
3428 if (use_eabi_hardfloat()) { | 3429 if (use_eabi_hardfloat()) { |
3429 if (dreg2.is(d0)) { | 3430 if (dreg2.is(d0)) { |
3430 ASSERT(!dreg1.is(d1)); | 3431 ASSERT(!dreg1.is(d1)); |
3431 Move(d1, dreg2); | 3432 Move(d1, dreg2); |
3432 Move(d0, dreg1); | 3433 Move(d0, dreg1); |
3433 } else { | 3434 } else { |
3434 Move(d0, dreg1); | 3435 Move(d0, dreg1); |
3435 Move(d1, dreg2); | 3436 Move(d1, dreg2); |
3436 } | 3437 } |
3437 } else { | 3438 } else { |
3438 vmov(r0, r1, dreg1); | 3439 vmov(r0, r1, dreg1); |
3439 vmov(r2, r3, dreg2); | 3440 vmov(r2, r3, dreg2); |
3440 } | 3441 } |
3441 } | 3442 } |
3442 | 3443 |
3443 | 3444 |
3444 void MacroAssembler::SetCallCDoubleArguments(DoubleRegister dreg, | 3445 void MacroAssembler::SetCallCDoubleArguments(DwVfpRegister dreg, |
3445 Register reg) { | 3446 Register reg) { |
3446 ASSERT(CpuFeatures::IsSupported(VFP2)); | 3447 ASSERT(CpuFeatures::IsSupported(VFP2)); |
3447 if (use_eabi_hardfloat()) { | 3448 if (use_eabi_hardfloat()) { |
3448 Move(d0, dreg); | 3449 Move(d0, dreg); |
3449 Move(r0, reg); | 3450 Move(r0, reg); |
3450 } else { | 3451 } else { |
3451 Move(r2, reg); | 3452 Move(r2, reg); |
3452 vmov(r0, r1, dreg); | 3453 vmov(r0, r1, dreg); |
3453 } | 3454 } |
3454 } | 3455 } |
(...skipping 262 matching lines...)
3717 bind(&done); | 3718 bind(&done); |
3718 } | 3719 } |
3719 | 3720 |
3720 | 3721 |
3721 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { | 3722 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { |
3722 Usat(output_reg, 8, Operand(input_reg)); | 3723 Usat(output_reg, 8, Operand(input_reg)); |
3723 } | 3724 } |
3724 | 3725 |
3725 | 3726 |
3726 void MacroAssembler::ClampDoubleToUint8(Register result_reg, | 3727 void MacroAssembler::ClampDoubleToUint8(Register result_reg, |
3727 DoubleRegister input_reg, | 3728 DwVfpRegister input_reg, |
3728 DoubleRegister temp_double_reg) { | 3729 DwVfpRegister temp_double_reg) { |
3729 Label above_zero; | 3730 Label above_zero; |
3730 Label done; | 3731 Label done; |
3731 Label in_bounds; | 3732 Label in_bounds; |
3732 | 3733 |
3733 Vmov(temp_double_reg, 0.0); | 3734 Vmov(temp_double_reg, 0.0); |
3734 VFPCompareAndSetFlags(input_reg, temp_double_reg); | 3735 VFPCompareAndSetFlags(input_reg, temp_double_reg); |
3735 b(gt, &above_zero); | 3736 b(gt, &above_zero); |
3736 | 3737 |
3737 // Double value is less than zero, NaN or Inf, return 0. | 3738 // Double value is less than zero, NaN or Inf, return 0. |
3738 mov(result_reg, Operand(0)); | 3739 mov(result_reg, Operand(0)); |
(...skipping 138 matching lines...)
3877 void CodePatcher::EmitCondition(Condition cond) { | 3878 void CodePatcher::EmitCondition(Condition cond) { |
3878 Instr instr = Assembler::instr_at(masm_.pc_); | 3879 Instr instr = Assembler::instr_at(masm_.pc_); |
3879 instr = (instr & ~kCondMask) | cond; | 3880 instr = (instr & ~kCondMask) | cond; |
3880 masm_.emit(instr); | 3881 masm_.emit(instr); |
3881 } | 3882 } |
3882 | 3883 |
3883 | 3884 |
3884 } } // namespace v8::internal | 3885 } } // namespace v8::internal |
3885 | 3886 |
3886 #endif // V8_TARGET_ARCH_ARM | 3887 #endif // V8_TARGET_ARCH_ARM |