OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3489 matching lines...) |
3500 | 3500 |
3501 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, | 3501 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, |
3502 bool save_doubles) { | 3502 bool save_doubles) { |
3503 #ifdef _WIN64 | 3503 #ifdef _WIN64 |
3504 const int kShadowSpace = 4; | 3504 const int kShadowSpace = 4; |
3505 arg_stack_space += kShadowSpace; | 3505 arg_stack_space += kShadowSpace; |
3506 #endif | 3506 #endif |
3507 // Optionally save all XMM registers. | 3507 // Optionally save all XMM registers. |
3508 if (save_doubles) { | 3508 if (save_doubles) { |
3509 int space = XMMRegister::kMaxNumRegisters * kDoubleSize + | 3509 int space = XMMRegister::kMaxNumRegisters * kDoubleSize + |
3510 arg_stack_space * kPointerSize; | 3510 arg_stack_space * kRegisterSize; |
3511 subq(rsp, Immediate(space)); | 3511 subq(rsp, Immediate(space)); |
3512 int offset = -2 * kPointerSize; | 3512 int offset = -2 * kPointerSize; |
3513 for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) { | 3513 for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) { |
3514 XMMRegister reg = XMMRegister::FromAllocationIndex(i); | 3514 XMMRegister reg = XMMRegister::FromAllocationIndex(i); |
3515 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); | 3515 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); |
3516 } | 3516 } |
3517 } else if (arg_stack_space > 0) { | 3517 } else if (arg_stack_space > 0) { |
3518 subq(rsp, Immediate(arg_stack_space * kPointerSize)); | 3518 subq(rsp, Immediate(arg_stack_space * kRegisterSize)); |
3519 } | 3519 } |
3520 | 3520 |
3521 // Get the required frame alignment for the OS. | 3521 // Get the required frame alignment for the OS. |
3522 const int kFrameAlignment = OS::ActivationFrameAlignment(); | 3522 const int kFrameAlignment = OS::ActivationFrameAlignment(); |
3523 if (kFrameAlignment > 0) { | 3523 if (kFrameAlignment > 0) { |
3524 ASSERT(IsPowerOf2(kFrameAlignment)); | 3524 ASSERT(IsPowerOf2(kFrameAlignment)); |
3525 ASSERT(is_int8(kFrameAlignment)); | 3525 ASSERT(is_int8(kFrameAlignment)); |
3526 and_(rsp, Immediate(-kFrameAlignment)); | 3526 and_(rsp, Immediate(-kFrameAlignment)); |
3527 } | 3527 } |
3528 | 3528 |
(...skipping 839 matching lines...) |
4368 void MacroAssembler::PrepareCallCFunction(int num_arguments) { | 4368 void MacroAssembler::PrepareCallCFunction(int num_arguments) { |
4369 int frame_alignment = OS::ActivationFrameAlignment(); | 4369 int frame_alignment = OS::ActivationFrameAlignment(); |
4370 ASSERT(frame_alignment != 0); | 4370 ASSERT(frame_alignment != 0); |
4371 ASSERT(num_arguments >= 0); | 4371 ASSERT(num_arguments >= 0); |
4372 | 4372 |
4373 // Make stack end at alignment and allocate space for arguments and old rsp. | 4373 // Make stack end at alignment and allocate space for arguments and old rsp. |
4374 movq(kScratchRegister, rsp); | 4374 movq(kScratchRegister, rsp); |
4375 ASSERT(IsPowerOf2(frame_alignment)); | 4375 ASSERT(IsPowerOf2(frame_alignment)); |
4376 int argument_slots_on_stack = | 4376 int argument_slots_on_stack = |
4377 ArgumentStackSlotsForCFunctionCall(num_arguments); | 4377 ArgumentStackSlotsForCFunctionCall(num_arguments); |
4378 subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize)); | 4378 subq(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize)); |
4379 and_(rsp, Immediate(-frame_alignment)); | 4379 and_(rsp, Immediate(-frame_alignment)); |
4380 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister); | 4380 movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister); |
4381 } | 4381 } |
4382 | 4382 |
4383 | 4383 |
4384 void MacroAssembler::CallCFunction(ExternalReference function, | 4384 void MacroAssembler::CallCFunction(ExternalReference function, |
4385 int num_arguments) { | 4385 int num_arguments) { |
4386 LoadAddress(rax, function); | 4386 LoadAddress(rax, function); |
4387 CallCFunction(rax, num_arguments); | 4387 CallCFunction(rax, num_arguments); |
4388 } | 4388 } |
4389 | 4389 |
4390 | 4390 |
4391 void MacroAssembler::CallCFunction(Register function, int num_arguments) { | 4391 void MacroAssembler::CallCFunction(Register function, int num_arguments) { |
4392 ASSERT(has_frame()); | 4392 ASSERT(has_frame()); |
4393 // Check stack alignment. | 4393 // Check stack alignment. |
4394 if (emit_debug_code()) { | 4394 if (emit_debug_code()) { |
4395 CheckStackAlignment(); | 4395 CheckStackAlignment(); |
4396 } | 4396 } |
4397 | 4397 |
4398 call(function); | 4398 call(function); |
4399 ASSERT(OS::ActivationFrameAlignment() != 0); | 4399 ASSERT(OS::ActivationFrameAlignment() != 0); |
4400 ASSERT(num_arguments >= 0); | 4400 ASSERT(num_arguments >= 0); |
4401 int argument_slots_on_stack = | 4401 int argument_slots_on_stack = |
4402 ArgumentStackSlotsForCFunctionCall(num_arguments); | 4402 ArgumentStackSlotsForCFunctionCall(num_arguments); |
4403 movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize)); | 4403 movq(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize)); |
4404 } | 4404 } |
4405 | 4405 |
4406 | 4406 |
4407 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { | 4407 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { |
4408 if (r1.is(r2)) return true; | 4408 if (r1.is(r2)) return true; |
4409 if (r1.is(r3)) return true; | 4409 if (r1.is(r3)) return true; |
4410 if (r1.is(r4)) return true; | 4410 if (r1.is(r4)) return true; |
4411 if (r2.is(r3)) return true; | 4411 if (r2.is(r3)) return true; |
4412 if (r2.is(r4)) return true; | 4412 if (r2.is(r4)) return true; |
4413 if (r3.is(r4)) return true; | 4413 if (r3.is(r4)) return true; |
(...skipping 280 matching lines...) |
4694 j(greater, &no_memento_available); | 4694 j(greater, &no_memento_available); |
4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4695 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4696 Heap::kAllocationMementoMapRootIndex); | 4696 Heap::kAllocationMementoMapRootIndex); |
4697 bind(&no_memento_available); | 4697 bind(&no_memento_available); |
4698 } | 4698 } |
4699 | 4699 |
4700 | 4700 |
4701 } } // namespace v8::internal | 4701 } } // namespace v8::internal |
4702 | 4702 |
4703 #endif // V8_TARGET_ARCH_X64 | 4703 #endif // V8_TARGET_ARCH_X64 |