OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 357 matching lines...)
368 // there is no difference in using either key. | 368 // there is no difference in using either key. |
369 Integer32ToSmi(index, hash); | 369 Integer32ToSmi(index, hash); |
370 } | 370 } |
371 | 371 |
372 | 372 |
373 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { | 373 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { |
374 CallRuntime(Runtime::FunctionForId(id), num_arguments); | 374 CallRuntime(Runtime::FunctionForId(id), num_arguments); |
375 } | 375 } |
376 | 376 |
377 | 377 |
| 378 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
| 379 Runtime::Function* function = Runtime::FunctionForId(id); |
| 380 Set(rax, function->nargs); |
| 381 movq(rbx, ExternalReference(function)); |
| 382 CEntryStub ces(1); |
| 383 ces.SaveDoubles(); |
| 384 CallStub(&ces); |
| 385 } |
| 386 |
| 387 |
378 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, | 388 MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id, |
379 int num_arguments) { | 389 int num_arguments) { |
380 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); | 390 return TryCallRuntime(Runtime::FunctionForId(id), num_arguments); |
381 } | 391 } |
382 | 392 |
383 | 393 |
384 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { | 394 void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) { |
385 // If the expected number of arguments of the runtime function is | 395 // If the expected number of arguments of the runtime function is |
386 // constant, we check that the actual number of arguments matches the | 396 // constant, we check that the actual number of arguments matches the |
387 // expectation. | 397 // expectation. |
(...skipping 572 matching lines...)
960 | 970 |
961 | 971 |
962 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) { | 972 Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) { |
963 // An unsigned 32-bit integer value is valid as long as the high bit | 973 // An unsigned 32-bit integer value is valid as long as the high bit |
964 // is not set. | 974 // is not set. |
965 testl(src, src); | 975 testl(src, src); |
966 return positive; | 976 return positive; |
967 } | 977 } |
968 | 978 |
969 | 979 |
| 980 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) { |
| 981 if (dst.is(src)) { |
| 982 andl(dst, Immediate(kSmiTagMask)); |
| 983 } else { |
| 984 movl(dst, Immediate(kSmiTagMask)); |
| 985 andl(dst, src); |
| 986 } |
| 987 } |
| 988 |
| 989 |
| 990 void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) { |
| 991 if (!(src.AddressUsesRegister(dst))) { |
| 992 movl(dst, Immediate(kSmiTagMask)); |
| 993 andl(dst, src); |
| 994 } else { |
| 995 movl(dst, src); |
| 996 andl(dst, Immediate(kSmiTagMask)); |
| 997 } |
| 998 } |
| 999 |
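For readers less familiar with the x64 Smi representation that the two checks above rely on, here is a rough standalone sketch of the encoding, assuming the usual x64 constants of this era (payload in the upper 32 bits, tag bit 0 for Smis, kSmiTagMask == 1). The constant names are kept close to the source but this is only an illustration, not the V8 implementation.

#include <cstdint>
#include <cassert>

// Assumed constants mirroring x64 V8 of this era (illustrative, not authoritative).
const int64_t kSmiTag = 0;       // Smis have a clear low bit.
const int64_t kSmiTagMask = 1;   // Heap object pointers have the low bit set.
const int kSmiShift = 32;        // The 32-bit payload lives in the upper half of the word.

// Encode a 32-bit value as an x64 Smi: shift it into the upper 32 bits.
int64_t ToSmi(int32_t value) { return static_cast<int64_t>(value) << kSmiShift; }

int main() {
  // CheckSmiToIndicator: masking with kSmiTagMask yields 0 for a Smi and
  // kSmiTagMask for a heap object, whose low bit is set.
  assert((ToSmi(42) & kSmiTagMask) == kSmiTag);

  // CheckUInteger32ValidSmiValue: an unsigned 32-bit value only fits if its
  // high bit is clear, because the Smi payload is interpreted as a signed int32.
  uint32_t ok = 0x7FFFFFFFu;       // High bit clear: representable.
  uint32_t too_big = 0x80000000u;  // High bit set: would turn negative.
  assert(static_cast<int32_t>(ok) >= 0);
  assert(static_cast<int32_t>(too_big) < 0);
  return 0;
}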
| 1000 |
970 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { | 1001 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { |
971 if (constant->value() == 0) { | 1002 if (constant->value() == 0) { |
972 if (!dst.is(src)) { | 1003 if (!dst.is(src)) { |
973 movq(dst, src); | 1004 movq(dst, src); |
974 } | 1005 } |
975 return; | 1006 return; |
976 } else if (dst.is(src)) { | 1007 } else if (dst.is(src)) { |
977 ASSERT(!dst.is(kScratchRegister)); | 1008 ASSERT(!dst.is(kScratchRegister)); |
978 switch (constant->value()) { | 1009 switch (constant->value()) { |
979 case 1: | 1010 case 1: |
(...skipping 440 matching lines...)
1420 pop(r8); | 1451 pop(r8); |
1421 pop(rdi); | 1452 pop(rdi); |
1422 pop(rsi); | 1453 pop(rsi); |
1423 pop(rbx); | 1454 pop(rbx); |
1424 pop(rdx); | 1455 pop(rdx); |
1425 pop(rcx); | 1456 pop(rcx); |
1426 pop(rax); | 1457 pop(rax); |
1427 } | 1458 } |
1428 | 1459 |
1429 | 1460 |
| 1461 void MacroAssembler::Dropad() { |
| 1462 const int kRegistersPushedByPushad = 11; |
| 1463 addq(rsp, Immediate(kRegistersPushedByPushad * kPointerSize)); |
| 1464 } |
| 1465 |
| 1466 |
| 1467 // Order general registers are pushed by Pushad: |
| 1468 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14. |
| 1469 int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = { |
| 1470 0, |
| 1471 1, |
| 1472 2, |
| 1473 3, |
| 1474 -1, |
| 1475 -1, |
| 1476 4, |
| 1477 5, |
| 1478 6, |
| 1479 7, |
| 1480 -1, |
| 1481 8, |
| 1482 9, |
| 1483 -1, |
| 1484 10, |
| 1485 -1 |
| 1486 }; |
| 1487 |
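A quick standalone sanity check of the table above, assuming Register::kNumRegisters == 16: the eleven registers pushed by Pushad get indices 0 through 10, and every register that Pushad skips is marked -1. This only mirrors the values in the patch; it is not part of V8.

#include <cassert>

int main() {
  // Copy of kSafepointPushRegisterIndices from the patch (16 x64 registers).
  const int indices[16] = { 0, 1, 2, 3, -1, -1, 4, 5, 6, 7, -1, 8, 9, -1, 10, -1 };
  const int kRegistersPushedByPushad = 11;  // Matches Dropad() above.

  bool seen[kRegistersPushedByPushad] = { false };
  int pushed = 0;
  for (int i = 0; i < 16; i++) {
    if (indices[i] < 0) continue;           // Register not saved by Pushad.
    assert(indices[i] < kRegistersPushedByPushad);
    assert(!seen[indices[i]]);              // Each safepoint slot is used exactly once.
    seen[indices[i]] = true;
    pushed++;
  }
  assert(pushed == kRegistersPushedByPushad);
  return 0;
}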
| 1488 |
1430 void MacroAssembler::PushTryHandler(CodeLocation try_location, | 1489 void MacroAssembler::PushTryHandler(CodeLocation try_location, |
1431 HandlerType type) { | 1490 HandlerType type) { |
1432 // Adjust this code if not the case. | 1491 // Adjust this code if not the case. |
1433 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); | 1492 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); |
1434 | 1493 |
1435 // The pc (return address) is already on TOS. This code pushes state, | 1494 // The pc (return address) is already on TOS. This code pushes state, |
1436 // frame pointer and current handler. Check that they are expected | 1495 // frame pointer and current handler. Check that they are expected |
1437 // next on the stack, in that order. | 1496 // next on the stack, in that order. |
1438 ASSERT_EQ(StackHandlerConstants::kStateOffset, | 1497 ASSERT_EQ(StackHandlerConstants::kStateOffset, |
1439 StackHandlerConstants::kPCOffset - kPointerSize); | 1498 StackHandlerConstants::kPCOffset - kPointerSize); |
(...skipping 328 matching lines...)
1768 movq(r14, rax); // Backup rax before we use it. | 1827 movq(r14, rax); // Backup rax before we use it. |
1769 } | 1828 } |
1770 | 1829 |
1771 movq(rax, rbp); | 1830 movq(rax, rbp); |
1772 store_rax(c_entry_fp_address); | 1831 store_rax(c_entry_fp_address); |
1773 movq(rax, rsi); | 1832 movq(rax, rsi); |
1774 store_rax(context_address); | 1833 store_rax(context_address); |
1775 } | 1834 } |
1776 | 1835 |
1777 | 1836 |
1778 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space) { | 1837 void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space, |
| 1838 bool save_doubles) { |
1779 #ifdef _WIN64 | 1839 #ifdef _WIN64 |
1780 const int kShaddowSpace = 4; | 1840 const int kShadowSpace = 4; |
1781 arg_stack_space += kShaddowSpace; | 1841 arg_stack_space += kShadowSpace; |
1782 #endif | 1842 #endif |
1783 if (arg_stack_space > 0) { | 1843 // Optionally save all XMM registers. |
| 1844 if (save_doubles) { |
| 1845 CpuFeatures::Scope scope(SSE2); |
| 1846 int space = XMMRegister::kNumRegisters * kDoubleSize + |
| 1847 arg_stack_space * kPointerSize; |
| 1848 subq(rsp, Immediate(space)); |
| 1849 int offset = -2 * kPointerSize; |
| 1850 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { |
| 1851 XMMRegister reg = XMMRegister::FromAllocationIndex(i); |
| 1852 movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg); |
| 1853 } |
| 1854 } else if (arg_stack_space > 0) { |
1784 subq(rsp, Immediate(arg_stack_space * kPointerSize)); | 1855 subq(rsp, Immediate(arg_stack_space * kPointerSize)); |
1785 } | 1856 } |
1786 | 1857 |
1787 // Get the required frame alignment for the OS. | 1858 // Get the required frame alignment for the OS. |
1788 static const int kFrameAlignment = OS::ActivationFrameAlignment(); | 1859 static const int kFrameAlignment = OS::ActivationFrameAlignment(); |
1789 if (kFrameAlignment > 0) { | 1860 if (kFrameAlignment > 0) { |
1790 ASSERT(IsPowerOf2(kFrameAlignment)); | 1861 ASSERT(IsPowerOf2(kFrameAlignment)); |
1791 movq(kScratchRegister, Immediate(-kFrameAlignment)); | 1862 movq(kScratchRegister, Immediate(-kFrameAlignment)); |
1792 and_(rsp, kScratchRegister); | 1863 and_(rsp, kScratchRegister); |
1793 } | 1864 } |
1794 | 1865 |
1795 // Patch the saved entry sp. | 1866 // Patch the saved entry sp. |
1796 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); | 1867 movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp); |
1797 } | 1868 } |
1798 | 1869 |
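To follow the offset arithmetic in EnterExitFrameEpilogue above, here is a small standalone sketch of where each saved XMM register lands relative to rbp, assuming kPointerSize == 8 and kDoubleSize == 8 (the x64 values); the allocatable XMM register count used here is a placeholder, not authoritative.

#include <cstdio>

int main() {
  const int kPointerSize = 8;         // x64 assumption.
  const int kDoubleSize = 8;          // Size of one saved XMM slot.
  const int kNumAllocatableXMM = 15;  // Assumed count; V8 defines the real value.

  // EnterExitFrameEpilogue stores register i at rbp + offset - (i + 1) * kDoubleSize,
  // with offset = -2 * kPointerSize, i.e. just below the exit frame's fixed slots.
  const int offset = -2 * kPointerSize;
  for (int i = 0; i < kNumAllocatableXMM; i++) {
    printf("xmm slot %2d saved at rbp%d\n", i, offset - (i + 1) * kDoubleSize);
  }
  // LeaveExitFrame reloads from the same rbp-relative slots, so the doubles can be
  // restored without recomputing any rsp-based addresses.
  return 0;
}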
1799 | 1870 |
1800 void MacroAssembler::EnterExitFrame(int arg_stack_space) { | 1871 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) { |
1801 EnterExitFramePrologue(true); | 1872 EnterExitFramePrologue(true); |
1802 | 1873 |
1803 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, | 1874 // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame, |
1804 // so it must be retained across the C-call. | 1875 // so it must be retained across the C-call. |
1805 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; | 1876 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize; |
1806 lea(r12, Operand(rbp, r14, times_pointer_size, offset)); | 1877 lea(r12, Operand(rbp, r14, times_pointer_size, offset)); |
1807 | 1878 |
1808 EnterExitFrameEpilogue(arg_stack_space); | 1879 EnterExitFrameEpilogue(arg_stack_space, save_doubles); |
1809 } | 1880 } |
1810 | 1881 |
1811 | 1882 |
1812 void MacroAssembler::EnterApiExitFrame(int arg_stack_space) { | 1883 void MacroAssembler::EnterApiExitFrame(int arg_stack_space) { |
1813 EnterExitFramePrologue(false); | 1884 EnterExitFramePrologue(false); |
1814 EnterExitFrameEpilogue(arg_stack_space); | 1885 EnterExitFrameEpilogue(arg_stack_space, false); |
1815 } | 1886 } |
1816 | 1887 |
1817 | 1888 |
1818 void MacroAssembler::LeaveExitFrame() { | 1889 void MacroAssembler::LeaveExitFrame(bool save_doubles) { |
1819 // Registers: | 1890 // Registers: |
1820 // r12 : argv | 1891 // r12 : argv |
1821 | 1892 if (save_doubles) { |
| 1893 int offset = -2 * kPointerSize; |
| 1894 for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) { |
| 1895 XMMRegister reg = XMMRegister::FromAllocationIndex(i); |
| 1896 movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize))); |
| 1897 } |
| 1898 } |
1822 // Get the return address from the stack and restore the frame pointer. | 1899 // Get the return address from the stack and restore the frame pointer. |
1823 movq(rcx, Operand(rbp, 1 * kPointerSize)); | 1900 movq(rcx, Operand(rbp, 1 * kPointerSize)); |
1824 movq(rbp, Operand(rbp, 0 * kPointerSize)); | 1901 movq(rbp, Operand(rbp, 0 * kPointerSize)); |
1825 | 1902 |
1826 // Pop everything up to and including the arguments and the receiver | 1903 // Drop everything up to and including the arguments and the receiver |
1827 // from the caller stack. | 1904 // from the caller stack. |
1828 lea(rsp, Operand(r12, 1 * kPointerSize)); | 1905 lea(rsp, Operand(r12, 1 * kPointerSize)); |
1829 | 1906 |
1830 // Push the return address to get ready to return. | 1907 // Push the return address to get ready to return. |
1831 push(rcx); | 1908 push(rcx); |
1832 | 1909 |
1833 LeaveExitFrameEpilogue(); | 1910 LeaveExitFrameEpilogue(); |
1834 } | 1911 } |
1835 | 1912 |
1836 | 1913 |
(...skipping 548 matching lines...)
2385 CPU::FlushICache(address_, size_); | 2462 CPU::FlushICache(address_, size_); |
2386 | 2463 |
2387 // Check that the code was patched as expected. | 2464 // Check that the code was patched as expected. |
2388 ASSERT(masm_.pc_ == address_ + size_); | 2465 ASSERT(masm_.pc_ == address_ + size_); |
2389 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 2466 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
2390 } | 2467 } |
2391 | 2468 |
2392 } } // namespace v8::internal | 2469 } } // namespace v8::internal |
2393 | 2470 |
2394 #endif // V8_TARGET_ARCH_X64 | 2471 #endif // V8_TARGET_ARCH_X64 |