Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(18)

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 6597029: [Isolates] Merge r 6300:6500 from bleeding_edge to isolates. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/macro-assembler-x64.h ('k') | src/x64/stub-cache-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 357 matching lines...) Expand 10 before | Expand all | Expand 10 after
368 // there is no difference in using either key. 368 // there is no difference in using either key.
369 Integer32ToSmi(index, hash); 369 Integer32ToSmi(index, hash);
370 } 370 }
371 371
372 372
// Convenience overload: resolves the runtime function for |id| and
// delegates to CallRuntime(const Runtime::Function*, int).
void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}
376 376
377 377
// Calls the runtime function identified by |id| through a CEntryStub that
// is configured to save and restore the XMM (double) registers across the
// call.  Used when the caller has live floating-point values that the
// runtime call must not clobber.
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  // The C entry stub expects the argument count in rax and the runtime
  // function's entry address in rbx.
  Set(rax, function->nargs);
  movq(rbx, ExternalReference(function));
  CEntryStub ces(1);
  ces.SaveDoubles();  // Request XMM register save/restore in the stub.
  CallStub(&ces);
}
386
387
// Convenience overload of TryCallRuntime: resolves |id| to its
// Runtime::Function and forwards.  Returns a MaybeObject so callers can
// propagate allocation failures instead of entering the runtime's
// failure-handling path.
MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
                                            int num_arguments) {
  return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
}
382 392
383 393
384 void MacroAssembler::CallRuntime(const Runtime::Function* f, 394 void MacroAssembler::CallRuntime(const Runtime::Function* f,
385 int num_arguments) { 395 int num_arguments) {
386 // If the expected number of arguments of the runtime function is 396 // If the expected number of arguments of the runtime function is
387 // constant, we check that the actual number of arguments match the 397 // constant, we check that the actual number of arguments match the
(...skipping 496 matching lines...) Expand 10 before | Expand all | Expand 10 after
884 } 894 }
885 895
886 896
// Tests whether |src| holds a smi.  Returns the condition (zero) that is
// set when the value is a smi; callers branch on the returned condition.
Condition MacroAssembler::CheckSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);  // The test below relies on the smi tag being 0.
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
892 902
893 903
// Memory-operand variant of CheckSmi: tests whether the value at |src|
// is a smi.  Returns the condition (zero) set when it is a smi.
Condition MacroAssembler::CheckSmi(const Operand& src) {
  ASSERT_EQ(0, kSmiTag);  // The test below relies on the smi tag being 0.
  testb(src, Immediate(kSmiTagMask));
  return zero;
}
909
910
// Tests whether |src| is a smi with a non-negative value, i.e. both the
// tag bit (bit 0) and the sign bit (bit 63) are clear.  Returns the
// condition (zero) set when that holds.
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  ASSERT_EQ(0, kSmiTag);
  // Make mask 0x8000000000000001 and test that both bits are zero.
  // Rotating left by one moves the sign bit into bit 0 and the tag bit
  // into bit 1, so a single byte test of the low two bits suffices.
  movq(kScratchRegister, src);
  rol(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}
902 919
903 920
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after
959 976
960 977
// Tests whether the unsigned 32-bit value in |src| fits in a smi.
// Returns the condition (positive) set when it does.
Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  // An unsigned 32-bit integer value is valid as long as the high bit
  // is not set.
  testl(src, src);
  return positive;
}
967 984
968 985
986 void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
987 if (dst.is(src)) {
988 andl(dst, Immediate(kSmiTagMask));
989 } else {
990 movl(dst, Immediate(kSmiTagMask));
991 andl(dst, src);
992 }
993 }
994
995
996 void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
997 if (!(src.AddressUsesRegister(dst))) {
998 movl(dst, Immediate(kSmiTagMask));
999 andl(dst, src);
1000 } else {
1001 movl(dst, src);
1002 andl(dst, Immediate(kSmiTagMask));
1003 }
1004 }
1005
1006
969 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) { 1007 void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
970 if (constant->value() == 0) { 1008 if (constant->value() == 0) {
971 if (!dst.is(src)) { 1009 if (!dst.is(src)) {
972 movq(dst, src); 1010 movq(dst, src);
973 } 1011 }
974 return; 1012 return;
975 } else if (dst.is(src)) { 1013 } else if (dst.is(src)) {
976 ASSERT(!dst.is(kScratchRegister)); 1014 ASSERT(!dst.is(kScratchRegister));
977 switch (constant->value()) { 1015 switch (constant->value()) {
978 case 1: 1016 case 1:
(...skipping 294 matching lines...) Expand 10 before | Expand all | Expand 10 after
1273 } 1311 }
1274 1312
1275 1313
// Register-to-register move that elides the instruction when source and
// destination are the same register.
void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movq(dst, src);
  }
}
1281 1319
1282 1320
1283
1284
// Loads the object referenced by |source| into |dst|.  Smis are loaded as
// immediates via the smi Move overload; heap objects are loaded with an
// embedded-object relocation entry so the GC can update the pointer.
void MacroAssembler::Move(Register dst, Handle<Object> source) {
  ASSERT(!source->IsFailure());  // Failures must not escape into generated code.
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
  }
}
1293 1329
1294 1330
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
1387 call(kScratchRegister); 1423 call(kScratchRegister);
1388 } 1424 }
1389 1425
1390 1426
// Emits a call to |code_object| with relocation mode |rmode|, which must
// be a code-target mode so the call site is patchable.
void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
}
1395 1431
1396 1432
// Pushes all general-purpose registers that may hold live values (11 in
// total) onto the stack, skipping rsp/rbp and the VM-reserved registers.
// The push order here defines the safepoint register layout; Popad,
// Dropad and kSafepointPushRegisterIndices must stay in sync with it.
void MacroAssembler::Pushad() {
  push(rax);
  push(rcx);
  push(rdx);
  push(rbx);
  // Not pushing rsp or rbp.
  push(rsi);
  push(rdi);
  push(r8);
  push(r9);
  // r10 is kScratchRegister.
  push(r11);
  push(r12);
  // r13 is kRootRegister.
  push(r14);
  // r15 is kSmiConstantRegister
}
1450
1451
// Pops the registers saved by Pushad, in exact reverse order of the
// pushes so each register receives its own saved value.
void MacroAssembler::Popad() {
  pop(r14);
  pop(r12);
  pop(r11);
  pop(r9);
  pop(r8);
  pop(rdi);
  pop(rsi);
  pop(rbx);
  pop(rdx);
  pop(rcx);
  pop(rax);
}
1465
1466
// Discards the register block saved by Pushad without restoring any
// values.  The count must match the number of push() calls in Pushad.
void MacroAssembler::Dropad() {
  const int kRegistersPushedByPushad = 11;
  addq(rsp, Immediate(kRegistersPushedByPushad * kPointerSize));
}
1471
1472
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
// Maps each register code to its index in the Pushad save area, or -1 for
// registers Pushad does not save (rsp, rbp, r10, r13, r15).
int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
    0,   // rax
    1,   // rcx
    2,   // rdx
    3,   // rbx
    -1,  // rsp - not saved.
    -1,  // rbp - not saved.
    4,   // rsi
    5,   // rdi
    6,   // r8
    7,   // r9
    -1,  // r10 - kScratchRegister, not saved.
    8,   // r11
    9,   // r12
    -1,  // r13 - kRootRegister, not saved.
    10,  // r14
    -1   // r15 - kSmiConstantRegister, not saved.
};
1493
1494
1397 void MacroAssembler::PushTryHandler(CodeLocation try_location, 1495 void MacroAssembler::PushTryHandler(CodeLocation try_location,
1398 HandlerType type) { 1496 HandlerType type) {
1399 // Adjust this code if not the case. 1497 // Adjust this code if not the case.
1400 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); 1498 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1401 1499
1402 // The pc (return address) is already on TOS. This code pushes state, 1500 // The pc (return address) is already on TOS. This code pushes state,
1403 // frame pointer and current handler. Check that they are expected 1501 // frame pointer and current handler. Check that they are expected
1404 // next on the stack, in that order. 1502 // next on the stack, in that order.
1405 ASSERT_EQ(StackHandlerConstants::kStateOffset, 1503 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1406 StackHandlerConstants::kPCOffset - kPointerSize); 1504 StackHandlerConstants::kPCOffset - kPointerSize);
(...skipping 328 matching lines...) Expand 10 before | Expand all | Expand 10 after
1735 movq(r14, rax); // Backup rax before we use it. 1833 movq(r14, rax); // Backup rax before we use it.
1736 } 1834 }
1737 1835
1738 movq(rax, rbp); 1836 movq(rax, rbp);
1739 store_rax(c_entry_fp_address); 1837 store_rax(c_entry_fp_address);
1740 movq(rax, rsi); 1838 movq(rax, rsi);
1741 store_rax(context_address); 1839 store_rax(context_address);
1742 } 1840 }
1743 1841
1744 1842
// Finishes setting up an exit frame: reserves stack space for outgoing C
// arguments (and, when |save_doubles| is set, for the XMM registers, which
// are stored below the fixed frame slots), aligns rsp to the OS frame
// alignment, and records the final rsp in the frame's SP slot.
void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  // The Windows x64 ABI requires the caller to reserve four pointer-sized
  // "shadow space" slots for the callee.
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatures::Scope scope(SSE2);
    // Space is reserved for all XMM registers even though only the
    // allocatable ones are stored below.
    int space = XMMRegister::kNumRegisters * kDoubleSize +
        arg_stack_space * kPointerSize;
    subq(rsp, Immediate(space));
    // The doubles start two slots below rbp (below the code and SP slots
    // of the exit frame); LeaveExitFrame restores from the same offsets.
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subq(rsp, Immediate(arg_stack_space * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    movq(kScratchRegister, Immediate(-kFrameAlignment));
    and_(rsp, kScratchRegister);
  }

  // Patch the saved entry sp.
  movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}
1765 1875
1766 1876
// Enters an exit frame for a call into C code.  |arg_stack_space| is the
// number of pointer-sized stack slots to reserve for outgoing arguments;
// |save_doubles| additionally saves the XMM registers in the frame.
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
  // so it must be retained across the C-call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  lea(r12, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}
1777 1887
1778 1888
// Enters an exit frame for an API callback.  Unlike EnterExitFrame this
// does not set up argv (prologue argument false) and never saves doubles.
void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}
1783 1893
1784 1894
// Leaves an exit frame entered with EnterExitFrame.  |save_doubles| must
// match the value passed on entry so the XMM registers are restored from
// the same frame offsets they were stored at.
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r12 : argv
  if (save_doubles) {
    // Mirror of the store loop in EnterExitFrameEpilogue.
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
    }
  }
  // Get the return address from the stack and restore the frame pointer.
  movq(rcx, Operand(rbp, 1 * kPointerSize));
  movq(rbp, Operand(rbp, 0 * kPointerSize));

  // Drop everything up to and including the arguments and the receiver
  // from the caller stack.
  lea(rsp, Operand(r12, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(rcx);

  LeaveExitFrameEpilogue();
}
1802 1918
1803 1919
(...skipping 577 matching lines...) Expand 10 before | Expand all | Expand 10 after
2381 CPU::FlushICache(address_, size_); 2497 CPU::FlushICache(address_, size_);
2382 2498
2383 // Check that the code was patched as expected. 2499 // Check that the code was patched as expected.
2384 ASSERT(masm_.pc_ == address_ + size_); 2500 ASSERT(masm_.pc_ == address_ + size_);
2385 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2501 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2386 } 2502 }
2387 2503
2388 } } // namespace v8::internal 2504 } } // namespace v8::internal
2389 2505
2390 #endif // V8_TARGET_ARCH_X64 2506 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/macro-assembler-x64.h ('k') | src/x64/stub-cache-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698