Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(76)

Side by Side Diff: src/x64/macro-assembler-x64.cc

Issue 6614010: [Isolates] Merge 6700:7030 from bleeding_edge to isolates. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: '' Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/macro-assembler-x64.h ('k') | src/x64/regexp-macro-assembler-x64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
129 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE); 129 movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
130 } 130 }
131 } 131 }
132 132
133 133
134 void MacroAssembler::RecordWrite(Register object, 134 void MacroAssembler::RecordWrite(Register object,
135 Register address, 135 Register address,
136 Register value) { 136 Register value) {
137 // The compiled code assumes that record write doesn't change the 137 // The compiled code assumes that record write doesn't change the
138 // context register, so we check that none of the clobbered 138 // context register, so we check that none of the clobbered
139 // registers are esi. 139 // registers are rsi.
140 ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi)); 140 ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
141 141
142 // First, check if a write barrier is even needed. The tests below 142 // First, check if a write barrier is even needed. The tests below
143 // catch stores of Smis and stores into young gen. 143 // catch stores of Smis and stores into young gen.
144 Label done; 144 Label done;
145 JumpIfSmi(value, &done); 145 JumpIfSmi(value, &done);
146 146
147 InNewSpace(object, value, equal, &done); 147 InNewSpace(object, value, equal, &done);
148 148
149 RecordWriteHelper(object, address, value); 149 RecordWriteHelper(object, address, value);
(...skipping 472 matching lines...) Expand 10 before | Expand all | Expand 10 after
622 622
623 MaybeObject* MacroAssembler::TryJumpToExternalReference( 623 MaybeObject* MacroAssembler::TryJumpToExternalReference(
624 const ExternalReference& ext, int result_size) { 624 const ExternalReference& ext, int result_size) {
625 // Set the entry point and jump to the C entry runtime stub. 625 // Set the entry point and jump to the C entry runtime stub.
626 movq(rbx, ext); 626 movq(rbx, ext);
627 CEntryStub ces(result_size); 627 CEntryStub ces(result_size);
628 return TryTailCallStub(&ces); 628 return TryTailCallStub(&ces);
629 } 629 }
630 630
631 631
632 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) { 632 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
633 InvokeFlag flag,
634 PostCallGenerator* post_call_generator) {
633 // Calls are not allowed in some stubs. 635 // Calls are not allowed in some stubs.
634 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls()); 636 ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
635 637
636 // Rely on the assertion to check that the number of provided 638 // Rely on the assertion to check that the number of provided
637 // arguments match the expected number of arguments. Fake a 639 // arguments match the expected number of arguments. Fake a
638 // parameter count to avoid emitting code to do the check. 640 // parameter count to avoid emitting code to do the check.
639 ParameterCount expected(0); 641 ParameterCount expected(0);
640 GetBuiltinEntry(rdx, id); 642 GetBuiltinEntry(rdx, id);
641 InvokeCode(rdx, expected, expected, flag); 643 InvokeCode(rdx, expected, expected, flag, post_call_generator);
642 } 644 }
643 645
644 646
645 void MacroAssembler::GetBuiltinFunction(Register target, 647 void MacroAssembler::GetBuiltinFunction(Register target,
646 Builtins::JavaScript id) { 648 Builtins::JavaScript id) {
647 // Load the builtins object into target register. 649 // Load the builtins object into target register.
648 movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); 650 movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
649 movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset)); 651 movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
650 movq(target, FieldOperand(target, 652 movq(target, FieldOperand(target,
651 JSBuiltinsObject::OffsetOfFunctionWithId(id))); 653 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
(...skipping 789 matching lines...) Expand 10 before | Expand all | Expand 10 after
1441 push(rsi); 1443 push(rsi);
1442 push(rdi); 1444 push(rdi);
1443 push(r8); 1445 push(r8);
1444 push(r9); 1446 push(r9);
1445 // r10 is kScratchRegister. 1447 // r10 is kScratchRegister.
1446 push(r11); 1448 push(r11);
1447 push(r12); 1449 push(r12);
1448 // r13 is kRootRegister. 1450 // r13 is kRootRegister.
1449 push(r14); 1451 push(r14);
1450 // r15 is kSmiConstantRegister 1452 // r15 is kSmiConstantRegister
1453 STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
1454 // Use lea for symmetry with Popad.
1455 int sp_delta =
1456 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1457 lea(rsp, Operand(rsp, -sp_delta));
1451 } 1458 }
1452 1459
1453 1460
1454 void MacroAssembler::Popad() { 1461 void MacroAssembler::Popad() {
1462 // Popad must not change the flags, so use lea instead of addq.
1463 int sp_delta =
1464 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1465 lea(rsp, Operand(rsp, sp_delta));
1455 pop(r14); 1466 pop(r14);
1456 pop(r12); 1467 pop(r12);
1457 pop(r11); 1468 pop(r11);
1458 pop(r9); 1469 pop(r9);
1459 pop(r8); 1470 pop(r8);
1460 pop(rdi); 1471 pop(rdi);
1461 pop(rsi); 1472 pop(rsi);
1462 pop(rbx); 1473 pop(rbx);
1463 pop(rdx); 1474 pop(rdx);
1464 pop(rcx); 1475 pop(rcx);
1465 pop(rax); 1476 pop(rax);
1466 } 1477 }
1467 1478
1468 1479
1469 void MacroAssembler::Dropad() { 1480 void MacroAssembler::Dropad() {
1470 const int kRegistersPushedByPushad = 11; 1481 addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
1471 addq(rsp, Immediate(kRegistersPushedByPushad * kPointerSize));
1472 } 1482 }
1473 1483
1474 1484
1475 // Order in which general registers are pushed by Pushad: 1485 // Order in which general registers are pushed by Pushad:
1476 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14. 1486 // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
1477 int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = { 1487 int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
1478 0, 1488 0,
1479 1, 1489 1,
1480 2, 1490 2,
1481 3, 1491 3,
1482 -1, 1492 -1,
1483 -1, 1493 -1,
1484 4, 1494 4,
1485 5, 1495 5,
1486 6, 1496 6,
1487 7, 1497 7,
1488 -1, 1498 -1,
1489 8, 1499 8,
1490 9, 1500 9,
1491 -1, 1501 -1,
1492 10, 1502 10,
1493 -1 1503 -1
1494 }; 1504 };
1495 1505
1496 1506
1507 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
1508 movq(SafepointRegisterSlot(dst), src);
1509 }
1510
1511
1512 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
1513 movq(dst, SafepointRegisterSlot(src));
1514 }
1515
1516
1517 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
1518 return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
1519 }
1520
1521
1497 void MacroAssembler::PushTryHandler(CodeLocation try_location, 1522 void MacroAssembler::PushTryHandler(CodeLocation try_location,
1498 HandlerType type) { 1523 HandlerType type) {
1499 // Adjust this code if not the case. 1524 // Adjust this code if not the case.
1500 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); 1525 ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1501 1526
1502 // The pc (return address) is already on TOS. This code pushes state, 1527 // The pc (return address) is already on TOS. This code pushes state,
1503 // frame pointer and current handler. Check that they are expected 1528 // frame pointer and current handler. Check that they are expected
1504 // next on the stack, in that order. 1529 // next on the stack, in that order.
1505 ASSERT_EQ(StackHandlerConstants::kStateOffset, 1530 ASSERT_EQ(StackHandlerConstants::kStateOffset,
1506 StackHandlerConstants::kPCOffset - kPointerSize); 1531 StackHandlerConstants::kPCOffset - kPointerSize);
(...skipping 28 matching lines...) Expand all
1535 void MacroAssembler::PopTryHandler() { 1560 void MacroAssembler::PopTryHandler() {
1536 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); 1561 ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1537 // Unlink this handler. 1562 // Unlink this handler.
1538 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); 1563 movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
1539 pop(Operand(kScratchRegister, 0)); 1564 pop(Operand(kScratchRegister, 0));
1540 // Remove the remaining fields. 1565 // Remove the remaining fields.
1541 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); 1566 addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1542 } 1567 }
1543 1568
1544 1569
1570 void MacroAssembler::Throw(Register value) {
1571 // Check that stack should contain next handler, frame pointer, state and
1572 // return address in that order.
1573 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1574 StackHandlerConstants::kStateOffset);
1575 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1576 StackHandlerConstants::kPCOffset);
1577 // Keep thrown value in rax.
1578 if (!value.is(rax)) {
1579 movq(rax, value);
1580 }
1581
1582 ExternalReference handler_address(Isolate::k_handler_address);
1583 movq(kScratchRegister, handler_address);
1584 movq(rsp, Operand(kScratchRegister, 0));
1585 // Get the next handler in the chain.
1586 pop(rcx);
1587 movq(Operand(kScratchRegister, 0), rcx);
1588 pop(rbp); // pop frame pointer
1589 pop(rdx); // remove state
1590
1591 // Before returning we restore the context from the frame pointer if not NULL.
1592 // The frame pointer is NULL in the exception handler of a JS entry frame.
1593 Set(rsi, 0); // Tentatively set context pointer to NULL
1594 NearLabel skip;
1595 cmpq(rbp, Immediate(0));
1596 j(equal, &skip);
1597 movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1598 bind(&skip);
1599 ret(0);
1600 }
1601
1602
1603 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
1604 Register value) {
1605 // Keep thrown value in rax.
1606 if (!value.is(rax)) {
1607 movq(rax, value);
1608 }
1609 // Fetch top stack handler.
1610 ExternalReference handler_address(Isolate::k_handler_address);
1611 movq(kScratchRegister, handler_address);
1612 movq(rsp, Operand(kScratchRegister, 0));
1613
1614 // Unwind the handlers until the ENTRY handler is found.
1615 NearLabel loop, done;
1616 bind(&loop);
1617 // Load the type of the current stack handler.
1618 const int kStateOffset = StackHandlerConstants::kStateOffset;
1619 cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
1620 j(equal, &done);
1621 // Fetch the next handler in the list.
1622 const int kNextOffset = StackHandlerConstants::kNextOffset;
1623 movq(rsp, Operand(rsp, kNextOffset));
1624 jmp(&loop);
1625 bind(&done);
1626
1627 // Set the top handler address to next handler past the current ENTRY handler.
1628 movq(kScratchRegister, handler_address);
1629 pop(Operand(kScratchRegister, 0));
1630
1631 if (type == OUT_OF_MEMORY) {
1632 // Set external caught exception to false.
1633 ExternalReference external_caught(
1634 Isolate::k_external_caught_exception_address);
1635 movq(rax, Immediate(false));
1636 store_rax(external_caught);
1637
1638 // Set pending exception and rax to out of memory exception.
1639 ExternalReference pending_exception(Isolate::k_pending_exception_address);
1640 movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
1641 store_rax(pending_exception);
1642 }
1643
1644 // Clear the context pointer.
1645 Set(rsi, 0);
1646
1647 // Restore registers from handler.
1648 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
1649 StackHandlerConstants::kFPOffset);
1650 pop(rbp); // FP
1651 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1652 StackHandlerConstants::kStateOffset);
1653 pop(rdx); // State
1654
1655 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1656 StackHandlerConstants::kPCOffset);
1657 ret(0);
1658 }
1659
1660
1545 void MacroAssembler::Ret() { 1661 void MacroAssembler::Ret() {
1546 ret(0); 1662 ret(0);
1547 } 1663 }
1548 1664
1549 1665
1550 void MacroAssembler::Ret(int bytes_dropped, Register scratch) { 1666 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
1551 if (is_uint16(bytes_dropped)) { 1667 if (is_uint16(bytes_dropped)) {
1552 ret(bytes_dropped); 1668 ret(bytes_dropped);
1553 } else { 1669 } else {
1554 pop(scratch); 1670 pop(scratch);
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
1609 } 1725 }
1610 1726
1611 1727
1612 void MacroAssembler::AbortIfNotSmi(Register object) { 1728 void MacroAssembler::AbortIfNotSmi(Register object) {
1613 NearLabel ok; 1729 NearLabel ok;
1614 Condition is_smi = CheckSmi(object); 1730 Condition is_smi = CheckSmi(object);
1615 Assert(is_smi, "Operand is not a smi"); 1731 Assert(is_smi, "Operand is not a smi");
1616 } 1732 }
1617 1733
1618 1734
1735 void MacroAssembler::AbortIfNotString(Register object) {
1736 testb(object, Immediate(kSmiTagMask));
1737 Assert(not_equal, "Operand is not a string");
1738 push(object);
1739 movq(object, FieldOperand(object, HeapObject::kMapOffset));
1740 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
1741 pop(object);
1742 Assert(below, "Operand is not a string");
1743 }
1744
1745
1619 void MacroAssembler::AbortIfNotRootValue(Register src, 1746 void MacroAssembler::AbortIfNotRootValue(Register src,
1620 Heap::RootListIndex root_value_index, 1747 Heap::RootListIndex root_value_index,
1621 const char* message) { 1748 const char* message) {
1622 ASSERT(!src.is(kScratchRegister)); 1749 ASSERT(!src.is(kScratchRegister));
1623 LoadRoot(kScratchRegister, root_value_index); 1750 LoadRoot(kScratchRegister, root_value_index);
1624 cmpq(src, kScratchRegister); 1751 cmpq(src, kScratchRegister);
1625 Check(equal, message); 1752 Check(equal, message);
1626 } 1753 }
1627 1754
1628 1755
(...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after
1727 movq(rbx, ExternalReference(Runtime::kDebugBreak)); 1854 movq(rbx, ExternalReference(Runtime::kDebugBreak));
1728 CEntryStub ces(1); 1855 CEntryStub ces(1);
1729 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); 1856 Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
1730 } 1857 }
1731 #endif // ENABLE_DEBUGGER_SUPPORT 1858 #endif // ENABLE_DEBUGGER_SUPPORT
1732 1859
1733 1860
1734 void MacroAssembler::InvokeCode(Register code, 1861 void MacroAssembler::InvokeCode(Register code,
1735 const ParameterCount& expected, 1862 const ParameterCount& expected,
1736 const ParameterCount& actual, 1863 const ParameterCount& actual,
1737 InvokeFlag flag) { 1864 InvokeFlag flag,
1865 PostCallGenerator* post_call_generator) {
1738 NearLabel done; 1866 NearLabel done;
1739 InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag); 1867 InvokePrologue(expected,
1868 actual,
1869 Handle<Code>::null(),
1870 code,
1871 &done,
1872 flag,
1873 post_call_generator);
1740 if (flag == CALL_FUNCTION) { 1874 if (flag == CALL_FUNCTION) {
1741 call(code); 1875 call(code);
1876 if (post_call_generator != NULL) post_call_generator->Generate();
1742 } else { 1877 } else {
1743 ASSERT(flag == JUMP_FUNCTION); 1878 ASSERT(flag == JUMP_FUNCTION);
1744 jmp(code); 1879 jmp(code);
1745 } 1880 }
1746 bind(&done); 1881 bind(&done);
1747 } 1882 }
1748 1883
1749 1884
1750 void MacroAssembler::InvokeCode(Handle<Code> code, 1885 void MacroAssembler::InvokeCode(Handle<Code> code,
1751 const ParameterCount& expected, 1886 const ParameterCount& expected,
1752 const ParameterCount& actual, 1887 const ParameterCount& actual,
1753 RelocInfo::Mode rmode, 1888 RelocInfo::Mode rmode,
1754 InvokeFlag flag) { 1889 InvokeFlag flag,
1890 PostCallGenerator* post_call_generator) {
1755 NearLabel done; 1891 NearLabel done;
1756 Register dummy = rax; 1892 Register dummy = rax;
1757 InvokePrologue(expected, actual, code, dummy, &done, flag); 1893 InvokePrologue(expected,
1894 actual,
1895 code,
1896 dummy,
1897 &done,
1898 flag,
1899 post_call_generator);
1758 if (flag == CALL_FUNCTION) { 1900 if (flag == CALL_FUNCTION) {
1759 Call(code, rmode); 1901 Call(code, rmode);
1902 if (post_call_generator != NULL) post_call_generator->Generate();
1760 } else { 1903 } else {
1761 ASSERT(flag == JUMP_FUNCTION); 1904 ASSERT(flag == JUMP_FUNCTION);
1762 Jump(code, rmode); 1905 Jump(code, rmode);
1763 } 1906 }
1764 bind(&done); 1907 bind(&done);
1765 } 1908 }
1766 1909
1767 1910
1768 void MacroAssembler::InvokeFunction(Register function, 1911 void MacroAssembler::InvokeFunction(Register function,
1769 const ParameterCount& actual, 1912 const ParameterCount& actual,
1770 InvokeFlag flag) { 1913 InvokeFlag flag,
1914 PostCallGenerator* post_call_generator) {
1771 ASSERT(function.is(rdi)); 1915 ASSERT(function.is(rdi));
1772 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset)); 1916 movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1773 movq(rsi, FieldOperand(function, JSFunction::kContextOffset)); 1917 movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1774 movsxlq(rbx, 1918 movsxlq(rbx,
1775 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset)); 1919 FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1776 // Advances rdx to the end of the Code object header, to the start of 1920 // Advances rdx to the end of the Code object header, to the start of
1777 // the executable code. 1921 // the executable code.
1778 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 1922 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1779 1923
1780 ParameterCount expected(rbx); 1924 ParameterCount expected(rbx);
1781 InvokeCode(rdx, expected, actual, flag); 1925 InvokeCode(rdx, expected, actual, flag, post_call_generator);
1782 } 1926 }
1783 1927
1784 1928
1785 void MacroAssembler::InvokeFunction(JSFunction* function, 1929 void MacroAssembler::InvokeFunction(JSFunction* function,
1786 const ParameterCount& actual, 1930 const ParameterCount& actual,
1787 InvokeFlag flag) { 1931 InvokeFlag flag,
1932 PostCallGenerator* post_call_generator) {
1788 ASSERT(function->is_compiled()); 1933 ASSERT(function->is_compiled());
1789 // Get the function and setup the context. 1934 // Get the function and setup the context.
1790 Move(rdi, Handle<JSFunction>(function)); 1935 Move(rdi, Handle<JSFunction>(function));
1791 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 1936 movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1792 1937
1793 if (V8::UseCrankshaft()) { 1938 if (V8::UseCrankshaft()) {
1794 // Since Crankshaft can recompile a function, we need to load 1939 // Since Crankshaft can recompile a function, we need to load
1795 // the Code object every time we call the function. 1940 // the Code object every time we call the function.
1796 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 1941 movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1797 ParameterCount expected(function->shared()->formal_parameter_count()); 1942 ParameterCount expected(function->shared()->formal_parameter_count());
1798 InvokeCode(rdx, expected, actual, flag); 1943 InvokeCode(rdx, expected, actual, flag, post_call_generator);
1799 } else { 1944 } else {
1800 // Invoke the cached code. 1945 // Invoke the cached code.
1801 Handle<Code> code(function->code()); 1946 Handle<Code> code(function->code());
1802 ParameterCount expected(function->shared()->formal_parameter_count()); 1947 ParameterCount expected(function->shared()->formal_parameter_count());
1803 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag); 1948 InvokeCode(code,
1949 expected,
1950 actual,
1951 RelocInfo::CODE_TARGET,
1952 flag,
1953 post_call_generator);
1804 } 1954 }
1805 } 1955 }
1806 1956
1807 1957
1808 void MacroAssembler::EnterFrame(StackFrame::Type type) { 1958 void MacroAssembler::EnterFrame(StackFrame::Type type) {
1809 push(rbp); 1959 push(rbp);
1810 movq(rbp, rsp); 1960 movq(rbp, rsp);
1811 push(rsi); // Context. 1961 push(rsi); // Context.
1812 Push(Smi::FromInt(type)); 1962 Push(Smi::FromInt(type));
1813 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); 1963 movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
(...skipping 572 matching lines...) Expand 10 before | Expand all | Expand 10 after
2386 // Move up the chain of contexts to the context containing the slot. 2536 // Move up the chain of contexts to the context containing the slot.
2387 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX))); 2537 movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2388 // Load the function context (which is the incoming, outer context). 2538 // Load the function context (which is the incoming, outer context).
2389 movq(dst, FieldOperand(dst, JSFunction::kContextOffset)); 2539 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2390 for (int i = 1; i < context_chain_length; i++) { 2540 for (int i = 1; i < context_chain_length; i++) {
2391 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX))); 2541 movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2392 movq(dst, FieldOperand(dst, JSFunction::kContextOffset)); 2542 movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2393 } 2543 }
2394 // The context may be an intermediate context, not a function context. 2544 // The context may be an intermediate context, not a function context.
2395 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); 2545 movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2396 } else { // context is the current function context. 2546 } else {
2397 // The context may be an intermediate context, not a function context. 2547 // Slot is in the current function context. Move it into the
2398 movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX))); 2548 // destination register in case we store into it (the write barrier
2549 // cannot be allowed to destroy the context in rsi).
2550 movq(dst, rsi);
2551 }
2552
2553 // We should not have found a 'with' context by walking the context chain
2554 // (i.e., the static scope chain and runtime context chain do not agree).
2555 // A variable occurring in such a scope should have slot type LOOKUP and
2556 // not CONTEXT.
2557 if (FLAG_debug_code) {
2558 cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2559 Check(equal, "Yo dawg, I heard you liked function contexts "
2560 "so I put function contexts in all your contexts");
2399 } 2561 }
2400 } 2562 }
2401 2563
2402 #ifdef _WIN64 2564 #ifdef _WIN64
2403 static const int kRegisterPassedArguments = 4; 2565 static const int kRegisterPassedArguments = 4;
2404 #else 2566 #else
2405 static const int kRegisterPassedArguments = 6; 2567 static const int kRegisterPassedArguments = 6;
2406 #endif 2568 #endif
2407 2569
2408 void MacroAssembler::LoadGlobalFunction(int index, Register function) { 2570 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after
2525 CPU::FlushICache(address_, size_); 2687 CPU::FlushICache(address_, size_);
2526 2688
2527 // Check that the code was patched as expected. 2689 // Check that the code was patched as expected.
2528 ASSERT(masm_.pc_ == address_ + size_); 2690 ASSERT(masm_.pc_ == address_ + size_);
2529 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); 2691 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2530 } 2692 }
2531 2693
2532 } } // namespace v8::internal 2694 } } // namespace v8::internal
2533 2695
2534 #endif // V8_TARGET_ARCH_X64 2696 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/macro-assembler-x64.h ('k') | src/x64/regexp-macro-assembler-x64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698