| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 1566 matching lines...) |
| 1577 | 1577 |
| 1578 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 1578 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
| 1579 // by calling the runtime system. | 1579 // by calling the runtime system. |
| 1580 __ bind(&slow); | 1580 __ bind(&slow); |
| 1581 __ push(a1); | 1581 __ push(a1); |
| 1582 __ TailCallRuntime(Runtime::kArguments, 1, 1); | 1582 __ TailCallRuntime(Runtime::kArguments, 1, 1); |
| 1583 } | 1583 } |
| 1584 | 1584 |
| 1585 | 1585 |
| 1586 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | 1586 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
| 1587 // sp[0] : number of parameters | 1587 // a1 : function |
| 1588 // sp[4] : receiver displacement | 1588 // a2 : number of parameters (tagged) |
| 1589 // sp[8] : function | 1589 // a3 : parameters pointer |
| 1590 |
| 1591 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
| 1592 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1593 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1590 | 1594 |
| 1591 // Check if the calling frame is an arguments adaptor frame. | 1595 // Check if the calling frame is an arguments adaptor frame. |
| 1592 Label runtime; | 1596 Label runtime; |
| 1593 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1597 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1594 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); | 1598 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 1595 __ Branch(&runtime, | 1599 __ Branch(&runtime, ne, a0, |
| 1596 ne, | |
| 1597 a2, | |
| 1598 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1600 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1599 | 1601 |
| 1600 // Patch the arguments.length and the parameters pointer in the current frame. | 1602 // Patch the arguments.length and the parameters pointer in the current frame. |
| 1601 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1603 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1602 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); | |
| 1603 __ sll(t3, a2, 1); | 1604 __ sll(t3, a2, 1); |
| 1604 __ Addu(a3, a3, Operand(t3)); | 1605 __ Addu(t0, t0, Operand(t3)); |
| 1605 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset); | 1606 __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset); |
| 1606 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); | |
| 1607 | 1607 |
| 1608 __ bind(&runtime); | 1608 __ bind(&runtime); |
| 1609 __ Push(a1, a3, a2); |
| 1609 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1610 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); |
| 1610 } | 1611 } |
| 1611 | 1612 |
| 1612 | 1613 |
| 1613 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | 1614 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
| 1614 // Stack layout: | 1615 // a1 : function |
| 1615 // sp[0] : number of parameters (tagged) | 1616 // a2 : number of parameters (tagged) |
| 1616 // sp[4] : address of receiver argument | 1617 // a3 : parameters pointer |
| 1617 // sp[8] : function | |
| 1618 // Registers used over whole function: | 1618 // Registers used over whole function: |
| 1619 // t2 : allocated object (tagged) | 1619 // t1 : arguments count (tagged) |
| 1620 // t5 : mapped parameter count (tagged) | 1620 // t2 : mapped parameter count (tagged) |
| 1621 | 1621 |
| 1622 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); | 1622 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
| 1623 // a1 = parameter count (tagged) | 1623 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1624 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1624 | 1625 |
| 1625 // Check if the calling frame is an arguments adaptor frame. | 1626 // Check if the calling frame is an arguments adaptor frame. |
| 1626 Label runtime; | 1627 Label adaptor_frame, try_allocate, runtime; |
| 1627 Label adaptor_frame, try_allocate; | 1628 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1628 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1629 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 1629 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); | 1630 __ Branch(&adaptor_frame, eq, a0, |
| 1630 __ Branch(&adaptor_frame, | |
| 1631 eq, | |
| 1632 a2, | |
| 1633 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1631 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1634 | 1632 |
| 1635 // No adaptor, parameter count = argument count. | 1633 // No adaptor, parameter count = argument count. |
| 1636 __ mov(a2, a1); | 1634 __ mov(t1, a2); |
| 1637 __ b(&try_allocate); | 1635 __ Branch(USE_DELAY_SLOT, &try_allocate); |
| 1638 __ nop(); // Branch delay slot nop. | 1636 __ mov(t2, a2); // In delay slot. |
| 1639 | 1637 |
| 1640 // We have an adaptor frame. Patch the parameters pointer. | 1638 // We have an adaptor frame. Patch the parameters pointer. |
| 1641 __ bind(&adaptor_frame); | 1639 __ bind(&adaptor_frame); |
| 1642 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1640 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1643 __ sll(t6, a2, 1); | 1641 __ sll(t6, t1, 1); |
| 1644 __ Addu(a3, a3, Operand(t6)); | 1642 __ Addu(t0, t0, Operand(t6)); |
| 1645 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); | 1643 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 1646 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); | |
| 1647 | 1644 |
| 1648 // a1 = parameter count (tagged) | 1645 // t1 = argument count (tagged) |
| 1649 // a2 = argument count (tagged) | 1646 // t2 = parameter count (tagged) |
| 1650 // Compute the mapped parameter count = min(a1, a2) in a1. | 1647 // Compute the mapped parameter count = min(t2, t1) in t2. |
| 1651 Label skip_min; | 1648 __ mov(t2, a2); |
| 1652 __ Branch(&skip_min, lt, a1, Operand(a2)); | 1649 __ Branch(&try_allocate, le, t2, Operand(t1)); |
| 1653 __ mov(a1, a2); | 1650 __ mov(t2, t1); |
| 1654 __ bind(&skip_min); | |
| 1655 | 1651 |
| 1656 __ bind(&try_allocate); | 1652 __ bind(&try_allocate); |
| 1657 | 1653 |
| 1658 // Compute the sizes of backing store, parameter map, and arguments object. | 1654 // Compute the sizes of backing store, parameter map, and arguments object. |
| 1659 // 1. Parameter map, has 2 extra words containing context and backing store. | 1655 // 1. Parameter map, has 2 extra words containing context and backing store. |
| 1660 const int kParameterMapHeaderSize = | 1656 const int kParameterMapHeaderSize = |
| 1661 FixedArray::kHeaderSize + 2 * kPointerSize; | 1657 FixedArray::kHeaderSize + 2 * kPointerSize; |
| 1662 // If there are no mapped parameters, we do not need the parameter_map. | 1658 // If there are no mapped parameters, we do not need the parameter_map. |
| 1663 Label param_map_size; | 1659 Label param_map_size; |
| 1664 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); | 1660 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); |
| 1665 __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, a1, Operand(zero_reg)); | 1661 __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg)); |
| 1666 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when a1 == 0. | 1662 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. |
| 1667 __ sll(t5, a1, 1); | 1663 __ sll(t5, t2, 1); |
| 1668 __ addiu(t5, t5, kParameterMapHeaderSize); | 1664 __ addiu(t5, t5, kParameterMapHeaderSize); |
| 1669 __ bind(¶m_map_size); | 1665 __ bind(¶m_map_size); |
| 1670 | 1666 |
| 1671 // 2. Backing store. | 1667 // 2. Backing store. |
| 1672 __ sll(t6, a2, 1); | 1668 __ sll(t6, t1, 1); |
| 1673 __ Addu(t5, t5, Operand(t6)); | 1669 __ Addu(t5, t5, Operand(t6)); |
| 1674 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); | 1670 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); |
| 1675 | 1671 |
| 1676 // 3. Arguments object. | 1672 // 3. Arguments object. |
| 1677 __ Addu(t5, t5, Operand(Heap::kSloppyArgumentsObjectSize)); | 1673 __ Addu(t5, t5, Operand(Heap::kSloppyArgumentsObjectSize)); |
| 1678 | 1674 |
| 1679 // Do the allocation of all three objects in one go. | 1675 // Do the allocation of all three objects in one go. |
| 1680 __ Allocate(t5, v0, a3, t0, &runtime, TAG_OBJECT); | 1676 __ Allocate(t5, v0, t0, t5, &runtime, TAG_OBJECT); |
| 1681 | 1677 |
| 1682 // v0 = address of new object(s) (tagged) | 1678 // v0 = address of new object(s) (tagged) |
| 1683 // a2 = argument count (smi-tagged) | 1679 // a2 = argument count (smi-tagged) |
| 1684 // Get the arguments boilerplate from the current native context into t0. | 1680 // Get the arguments boilerplate from the current native context into t0. |
| 1685 const int kNormalOffset = | 1681 const int kNormalOffset = |
| 1686 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); | 1682 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); |
| 1687 const int kAliasedOffset = | 1683 const int kAliasedOffset = |
| 1688 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); | 1684 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); |
| 1689 | 1685 |
| 1690 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 1686 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 1691 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); | 1687 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); |
| 1692 Label skip2_ne, skip2_eq; | 1688 Label skip2_ne, skip2_eq; |
| 1693 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg)); | 1689 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg)); |
| 1694 __ lw(t0, MemOperand(t0, kNormalOffset)); | 1690 __ lw(t0, MemOperand(t0, kNormalOffset)); |
| 1695 __ bind(&skip2_ne); | 1691 __ bind(&skip2_ne); |
| 1696 | 1692 |
| 1697 __ Branch(&skip2_eq, eq, a1, Operand(zero_reg)); | 1693 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg)); |
| 1698 __ lw(t0, MemOperand(t0, kAliasedOffset)); | 1694 __ lw(t0, MemOperand(t0, kAliasedOffset)); |
| 1699 __ bind(&skip2_eq); | 1695 __ bind(&skip2_eq); |
| 1700 | 1696 |
| 1701 // v0 = address of new object (tagged) | 1697 // v0 = address of new object (tagged) |
| 1702 // a1 = mapped parameter count (tagged) | |
| 1703 // a2 = argument count (smi-tagged) | 1698 // a2 = argument count (smi-tagged) |
| 1704 // t0 = address of arguments map (tagged) | 1699 // t0 = address of arguments map (tagged) |
| 1700 // t2 = mapped parameter count (tagged) |
| 1705 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); | 1701 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); |
| 1706 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex); | 1702 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); |
| 1707 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 1703 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 1708 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1704 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1709 | 1705 |
| 1710 // Set up the callee in-object property. | 1706 // Set up the callee in-object property. |
| 1711 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); | 1707 STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1); |
| 1712 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); | 1708 __ AssertNotSmi(a1); |
| 1713 __ AssertNotSmi(a3); | |
| 1714 const int kCalleeOffset = JSObject::kHeaderSize + | 1709 const int kCalleeOffset = JSObject::kHeaderSize + |
| 1715 Heap::kArgumentsCalleeIndex * kPointerSize; | 1710 Heap::kArgumentsCalleeIndex * kPointerSize; |
| 1716 __ sw(a3, FieldMemOperand(v0, kCalleeOffset)); | 1711 __ sw(a1, FieldMemOperand(v0, kCalleeOffset)); |
| 1717 | 1712 |
| 1718 // Use the length (smi tagged) and set that as an in-object property too. | 1713 // Use the length (smi tagged) and set that as an in-object property too. |
| 1719 __ AssertSmi(a2); | 1714 __ AssertSmi(t1); |
| 1720 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1715 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 1721 const int kLengthOffset = JSObject::kHeaderSize + | 1716 const int kLengthOffset = JSObject::kHeaderSize + |
| 1722 Heap::kArgumentsLengthIndex * kPointerSize; | 1717 Heap::kArgumentsLengthIndex * kPointerSize; |
| 1723 __ sw(a2, FieldMemOperand(v0, kLengthOffset)); | 1718 __ sw(t1, FieldMemOperand(v0, kLengthOffset)); |
| 1724 | 1719 |
| 1725 // Set up the elements pointer in the allocated arguments object. | 1720 // Set up the elements pointer in the allocated arguments object. |
| 1726 // If we allocated a parameter map, t0 will point there, otherwise | 1721 // If we allocated a parameter map, t0 will point there, otherwise |
| 1727 // it will point to the backing store. | 1722 // it will point to the backing store. |
| 1728 __ Addu(t0, v0, Operand(Heap::kSloppyArgumentsObjectSize)); | 1723 __ Addu(t0, v0, Operand(Heap::kSloppyArgumentsObjectSize)); |
| 1729 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1724 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1730 | 1725 |
| 1731 // v0 = address of new object (tagged) | 1726 // v0 = address of new object (tagged) |
| 1732 // a1 = mapped parameter count (tagged) | |
| 1733 // a2 = argument count (tagged) | 1727 // a2 = argument count (tagged) |
| 1734 // t0 = address of parameter map or backing store (tagged) | 1728 // t0 = address of parameter map or backing store (tagged) |
| 1729 // t2 = mapped parameter count (tagged) |
| 1735 // Initialize parameter map. If there are no mapped arguments, we're done. | 1730 // Initialize parameter map. If there are no mapped arguments, we're done. |
| 1736 Label skip_parameter_map; | 1731 Label skip_parameter_map; |
| 1737 Label skip3; | 1732 Label skip3; |
| 1738 __ Branch(&skip3, ne, a1, Operand(Smi::FromInt(0))); | 1733 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); |
| 1739 // Move backing store address to a3, because it is | 1734 // Move backing store address to a1, because it is |
| 1740 // expected there when filling in the unmapped arguments. | 1735 // expected there when filling in the unmapped arguments. |
| 1741 __ mov(a3, t0); | 1736 __ mov(a1, t0); |
| 1742 __ bind(&skip3); | 1737 __ bind(&skip3); |
| 1743 | 1738 |
| 1744 __ Branch(&skip_parameter_map, eq, a1, Operand(Smi::FromInt(0))); | 1739 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0))); |
| 1745 | 1740 |
| 1746 __ LoadRoot(t2, Heap::kSloppyArgumentsElementsMapRootIndex); | 1741 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex); |
| 1747 __ sw(t2, FieldMemOperand(t0, FixedArray::kMapOffset)); | 1742 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); |
| 1748 __ Addu(t2, a1, Operand(Smi::FromInt(2))); | 1743 __ Addu(t1, t2, Operand(Smi::FromInt(2))); |
| 1749 __ sw(t2, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 1744 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 1750 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); | 1745 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); |
| 1751 __ sll(t6, a1, 1); | 1746 __ sll(t6, t2, 1); |
| 1752 __ Addu(t2, t0, Operand(t6)); | 1747 __ Addu(t1, t0, Operand(t6)); |
| 1753 __ Addu(t2, t2, Operand(kParameterMapHeaderSize)); | 1748 __ Addu(t1, t1, Operand(kParameterMapHeaderSize)); |
| 1754 __ sw(t2, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); | 1749 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); |
| 1755 | 1750 |
| 1756 // Copy the parameter slots and the holes in the arguments. | 1751 // Copy the parameter slots and the holes in the arguments. |
| 1757 // We need to fill in mapped_parameter_count slots. They index the context, | 1752 // We need to fill in mapped_parameter_count slots. They index the context, |
| 1758 // where parameters are stored in reverse order, at | 1753 // where parameters are stored in reverse order, at |
| 1759 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | 1754 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
| 1760 // The mapped parameter thus need to get indices | 1755 // The mapped parameter thus need to get indices |
| 1761 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | 1756 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
| 1762 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | 1757 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
| 1763 // We loop from right to left. | 1758 // We loop from right to left. |
| 1764 Label parameters_loop, parameters_test; | 1759 Label parameters_loop, parameters_test; |
| 1765 __ mov(t2, a1); | 1760 __ mov(t1, t2); |
| 1766 __ lw(t5, MemOperand(sp, 0 * kPointerSize)); | 1761 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); |
| 1767 __ Addu(t5, t5, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); | 1762 __ Subu(t5, t5, Operand(t2)); |
| 1768 __ Subu(t5, t5, Operand(a1)); | |
| 1769 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); | 1763 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); |
| 1770 __ sll(t6, t2, 1); | 1764 __ sll(t6, t1, 1); |
| 1771 __ Addu(a3, t0, Operand(t6)); | 1765 __ Addu(a1, t0, Operand(t6)); |
| 1772 __ Addu(a3, a3, Operand(kParameterMapHeaderSize)); | 1766 __ Addu(a1, a1, Operand(kParameterMapHeaderSize)); |
| 1773 | 1767 |
| 1774 // t2 = loop variable (tagged) | 1768 // a1 = address of backing store (tagged) |
| 1775 // a1 = mapping index (tagged) | |
| 1776 // a3 = address of backing store (tagged) | |
| 1777 // t0 = address of parameter map (tagged) | 1769 // t0 = address of parameter map (tagged) |
| 1778 // t1 = temporary scratch (a.o., for address calculation) | 1770 // a0 = temporary scratch (a.o., for address calculation) |
| 1771 // t1 = loop variable (tagged) |
| 1779 // t3 = the hole value | 1772 // t3 = the hole value |
| 1780 __ jmp(¶meters_test); | 1773 __ jmp(¶meters_test); |
| 1781 | 1774 |
| 1782 __ bind(¶meters_loop); | 1775 __ bind(¶meters_loop); |
| 1783 __ Subu(t2, t2, Operand(Smi::FromInt(1))); | 1776 __ Subu(t1, t1, Operand(Smi::FromInt(1))); |
| 1784 __ sll(t1, t2, 1); | 1777 __ sll(a0, t1, 1); |
| 1785 __ Addu(t1, t1, Operand(kParameterMapHeaderSize - kHeapObjectTag)); | 1778 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); |
| 1786 __ Addu(t6, t0, t1); | 1779 __ Addu(t6, t0, a0); |
| 1787 __ sw(t5, MemOperand(t6)); | 1780 __ sw(t5, MemOperand(t6)); |
| 1788 __ Subu(t1, t1, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); | 1781 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); |
| 1789 __ Addu(t6, a3, t1); | 1782 __ Addu(t6, a1, a0); |
| 1790 __ sw(t3, MemOperand(t6)); | 1783 __ sw(t3, MemOperand(t6)); |
| 1791 __ Addu(t5, t5, Operand(Smi::FromInt(1))); | 1784 __ Addu(t5, t5, Operand(Smi::FromInt(1))); |
| 1792 __ bind(¶meters_test); | 1785 __ bind(¶meters_test); |
| 1793 __ Branch(¶meters_loop, ne, t2, Operand(Smi::FromInt(0))); | 1786 __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0))); |
| 1787 |
| 1788 // t1 = argument count (tagged). |
| 1789 __ lw(t1, FieldMemOperand(v0, kLengthOffset)); |
| 1794 | 1790 |
| 1795 __ bind(&skip_parameter_map); | 1791 __ bind(&skip_parameter_map); |
| 1796 // a2 = argument count (tagged) | 1792 // v0 = address of new object (tagged) |
| 1797 // a3 = address of backing store (tagged) | 1793 // a1 = address of backing store (tagged) |
| 1798 // t1 = scratch | 1794 // t1 = argument count (tagged) |
| 1795 // t2 = mapped parameter count (tagged) |
| 1796 // t5 = scratch |
| 1799 // Copy arguments header and remaining slots (if there are any). | 1797 // Copy arguments header and remaining slots (if there are any). |
| 1800 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); | 1798 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); |
| 1801 __ sw(t1, FieldMemOperand(a3, FixedArray::kMapOffset)); | 1799 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); |
| 1802 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset)); | 1800 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset)); |
| 1803 | 1801 |
| 1804 Label arguments_loop, arguments_test; | 1802 Label arguments_loop, arguments_test; |
| 1805 __ mov(t5, a1); | 1803 __ sll(t6, t2, 1); |
| 1806 __ lw(t0, MemOperand(sp, 1 * kPointerSize)); | 1804 __ Subu(a3, a3, Operand(t6)); |
| 1807 __ sll(t6, t5, 1); | |
| 1808 __ Subu(t0, t0, Operand(t6)); | |
| 1809 __ jmp(&arguments_test); | 1805 __ jmp(&arguments_test); |
| 1810 | 1806 |
| 1811 __ bind(&arguments_loop); | 1807 __ bind(&arguments_loop); |
| 1812 __ Subu(t0, t0, Operand(kPointerSize)); | 1808 __ Subu(a3, a3, Operand(kPointerSize)); |
| 1813 __ lw(t2, MemOperand(t0, 0)); | 1809 __ lw(t0, MemOperand(a3, 0)); |
| 1814 __ sll(t6, t5, 1); | 1810 __ sll(t6, t2, 1); |
| 1815 __ Addu(t1, a3, Operand(t6)); | 1811 __ Addu(t5, a1, Operand(t6)); |
| 1816 __ sw(t2, FieldMemOperand(t1, FixedArray::kHeaderSize)); | 1812 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize)); |
| 1817 __ Addu(t5, t5, Operand(Smi::FromInt(1))); | 1813 __ Addu(t2, t2, Operand(Smi::FromInt(1))); |
| 1818 | 1814 |
| 1819 __ bind(&arguments_test); | 1815 __ bind(&arguments_test); |
| 1820 __ Branch(&arguments_loop, lt, t5, Operand(a2)); | 1816 __ Branch(&arguments_loop, lt, t2, Operand(t1)); |
| 1821 | 1817 |
| 1822 // Return and remove the on-stack parameters. | 1818 // Return. |
| 1823 __ DropAndRet(3); | 1819 __ Ret(); |
| 1824 | 1820 |
| 1825 // Do the runtime call to allocate the arguments object. | 1821 // Do the runtime call to allocate the arguments object. |
| 1826 // a2 = argument count (tagged) | 1822 // t1 = argument count (tagged) |
| 1827 __ bind(&runtime); | 1823 __ bind(&runtime); |
| 1828 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count. | 1824 __ Push(a1, a3, t1); |
| 1829 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); | 1825 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1); |
| 1830 } | 1826 } |
| 1831 | 1827 |
| 1832 | 1828 |
| 1833 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1829 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
| 1834 // Return address is in ra. | 1830 // Return address is in ra. |
| 1835 Label slow; | 1831 Label slow; |
| 1836 | 1832 |
| 1837 Register receiver = LoadDescriptor::ReceiverRegister(); | 1833 Register receiver = LoadDescriptor::ReceiverRegister(); |
| 1838 Register key = LoadDescriptor::NameRegister(); | 1834 Register key = LoadDescriptor::NameRegister(); |
| 1839 | 1835 |
| 1840 // Check that the key is an array index, that is Uint32. | 1836 // Check that the key is an array index, that is Uint32. |
| 1841 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); | 1837 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); |
| 1842 __ Branch(&slow, ne, t0, Operand(zero_reg)); | 1838 __ Branch(&slow, ne, t0, Operand(zero_reg)); |
| 1843 | 1839 |
| 1844 // Everything is fine, call runtime. | 1840 // Everything is fine, call runtime. |
| 1845 __ Push(receiver, key); // Receiver, key. | 1841 __ Push(receiver, key); // Receiver, key. |
| 1846 | 1842 |
| 1847 // Perform tail call to the entry. | 1843 // Perform tail call to the entry. |
| 1848 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1); | 1844 __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1); |
| 1849 | 1845 |
| 1850 __ bind(&slow); | 1846 __ bind(&slow); |
| 1851 PropertyAccessCompiler::TailCallBuiltin( | 1847 PropertyAccessCompiler::TailCallBuiltin( |
| 1852 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1848 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
| 1853 } | 1849 } |
| 1854 | 1850 |
| 1855 | 1851 |
| 1856 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { | 1852 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) { |
| 1857 // sp[0] : number of parameters | 1853 // a1 : function |
| 1858 // sp[4] : receiver displacement | 1854 // a2 : number of parameters (tagged) |
| 1859 // sp[8] : function | 1855 // a3 : parameters pointer |
| 1856 |
| 1857 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
| 1858 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1859 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1860 |
| 1860 // Check if the calling frame is an arguments adaptor frame. | 1861 // Check if the calling frame is an arguments adaptor frame. |
| 1861 Label adaptor_frame, try_allocate, runtime; | 1862 Label try_allocate, runtime; |
| 1862 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 1863 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1863 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); | 1864 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 1864 __ Branch(&adaptor_frame, | 1865 __ Branch(&try_allocate, ne, a0, |
| 1865 eq, | |
| 1866 a3, | |
| 1867 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 1866 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1868 | 1867 |
| 1869 // Get the length from the frame. | |
| 1870 __ lw(a1, MemOperand(sp, 0)); | |
| 1871 __ Branch(&try_allocate); | |
| 1872 | |
| 1873 // Patch the arguments.length and the parameters pointer. | 1868 // Patch the arguments.length and the parameters pointer. |
| 1874 __ bind(&adaptor_frame); | 1869 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1875 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 1870 __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize); |
| 1876 __ sw(a1, MemOperand(sp, 0)); | 1871 __ Addu(t0, t0, Operand(at)); |
| 1877 __ sll(at, a1, kPointerSizeLog2 - kSmiTagSize); | 1872 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 1878 __ Addu(a3, a2, Operand(at)); | |
| 1879 | |
| 1880 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); | |
| 1881 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); | |
| 1882 | 1873 |
| 1883 // Try the new space allocation. Start out with computing the size | 1874 // Try the new space allocation. Start out with computing the size |
| 1884 // of the arguments object and the elements array in words. | 1875 // of the arguments object and the elements array in words. |
| 1885 Label add_arguments_object; | 1876 Label add_arguments_object; |
| 1886 __ bind(&try_allocate); | 1877 __ bind(&try_allocate); |
| 1887 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg)); | 1878 __ SmiUntag(t5, a2); |
| 1888 __ srl(a1, a1, kSmiTagSize); | 1879 __ Branch(&add_arguments_object, eq, a2, Operand(zero_reg)); |
| 1889 | 1880 |
| 1890 __ Addu(a1, a1, Operand(FixedArray::kHeaderSize / kPointerSize)); | 1881 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize / kPointerSize)); |
| 1891 __ bind(&add_arguments_object); | 1882 __ bind(&add_arguments_object); |
| 1892 __ Addu(a1, a1, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); | 1883 __ Addu(t5, t5, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize)); |
| 1893 | 1884 |
| 1894 // Do the allocation of both objects in one go. | 1885 // Do the allocation of both objects in one go. |
| 1895 __ Allocate(a1, v0, a2, a3, &runtime, | 1886 __ Allocate(t5, v0, t0, t1, &runtime, |
| 1896 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); | 1887 static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS)); |
| 1897 | 1888 |
| 1898 // Get the arguments boilerplate from the current native context. | 1889 // Get the arguments boilerplate from the current native context. |
| 1899 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); | 1890 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); |
| 1900 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); | 1891 __ lw(t0, FieldMemOperand(t0, GlobalObject::kNativeContextOffset)); |
| 1901 __ lw(t0, MemOperand( | 1892 __ lw(t0, MemOperand( |
| 1902 t0, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX))); | 1893 t0, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX))); |
| 1903 | 1894 |
| 1904 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); | 1895 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); |
| 1905 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex); | 1896 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex); |
| 1906 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | 1897 __ sw(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 1907 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1898 __ sw(t1, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1908 | 1899 |
| 1909 // Get the length (smi tagged) and set that as an in-object property too. | 1900 // Get the length (smi tagged) and set that as an in-object property too. |
| 1910 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); | 1901 STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0); |
| 1911 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); | 1902 __ AssertSmi(a2); |
| 1912 __ AssertSmi(a1); | 1903 __ sw(a2, |
| 1913 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize + | 1904 FieldMemOperand(v0, JSObject::kHeaderSize + |
| 1914 Heap::kArgumentsLengthIndex * kPointerSize)); | 1905 Heap::kArgumentsLengthIndex * kPointerSize)); |
| 1915 | 1906 |
| 1916 Label done; | 1907 Label done; |
| 1917 __ Branch(&done, eq, a1, Operand(zero_reg)); | 1908 __ Branch(&done, eq, a2, Operand(zero_reg)); |
| 1918 | |
| 1919 // Get the parameters pointer from the stack. | |
| 1920 __ lw(a2, MemOperand(sp, 1 * kPointerSize)); | |
| 1921 | 1909 |
| 1922 // Set up the elements pointer in the allocated arguments object and | 1910 // Set up the elements pointer in the allocated arguments object and |
| 1923 // initialize the header in the elements fixed array. | 1911 // initialize the header in the elements fixed array. |
| 1924 __ Addu(t0, v0, Operand(Heap::kStrictArgumentsObjectSize)); | 1912 __ Addu(t0, v0, Operand(Heap::kStrictArgumentsObjectSize)); |
| 1925 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | 1913 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1926 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); | 1914 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); |
| 1927 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); | 1915 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); |
| 1928 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | 1916 __ sw(a2, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 1929 // Untag the length for the loop. | 1917 __ SmiUntag(a2); |
| 1930 __ srl(a1, a1, kSmiTagSize); | |
| 1931 | 1918 |
| 1932 // Copy the fixed array slots. | 1919 // Copy the fixed array slots. |
| 1933 Label loop; | 1920 Label loop; |
| 1934 // Set up t0 to point to the first array slot. | 1921 // Set up t0 to point to the first array slot. |
| 1935 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1922 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 1936 __ bind(&loop); | 1923 __ bind(&loop); |
| 1937 // Pre-decrement a2 with kPointerSize on each iteration. | 1924 // Pre-decrement a3 with kPointerSize on each iteration. |
| 1938 // Pre-decrement in order to skip receiver. | 1925 // Pre-decrement in order to skip receiver. |
| 1939 __ Addu(a2, a2, Operand(-kPointerSize)); | 1926 __ Addu(a3, a3, Operand(-kPointerSize)); |
| 1940 __ lw(a3, MemOperand(a2)); | 1927 __ lw(t1, MemOperand(a3)); |
| 1941 // Post-increment t0 with kPointerSize on each iteration. | 1928 // Post-increment t0 with kPointerSize on each iteration. |
| 1942 __ sw(a3, MemOperand(t0)); | 1929 __ sw(t1, MemOperand(t0)); |
| 1943 __ Addu(t0, t0, Operand(kPointerSize)); | 1930 __ Addu(t0, t0, Operand(kPointerSize)); |
| 1944 __ Subu(a1, a1, Operand(1)); | 1931 __ Subu(a2, a2, Operand(1)); |
| 1945 __ Branch(&loop, ne, a1, Operand(zero_reg)); | 1932 __ Branch(&loop, ne, a2, Operand(zero_reg)); |
| 1946 | 1933 |
| 1947 // Return and remove the on-stack parameters. | 1934 // Return. |
| 1948 __ bind(&done); | 1935 __ bind(&done); |
| 1949 __ DropAndRet(3); | 1936 __ Ret(); |
| 1950 | 1937 |
| 1951 // Do the runtime call to allocate the arguments object. | 1938 // Do the runtime call to allocate the arguments object. |
| 1952 __ bind(&runtime); | 1939 __ bind(&runtime); |
| 1940 __ Push(a1, a3, a2); |
| 1953 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1); | 1941 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1); |
| 1954 } | 1942 } |
| 1955 | 1943 |
| 1956 | 1944 |
| 1957 void RegExpExecStub::Generate(MacroAssembler* masm) { | 1945 void RegExpExecStub::Generate(MacroAssembler* masm) { |
| 1958 // Just jump directly to runtime if native RegExp is not selected at compile | 1946 // Just jump directly to runtime if native RegExp is not selected at compile |
| 1959 // time or if regexp entry in generated code is turned off runtime switch or | 1947 // time or if regexp entry in generated code is turned off runtime switch or |
| 1960 // at compilation. | 1948 // at compilation. |
| 1961 #ifdef V8_INTERPRETED_REGEXP | 1949 #ifdef V8_INTERPRETED_REGEXP |
| 1962 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 1950 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
| (...skipping 3785 matching lines...) |
| 5748 MemOperand(fp, 6 * kPointerSize), NULL); | 5736 MemOperand(fp, 6 * kPointerSize), NULL); |
| 5749 } | 5737 } |
| 5750 | 5738 |
| 5751 | 5739 |
| 5752 #undef __ | 5740 #undef __ |
| 5753 | 5741 |
| 5754 } // namespace internal | 5742 } // namespace internal |
| 5755 } // namespace v8 | 5743 } // namespace v8 |
| 5756 | 5744 |
| 5757 #endif // V8_TARGET_ARCH_MIPS | 5745 #endif // V8_TARGET_ARCH_MIPS |