Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(85)

Side by Side Diff: src/mips64/code-stubs-mips64.cc

Issue 1695633003: [runtime] Turn ArgumentAccessStub into FastNewSloppyArgumentsStub. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix MIPS dead code Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/mips/interface-descriptors-mips.cc ('k') | src/mips64/interface-descriptors-mips64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS64 5 #if V8_TARGET_ARCH_MIPS64
6 6
7 #include "src/bootstrapper.h" 7 #include "src/bootstrapper.h"
8 #include "src/code-stubs.h" 8 #include "src/code-stubs.h"
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/ic/handler-compiler.h" 10 #include "src/ic/handler-compiler.h"
(...skipping 1569 matching lines...) Expand 10 before | Expand all | Expand 10 after
1580 LoadWithVectorDescriptor::SlotRegister())); 1580 LoadWithVectorDescriptor::SlotRegister()));
1581 1581
1582 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, a4, 1582 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, a4,
1583 a5, &miss); 1583 a5, &miss);
1584 __ bind(&miss); 1584 __ bind(&miss);
1585 PropertyAccessCompiler::TailCallBuiltin( 1585 PropertyAccessCompiler::TailCallBuiltin(
1586 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); 1586 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1587 } 1587 }
1588 1588
1589 1589
1590 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
     // Slow path for materializing a sloppy arguments object: if the caller
     // went through an arguments adaptor frame, patch the argument count and
     // parameters pointer to the adaptor's values, then tail-call the
     // kNewSloppyArguments runtime function to do the actual allocation.
1591 // a1 : function
1592 // a2 : number of parameters (tagged)
1593 // a3 : parameters pointer
1594
1595 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
1596 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
1597 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
1598
1599 // Check if the calling frame is an arguments adaptor frame.
1600 Label runtime;
1601 __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1602 __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
1603 __ Branch(&runtime, ne, a0,
1604 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1605
1606 // Patch the arguments.length and the parameters pointer in the current frame.
     // a4 still holds the adaptor frame pointer here; a3 ends up pointing just
     // past the adaptor's receiver slot.
1607 __ ld(a2, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1608 __ SmiScale(a7, a2, kPointerSizeLog2);
1609 __ Daddu(a4, a4, Operand(a7));
1610 __ daddiu(a3, a4, StandardFrameConstants::kCallerSPOffset);
1611
1612 __ bind(&runtime);
1613 __ Push(a1, a3, a2);
1614 __ TailCallRuntime(Runtime::kNewSloppyArguments);
1615 }
1616
1617
1618 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
     // Fast path for materializing a sloppy arguments object: allocates the
     // arguments object, an optional parameter map (for aliased/mapped
     // parameters), and the backing store in one allocation, then fills them
     // in. Falls back to the kNewSloppyArguments runtime call on allocation
     // failure.
1619 // a1 : function
1620 // a2 : number of parameters (tagged)
1621 // a3 : parameters pointer
1622 // Registers used over whole function:
1623 // a5 : arguments count (tagged)
1624 // a6 : mapped parameter count (tagged)
1625
1626 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
1627 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count()));
1628 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
1629
1630 // Check if the calling frame is an arguments adaptor frame.
1631 Label adaptor_frame, try_allocate, runtime;
1632 __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1633 __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
1634 __ Branch(&adaptor_frame, eq, a0,
1635 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1636
1637 // No adaptor, parameter count = argument count.
1638 __ mov(a5, a2);
1639 __ Branch(USE_DELAY_SLOT, &try_allocate);
1640 __ mov(a6, a2); // In delay slot.
1641
1642 // We have an adaptor frame. Patch the parameters pointer.
1643 __ bind(&adaptor_frame);
1644 __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1645 __ SmiScale(t2, a5, kPointerSizeLog2);
1646 __ Daddu(a4, a4, Operand(t2));
1647 __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));
1648
1649 // a5 = argument count (tagged)
1650 // a2 = parameter count (tagged)
1651 // Compute the mapped parameter count = min(a2, a5) in a6.
1652 __ mov(a6, a2);
1653 __ Branch(&try_allocate, le, a6, Operand(a5));
1654 __ mov(a6, a5);
1655
1656 __ bind(&try_allocate);
1657
1658 // Compute the sizes of backing store, parameter map, and arguments object.
1659 // 1. Parameter map, has 2 extra words containing context and backing store.
1660 const int kParameterMapHeaderSize =
1661 FixedArray::kHeaderSize + 2 * kPointerSize;
1662 // If there are no mapped parameters, we do not need the parameter_map.
     // The DCHECK guarantees Smi zero has an all-zero bit pattern, so a tagged
     // count can be compared directly against zero_reg.
1663 Label param_map_size;
1664 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1665 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
1666 __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
1667 __ SmiScale(t1, a6, kPointerSizeLog2);
1668 __ daddiu(t1, t1, kParameterMapHeaderSize);
1669 __ bind(&param_map_size);
1670
1671 // 2. Backing store.
1672 __ SmiScale(t2, a5, kPointerSizeLog2);
1673 __ Daddu(t1, t1, Operand(t2));
1674 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));
1675
1676 // 3. Arguments object.
1677 __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));
1678
1679 // Do the allocation of all three objects in one go.
1680 __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT);
1681
1682 // v0 = address of new object(s) (tagged)
1683 // a2 = argument count (smi-tagged)
1684 // Get the arguments boilerplate from the current native context into a4.
1685 const int kNormalOffset =
1686 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
1687 const int kAliasedOffset =
1688 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
1689
     // Pick the unmapped (normal) map when there are no mapped parameters,
     // otherwise the fast-aliased map.
1690 __ ld(a4, NativeContextMemOperand());
1691 Label skip2_ne, skip2_eq;
1692 __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
1693 __ ld(a4, MemOperand(a4, kNormalOffset));
1694 __ bind(&skip2_ne);
1695
1696 __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
1697 __ ld(a4, MemOperand(a4, kAliasedOffset));
1698 __ bind(&skip2_eq);
1699
1700 // v0 = address of new object (tagged)
1701 // a2 = argument count (smi-tagged)
1702 // a4 = address of arguments map (tagged)
1703 // a6 = mapped parameter count (tagged)
1704 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
1705 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
1706 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
1707 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));
1708
1709 // Set up the callee in-object property.
1710 __ AssertNotSmi(a1);
1711 __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
1712
1713 // Use the length (smi tagged) and set that as an in-object property too.
1714 __ AssertSmi(a5);
1715 __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
1716
1717 // Set up the elements pointer in the allocated arguments object.
1718 // If we allocated a parameter map, a4 will point there, otherwise
1719 // it will point to the backing store.
1720 __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
1721 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
1722
1723 // v0 = address of new object (tagged)
1724 // a2 = argument count (tagged)
1725 // a4 = address of parameter map or backing store (tagged)
1726 // a6 = mapped parameter count (tagged)
1727 // Initialize parameter map. If there are no mapped arguments, we're done.
1728 Label skip_parameter_map;
1729 Label skip3;
1730 __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
1731 // Move backing store address to a1, because it is
1732 // expected there when filling in the unmapped arguments.
1733 __ mov(a1, a4);
1734 __ bind(&skip3);
1735
1736 __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
1737
     // Parameter map layout: [map, length, context, backing store, slots...].
1738 __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
1739 __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
1740 __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
1741 __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
1742 __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
1743 __ SmiScale(t2, a6, kPointerSizeLog2);
1744 __ Daddu(a5, a4, Operand(t2));
1745 __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
1746 __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));
1747
1748 // Copy the parameter slots and the holes in the arguments.
1749 // We need to fill in mapped_parameter_count slots. They index the context,
1750 // where parameters are stored in reverse order, at
1751 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
1752 // The mapped parameters thus need to get indices
1753 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
1754 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
1755 // We loop from right to left.
1756 Label parameters_loop, parameters_test;
1757 __ mov(a5, a6);
1758 __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
1759 __ Dsubu(t1, t1, Operand(a6));
1760 __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
1761 __ SmiScale(t2, a5, kPointerSizeLog2);
1762 __ Daddu(a1, a4, Operand(t2));
1763 __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));
1764
1765 // a1 = address of backing store (tagged)
1766 // a4 = address of parameter map (tagged)
1767 // a0 = temporary scratch (a.o., for address calculation)
1768 // t1 = loop variable (tagged)
1769 // a7 = the hole value
1770 __ jmp(&parameters_test);
1771
     // Each iteration stores the context index into the parameter map slot and
     // a hole into the corresponding backing-store slot.
1772 __ bind(&parameters_loop);
1773 __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
1774 __ SmiScale(a0, a5, kPointerSizeLog2);
1775 __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
1776 __ Daddu(t2, a4, a0);
1777 __ sd(t1, MemOperand(t2));
1778 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
1779 __ Daddu(t2, a1, a0);
1780 __ sd(a7, MemOperand(t2));
1781 __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
1782 __ bind(&parameters_test);
1783 __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
1784
1785 // Restore a5 = argument count (tagged).
1786 __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
1787
1788 __ bind(&skip_parameter_map);
1789 // v0 = address of new object (tagged)
1790 // a1 = address of backing store (tagged)
1791 // a5 = argument count (tagged)
1792 // a6 = mapped parameter count (tagged)
1793 // t1 = scratch
1794 // Copy arguments header and remaining slots (if there are any).
1795 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
1796 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
1797 __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));
1798
     // Copy the unmapped arguments (indices a6 .. a5-1) from the stack into
     // the backing store, walking the parameters pointer a3 downwards.
1799 Label arguments_loop, arguments_test;
1800 __ SmiScale(t2, a6, kPointerSizeLog2);
1801 __ Dsubu(a3, a3, Operand(t2));
1802 __ jmp(&arguments_test);
1803
1804 __ bind(&arguments_loop);
1805 __ Dsubu(a3, a3, Operand(kPointerSize));
1806 __ ld(a4, MemOperand(a3, 0));
1807 __ SmiScale(t2, a6, kPointerSizeLog2);
1808 __ Daddu(t1, a1, Operand(t2));
1809 __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
1810 __ Daddu(a6, a6, Operand(Smi::FromInt(1)));
1811
1812 __ bind(&arguments_test);
1813 __ Branch(&arguments_loop, lt, a6, Operand(a5));
1814
1815 // Return.
1816 __ Ret();
1817
1818 // Do the runtime call to allocate the arguments object.
1819 // a5 = argument count (tagged)
1820 __ bind(&runtime);
1821 __ Push(a1, a3, a5);
1822 __ TailCallRuntime(Runtime::kNewSloppyArguments);
1823 }
1824
1825
1826 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { 1590 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
1827 // Return address is in ra. 1591 // Return address is in ra.
1828 Label slow; 1592 Label slow;
1829 1593
1830 Register receiver = LoadDescriptor::ReceiverRegister(); 1594 Register receiver = LoadDescriptor::ReceiverRegister();
1831 Register key = LoadDescriptor::NameRegister(); 1595 Register key = LoadDescriptor::NameRegister();
1832 1596
1833 // Check that the key is an array index, that is Uint32. 1597 // Check that the key is an array index, that is Uint32.
1834 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); 1598 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask));
1835 __ Branch(&slow, ne, t0, Operand(zero_reg)); 1599 __ Branch(&slow, ne, t0, Operand(zero_reg));
(...skipping 3315 matching lines...) Expand 10 before | Expand all | Expand 10 after
5151 __ Push(a0, a2, a1); 4915 __ Push(a0, a2, a1);
5152 __ CallRuntime(Runtime::kAllocateInNewSpace); 4916 __ CallRuntime(Runtime::kAllocateInNewSpace);
5153 __ Pop(a0, a2); 4917 __ Pop(a0, a2);
5154 __ SmiUntag(a0); 4918 __ SmiUntag(a0);
5155 } 4919 }
5156 __ jmp(&done_allocate); 4920 __ jmp(&done_allocate);
5157 } 4921 }
5158 } 4922 }
5159 4923
5160 4924
4925 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
     // Materializes a sloppy arguments object for the current function frame:
     // derives the parameter count and parameters pointer from the function's
     // SharedFunctionInfo, then allocates the arguments object, an optional
     // parameter map (for aliased parameters) and the backing store in one
     // allocation and fills them in. Falls back to the kNewSloppyArguments
     // runtime call on allocation failure.
4926 // ----------- S t a t e -------------
4927 // -- a1 : function
4928 // -- cp : context
4929 // -- fp : frame pointer
4930 // -- ra : return address
4931 // -----------------------------------
4932 __ AssertFunction(a1);
4933
4934 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
4935 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4936 __ lw(a2,
4937 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
     // Address arithmetic on the 64-bit frame pointer must use the 64-bit
     // Dlsa/Daddu forms: on MIPS64 the 32-bit Lsa/Addu variants produce a
     // sign-extended 32-bit result, which would corrupt the parameters
     // pointer. This also matches the Daddu-based pointer arithmetic used in
     // the adaptor-frame path below.
4938 __ Dlsa(a3, fp, a2, kPointerSizeLog2);
4939 __ Daddu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
4940 __ SmiTag(a2);
4941
4942 // a1 : function
4943 // a2 : number of parameters (tagged)
4944 // a3 : parameters pointer
4945 // Registers used over whole function:
4946 // a5 : arguments count (tagged)
4947 // a6 : mapped parameter count (tagged)
4948
4949 // Check if the calling frame is an arguments adaptor frame.
4950 Label adaptor_frame, try_allocate, runtime;
4951 __ ld(a4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
4952 __ ld(a0, MemOperand(a4, StandardFrameConstants::kContextOffset));
4953 __ Branch(&adaptor_frame, eq, a0,
4954 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4955
4956 // No adaptor, parameter count = argument count.
4957 __ mov(a5, a2);
4958 __ Branch(USE_DELAY_SLOT, &try_allocate);
4959 __ mov(a6, a2); // In delay slot.
4960
4961 // We have an adaptor frame. Patch the parameters pointer.
4962 __ bind(&adaptor_frame);
4963 __ ld(a5, MemOperand(a4, ArgumentsAdaptorFrameConstants::kLengthOffset));
4964 __ SmiScale(t2, a5, kPointerSizeLog2);
4965 __ Daddu(a4, a4, Operand(t2));
4966 __ Daddu(a3, a4, Operand(StandardFrameConstants::kCallerSPOffset));
4967
4968 // a5 = argument count (tagged)
4969 // a2 = parameter count (tagged)
4970 // Compute the mapped parameter count = min(a2, a5) in a6.
4971 __ mov(a6, a2);
4972 __ Branch(&try_allocate, le, a6, Operand(a5));
4973 __ mov(a6, a5);
4974
4975 __ bind(&try_allocate);
4976
4977 // Compute the sizes of backing store, parameter map, and arguments object.
4978 // 1. Parameter map, has 2 extra words containing context and backing store.
4979 const int kParameterMapHeaderSize =
4980 FixedArray::kHeaderSize + 2 * kPointerSize;
4981 // If there are no mapped parameters, we do not need the parameter_map.
     // The DCHECK guarantees Smi zero has an all-zero bit pattern, so a tagged
     // count can be compared directly against zero_reg.
4982 Label param_map_size;
4983 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
4984 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
4985 __ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
4986 __ SmiScale(t1, a6, kPointerSizeLog2);
4987 __ daddiu(t1, t1, kParameterMapHeaderSize);
4988 __ bind(&param_map_size);
4989
4990 // 2. Backing store.
4991 __ SmiScale(t2, a5, kPointerSizeLog2);
4992 __ Daddu(t1, t1, Operand(t2));
4993 __ Daddu(t1, t1, Operand(FixedArray::kHeaderSize));
4994
4995 // 3. Arguments object.
4996 __ Daddu(t1, t1, Operand(JSSloppyArgumentsObject::kSize));
4997
4998 // Do the allocation of all three objects in one go.
4999 __ Allocate(t1, v0, t1, a4, &runtime, TAG_OBJECT);
5000
5001 // v0 = address of new object(s) (tagged)
5002 // a2 = argument count (smi-tagged)
5003 // Get the arguments boilerplate from the current native context into a4.
5004 const int kNormalOffset =
5005 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
5006 const int kAliasedOffset =
5007 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
5008
     // Pick the unmapped (normal) map when there are no mapped parameters,
     // otherwise the fast-aliased map.
5009 __ ld(a4, NativeContextMemOperand());
5010 Label skip2_ne, skip2_eq;
5011 __ Branch(&skip2_ne, ne, a6, Operand(zero_reg));
5012 __ ld(a4, MemOperand(a4, kNormalOffset));
5013 __ bind(&skip2_ne);
5014
5015 __ Branch(&skip2_eq, eq, a6, Operand(zero_reg));
5016 __ ld(a4, MemOperand(a4, kAliasedOffset));
5017 __ bind(&skip2_eq);
5018
5019 // v0 = address of new object (tagged)
5020 // a2 = argument count (smi-tagged)
5021 // a4 = address of arguments map (tagged)
5022 // a6 = mapped parameter count (tagged)
5023 __ sd(a4, FieldMemOperand(v0, JSObject::kMapOffset));
5024 __ LoadRoot(t1, Heap::kEmptyFixedArrayRootIndex);
5025 __ sd(t1, FieldMemOperand(v0, JSObject::kPropertiesOffset));
5026 __ sd(t1, FieldMemOperand(v0, JSObject::kElementsOffset));
5027
5028 // Set up the callee in-object property.
5029 __ AssertNotSmi(a1);
5030 __ sd(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
5031
5032 // Use the length (smi tagged) and set that as an in-object property too.
5033 __ AssertSmi(a5);
5034 __ sd(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
5035
5036 // Set up the elements pointer in the allocated arguments object.
5037 // If we allocated a parameter map, a4 will point there, otherwise
5038 // it will point to the backing store.
5039 __ Daddu(a4, v0, Operand(JSSloppyArgumentsObject::kSize));
5040 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
5041
5042 // v0 = address of new object (tagged)
5043 // a2 = argument count (tagged)
5044 // a4 = address of parameter map or backing store (tagged)
5045 // a6 = mapped parameter count (tagged)
5046 // Initialize parameter map. If there are no mapped arguments, we're done.
5047 Label skip_parameter_map;
5048 Label skip3;
5049 __ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
5050 // Move backing store address to a1, because it is
5051 // expected there when filling in the unmapped arguments.
5052 __ mov(a1, a4);
5053 __ bind(&skip3);
5054
5055 __ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
5056
     // Parameter map layout: [map, length, context, backing store, slots...].
5057 __ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
5058 __ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
5059 __ Daddu(a5, a6, Operand(Smi::FromInt(2)));
5060 __ sd(a5, FieldMemOperand(a4, FixedArray::kLengthOffset));
5061 __ sd(cp, FieldMemOperand(a4, FixedArray::kHeaderSize + 0 * kPointerSize));
5062 __ SmiScale(t2, a6, kPointerSizeLog2);
5063 __ Daddu(a5, a4, Operand(t2));
5064 __ Daddu(a5, a5, Operand(kParameterMapHeaderSize));
5065 __ sd(a5, FieldMemOperand(a4, FixedArray::kHeaderSize + 1 * kPointerSize));
5066
5067 // Copy the parameter slots and the holes in the arguments.
5068 // We need to fill in mapped_parameter_count slots. They index the context,
5069 // where parameters are stored in reverse order, at
5070 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
5071 // The mapped parameters thus need to get indices
5072 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
5073 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
5074 // We loop from right to left.
5075 Label parameters_loop, parameters_test;
5076 __ mov(a5, a6);
5077 __ Daddu(t1, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
5078 __ Dsubu(t1, t1, Operand(a6));
5079 __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
5080 __ SmiScale(t2, a5, kPointerSizeLog2);
5081 __ Daddu(a1, a4, Operand(t2));
5082 __ Daddu(a1, a1, Operand(kParameterMapHeaderSize));
5083
5084 // a1 = address of backing store (tagged)
5085 // a4 = address of parameter map (tagged)
5086 // a0 = temporary scratch (a.o., for address calculation)
5087 // t1 = loop variable (tagged)
5088 // a7 = the hole value
5089 __ jmp(&parameters_test);
5090
     // Each iteration stores the context index into the parameter map slot and
     // a hole into the corresponding backing-store slot.
5091 __ bind(&parameters_loop);
5092 __ Dsubu(a5, a5, Operand(Smi::FromInt(1)));
5093 __ SmiScale(a0, a5, kPointerSizeLog2);
5094 __ Daddu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
5095 __ Daddu(t2, a4, a0);
5096 __ sd(t1, MemOperand(t2));
5097 __ Dsubu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
5098 __ Daddu(t2, a1, a0);
5099 __ sd(a7, MemOperand(t2));
5100 __ Daddu(t1, t1, Operand(Smi::FromInt(1)));
5101 __ bind(&parameters_test);
5102 __ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
5103
5104 // Restore a5 = argument count (tagged).
5105 __ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
5106
5107 __ bind(&skip_parameter_map);
5108 // v0 = address of new object (tagged)
5109 // a1 = address of backing store (tagged)
5110 // a5 = argument count (tagged)
5111 // a6 = mapped parameter count (tagged)
5112 // t1 = scratch
5113 // Copy arguments header and remaining slots (if there are any).
5114 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
5115 __ sd(t1, FieldMemOperand(a1, FixedArray::kMapOffset));
5116 __ sd(a5, FieldMemOperand(a1, FixedArray::kLengthOffset));
5117
     // Copy the unmapped arguments (indices a6 .. a5-1) from the stack into
     // the backing store, walking the parameters pointer a3 downwards.
5118 Label arguments_loop, arguments_test;
5119 __ SmiScale(t2, a6, kPointerSizeLog2);
5120 __ Dsubu(a3, a3, Operand(t2));
5121 __ jmp(&arguments_test);
5122
5123 __ bind(&arguments_loop);
5124 __ Dsubu(a3, a3, Operand(kPointerSize));
5125 __ ld(a4, MemOperand(a3, 0));
5126 __ SmiScale(t2, a6, kPointerSizeLog2);
5127 __ Daddu(t1, a1, Operand(t2));
5128 __ sd(a4, FieldMemOperand(t1, FixedArray::kHeaderSize));
5129 __ Daddu(a6, a6, Operand(Smi::FromInt(1)));
5130
5131 __ bind(&arguments_test);
5132 __ Branch(&arguments_loop, lt, a6, Operand(a5));
5133
5134 // Return.
5135 __ Ret();
5136
5137 // Do the runtime call to allocate the arguments object.
5138 // a5 = argument count (tagged)
5139 __ bind(&runtime);
5140 __ Push(a1, a3, a5);
5141 __ TailCallRuntime(Runtime::kNewSloppyArguments);
5142 }
5143
5144
5161 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { 5145 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
5162 // ----------- S t a t e ------------- 5146 // ----------- S t a t e -------------
5163 // -- a1 : function 5147 // -- a1 : function
5164 // -- cp : context 5148 // -- cp : context
5165 // -- fp : frame pointer 5149 // -- fp : frame pointer
5166 // -- ra : return address 5150 // -- ra : return address
5167 // ----------------------------------- 5151 // -----------------------------------
5168 __ AssertFunction(a1); 5152 __ AssertFunction(a1);
5169 5153
5170 // For Ignition we need to skip all possible handler/stub frames until 5154 // For Ignition we need to skip all possible handler/stub frames until
(...skipping 556 matching lines...) Expand 10 before | Expand all | Expand 10 after
5727 return_value_operand, NULL); 5711 return_value_operand, NULL);
5728 } 5712 }
5729 5713
5730 5714
5731 #undef __ 5715 #undef __
5732 5716
5733 } // namespace internal 5717 } // namespace internal
5734 } // namespace v8 5718 } // namespace v8
5735 5719
5736 #endif // V8_TARGET_ARCH_MIPS64 5720 #endif // V8_TARGET_ARCH_MIPS64
OLDNEW
« no previous file with comments | « src/mips/interface-descriptors-mips.cc ('k') | src/mips64/interface-descriptors-mips64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698