OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_PPC |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1522 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1533 | 1533 |
1534 StubRuntimeCallHelper call_helper; | 1534 StubRuntimeCallHelper call_helper; |
1535 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); | 1535 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); |
1536 | 1536 |
1537 __ bind(&miss); | 1537 __ bind(&miss); |
1538 PropertyAccessCompiler::TailCallBuiltin( | 1538 PropertyAccessCompiler::TailCallBuiltin( |
1539 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); | 1539 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); |
1540 } | 1540 } |
1541 | 1541 |
1542 | 1542 |
// Slow path for materializing a sloppy-mode arguments object: if the caller
// went through an arguments adaptor frame, patch the (tagged) argument count
// and the parameters pointer to describe the adapted arguments, then tail-call
// the Runtime::kNewSloppyArguments runtime function.  Note that BOTH paths end
// in the runtime call; only the frame-describing registers differ.
void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // r4 : function
  // r5 : number of parameters (tagged)
  // r6 : parameters pointer

  DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&runtime);

  // Patch the arguments.length and the parameters pointer in the current
  // frame: read the real argument count out of the adaptor frame and compute
  // the address of the first (adapted) argument from it.
  __ LoadP(r5, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiToPtrArrayOffset(r6, r5);
  __ add(r6, r6, r7);
  __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));

  // Deliberate fall-through from the patch path above.
  __ bind(&runtime);
  __ Push(r4, r6, r5);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
1569 | |
1570 | |
// Fast path for materializing a sloppy-mode arguments object whose first
// min(parameter_count, argument_count) elements alias the context slots of
// the corresponding formal parameters ("mapped" arguments).  Allocates the
// JSSloppyArgumentsObject, the (optional) parameter map and the backing store
// FixedArray in a single allocation, and bails out to
// Runtime::kNewSloppyArguments when the allocation fails.
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // r4 : function
  // r5 : number of parameters (tagged)
  // r6 : parameters pointer
  // Registers used over whole function:
  // r8 : arguments count (tagged)
  // r9 : mapped parameter count (tagged)

  DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ beq(&adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ mr(r8, r5);
  __ mr(r9, r5);
  __ b(&try_allocate);

  // We have an adaptor frame. Patch the parameters pointer so it describes
  // the adapted arguments rather than the formal parameters.
  __ bind(&adaptor_frame);
  __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiToPtrArrayOffset(r6, r8);
  __ add(r6, r6, r7);
  __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));

  // r8 = argument count (tagged)
  // r5 = parameter count (tagged)
  // Compute the mapped parameter count = min(r5, r8) in r9.
  __ cmp(r5, r8);
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ isel(lt, r9, r5, r8);
  } else {
    Label skip;
    __ mr(r9, r5);
    __ blt(&skip);
    __ mr(r9, r8);
    __ bind(&skip);
  }

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  // If there are no mapped parameters, we do not need the parameter_map.
  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ SmiToPtrArrayOffset(r11, r9);
    __ addi(r11, r11, Operand(kParameterMapHeaderSize));
    // NOTE(review): selects size 0 when r9 == 0 -- appears to rely on r0
    // still holding the (zero) Smi literal from CmpSmiLiteral above; verify
    // against the CmpSmiLiteral implementation.
    __ isel(eq, r11, r0, r11);
  } else {
    Label skip2, skip3;
    __ bne(&skip2);
    __ li(r11, Operand::Zero());
    __ b(&skip3);
    __ bind(&skip2);
    __ SmiToPtrArrayOffset(r11, r9);
    __ addi(r11, r11, Operand(kParameterMapHeaderSize));
    __ bind(&skip3);
  }

  // 2. Backing store.
  __ SmiToPtrArrayOffset(r7, r8);
  __ add(r11, r11, r7);
  __ addi(r11, r11, Operand(FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addi(r11, r11, Operand(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r11, r3, r11, r7, &runtime, TAG_OBJECT);

  // r3 = address of new object(s) (tagged)
  // r5 = argument count (smi-tagged)
  // Get the arguments boilerplate from the current native context into r4.
  const int kNormalOffset =
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  const int kAliasedOffset =
      Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);

  // Pick the aliased-arguments map when there are mapped parameters
  // (r9 != 0), otherwise the plain sloppy-arguments map.
  __ LoadP(r7, NativeContextMemOperand());
  __ cmpi(r9, Operand::Zero());
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ LoadP(r11, MemOperand(r7, kNormalOffset));
    __ LoadP(r7, MemOperand(r7, kAliasedOffset));
    __ isel(eq, r7, r11, r7);
  } else {
    Label skip4, skip5;
    __ bne(&skip4);
    __ LoadP(r7, MemOperand(r7, kNormalOffset));
    __ b(&skip5);
    __ bind(&skip4);
    __ LoadP(r7, MemOperand(r7, kAliasedOffset));
    __ bind(&skip5);
  }

  // r3 = address of new object (tagged)
  // r5 = argument count (smi-tagged)
  // r7 = address of arguments map (tagged)
  // r9 = mapped parameter count (tagged)
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0);
  __ LoadRoot(r11, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r11, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r11, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  // Set up the callee in-object property.
  __ AssertNotSmi(r4);
  __ StoreP(r4, FieldMemOperand(r3, JSSloppyArgumentsObject::kCalleeOffset),
            r0);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(r8);
  __ StoreP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset),
            r0);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, r7 will point there, otherwise
  // it will point to the backing store.
  __ addi(r7, r3, Operand(JSSloppyArgumentsObject::kSize));
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  // r3 = address of new object (tagged)
  // r5 = argument count (tagged)
  // r7 = address of parameter map or backing store (tagged)
  // r9 = mapped parameter count (tagged)
  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
  if (CpuFeatures::IsSupported(ISELECT)) {
    // If no map is needed, move the backing store address to r4, where the
    // unmapped-arguments copy loop below expects it.
    __ isel(eq, r4, r7, r4);
    __ beq(&skip_parameter_map);
  } else {
    Label skip6;
    __ bne(&skip6);
    // Move backing store address to r4, because it is
    // expected there when filling in the unmapped arguments.
    __ mr(r4, r7);
    __ b(&skip_parameter_map);
    __ bind(&skip6);
  }

  __ LoadRoot(r8, Heap::kSloppyArgumentsElementsMapRootIndex);
  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
  // Parameter map length = mapped count + 2 (context and backing store slots).
  __ AddSmiLiteral(r8, r9, Smi::FromInt(2), r0);
  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
  __ StoreP(cp, FieldMemOperand(r7, FixedArray::kHeaderSize + 0 * kPointerSize),
            r0);
  // Slot 1 of the parameter map points at the backing store, which starts
  // right after the map's own entries.
  __ SmiToPtrArrayOffset(r8, r9);
  __ add(r8, r8, r7);
  __ addi(r8, r8, Operand(kParameterMapHeaderSize));
  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kHeaderSize + 1 * kPointerSize),
            r0);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop;
  __ mr(r8, r9);
  __ AddSmiLiteral(r11, r5, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0);
  __ sub(r11, r11, r9);
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ SmiToPtrArrayOffset(r4, r8);
  __ add(r4, r4, r7);
  __ addi(r4, r4, Operand(kParameterMapHeaderSize));

  // r4 = address of backing store (tagged)
  // r7 = address of parameter map (tagged)
  // r8 = temporary scratch (a.o., for address calculation)
  // r10 = temporary scratch (a.o., for address calculation)
  // ip = the hole value
  __ SmiUntag(r8);
  __ mtctr(r8);  // loop count (CTR) = mapped parameter count
  __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
  __ add(r10, r4, r8);
  __ add(r8, r7, r8);
  __ addi(r10, r10, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ addi(r8, r8, Operand(kParameterMapHeaderSize - kHeapObjectTag));

  // Per iteration: store the context-slot index (smi in r11) into the
  // parameter map and the hole into the matching backing store slot, walking
  // both arrays downwards via pre-decrement stores.
  __ bind(&parameters_loop);
  __ StorePU(r11, MemOperand(r8, -kPointerSize));
  __ StorePU(ip, MemOperand(r10, -kPointerSize));
  __ AddSmiLiteral(r11, r11, Smi::FromInt(1), r0);
  __ bdnz(&parameters_loop);

  // Restore r8 = argument count (tagged).
  __ LoadP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset));

  __ bind(&skip_parameter_map);
  // r3 = address of new object (tagged)
  // r4 = address of backing store (tagged)
  // r8 = argument count (tagged)
  // r9 = mapped parameter count (tagged)
  // r11 = scratch
  // Copy arguments header and remaining slots (if there are any).
  __ LoadRoot(r11, Heap::kFixedArrayMapRootIndex);
  __ StoreP(r11, FieldMemOperand(r4, FixedArray::kMapOffset), r0);
  __ StoreP(r8, FieldMemOperand(r4, FixedArray::kLengthOffset), r0);
  // r11 = number of unmapped arguments; if zero, everything was mapped and
  // we are done.
  __ sub(r11, r8, r9, LeaveOE, SetRC);
  __ Ret(eq, cr0);

  Label arguments_loop;
  __ SmiUntag(r11);
  __ mtctr(r11);  // loop count (CTR) = unmapped argument count

  __ SmiToPtrArrayOffset(r0, r9);
  __ sub(r6, r6, r0);
  __ add(r11, r4, r0);
  __ addi(r11, r11,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));

  // Copy the remaining (unmapped) arguments from the stack into the backing
  // store, walking the stack downwards and the store upwards.
  __ bind(&arguments_loop);
  __ LoadPU(r7, MemOperand(r6, -kPointerSize));
  __ StorePU(r7, MemOperand(r11, kPointerSize));
  __ bdnz(&arguments_loop);

  // Return.
  __ Ret();

  // Do the runtime call to allocate the arguments object.
  // r8 = argument count (tagged)
  __ bind(&runtime);
  __ Push(r4, r6, r8);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
1807 | |
1808 | |
1809 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1543 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
1810 // Return address is in lr. | 1544 // Return address is in lr. |
1811 Label slow; | 1545 Label slow; |
1812 | 1546 |
1813 Register receiver = LoadDescriptor::ReceiverRegister(); | 1547 Register receiver = LoadDescriptor::ReceiverRegister(); |
1814 Register key = LoadDescriptor::NameRegister(); | 1548 Register key = LoadDescriptor::NameRegister(); |
1815 | 1549 |
1816 // Check that the key is an array index, that is Uint32. | 1550 // Check that the key is an array index, that is Uint32. |
1817 __ TestIfPositiveSmi(key, r0); | 1551 __ TestIfPositiveSmi(key, r0); |
1818 __ bne(&slow, cr0); | 1552 __ bne(&slow, cr0); |
(...skipping 3311 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5130 __ SmiTag(r4); | 4864 __ SmiTag(r4); |
5131 __ Push(r3, r5, r4); | 4865 __ Push(r3, r5, r4); |
5132 __ CallRuntime(Runtime::kAllocateInNewSpace); | 4866 __ CallRuntime(Runtime::kAllocateInNewSpace); |
5133 __ mr(r6, r3); | 4867 __ mr(r6, r3); |
5134 __ Pop(r3, r5); | 4868 __ Pop(r3, r5); |
5135 } | 4869 } |
5136 __ b(&done_allocate); | 4870 __ b(&done_allocate); |
5137 } | 4871 } |
5138 } | 4872 } |
5139 | 4873 |
// Builds a sloppy-mode arguments object for the current function directly
// from the caller frame: derives the parameter count/pointer from the shared
// function info, then allocates the JSSloppyArgumentsObject, optional
// parameter map and backing store in one allocation.  Falls back to
// Runtime::kNewSloppyArguments when allocation fails.
void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : function
  //  -- cp : context
  //  -- fp : frame pointer
  //  -- lr : return address
  // -----------------------------------
  __ AssertFunction(r4);

  // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
  // Load the formal parameter count from the shared function info and compute
  // the parameters pointer relative to the frame pointer from it.
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if V8_TARGET_ARCH_PPC64
  // On 64-bit the count was loaded as a 32-bit word, so it still needs
  // smi-tagging here (on 32-bit LoadWordArith yields the tagged value).
  __ SmiTag(r5);
#endif
  __ SmiToPtrArrayOffset(r6, r5);
  __ add(r6, fp, r6);
  __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));

  // r4 : function
  // r5 : number of parameters (tagged)
  // r6 : parameters pointer
  // Registers used over whole function:
  // r8 : arguments count (tagged)
  // r9 : mapped parameter count (tagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ beq(&adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ mr(r8, r5);
  __ mr(r9, r5);
  __ b(&try_allocate);

  // We have an adaptor frame. Patch the parameters pointer so it describes
  // the adapted arguments rather than the formal parameters.
  __ bind(&adaptor_frame);
  __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiToPtrArrayOffset(r6, r8);
  __ add(r6, r6, r7);
  __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));

  // r8 = argument count (tagged)
  // r5 = parameter count (tagged)
  // Compute the mapped parameter count = min(r5, r8) in r9.
  __ cmp(r5, r8);
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ isel(lt, r9, r5, r8);
  } else {
    Label skip;
    __ mr(r9, r5);
    __ blt(&skip);
    __ mr(r9, r8);
    __ bind(&skip);
  }

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  // If there are no mapped parameters, we do not need the parameter_map.
  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ SmiToPtrArrayOffset(r11, r9);
    __ addi(r11, r11, Operand(kParameterMapHeaderSize));
    // NOTE(review): selects size 0 when r9 == 0 -- appears to rely on r0
    // still holding the (zero) Smi literal from CmpSmiLiteral above; verify
    // against the CmpSmiLiteral implementation.
    __ isel(eq, r11, r0, r11);
  } else {
    Label skip2, skip3;
    __ bne(&skip2);
    __ li(r11, Operand::Zero());
    __ b(&skip3);
    __ bind(&skip2);
    __ SmiToPtrArrayOffset(r11, r9);
    __ addi(r11, r11, Operand(kParameterMapHeaderSize));
    __ bind(&skip3);
  }

  // 2. Backing store.
  __ SmiToPtrArrayOffset(r7, r8);
  __ add(r11, r11, r7);
  __ addi(r11, r11, Operand(FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addi(r11, r11, Operand(JSSloppyArgumentsObject::kSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r11, r3, r11, r7, &runtime, TAG_OBJECT);

  // r3 = address of new object(s) (tagged)
  // r5 = argument count (smi-tagged)
  // Get the arguments boilerplate from the current native context into r4.
  const int kNormalOffset =
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  const int kAliasedOffset =
      Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);

  // Pick the aliased-arguments map when there are mapped parameters
  // (r9 != 0), otherwise the plain sloppy-arguments map.
  __ LoadP(r7, NativeContextMemOperand());
  __ cmpi(r9, Operand::Zero());
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ LoadP(r11, MemOperand(r7, kNormalOffset));
    __ LoadP(r7, MemOperand(r7, kAliasedOffset));
    __ isel(eq, r7, r11, r7);
  } else {
    Label skip4, skip5;
    __ bne(&skip4);
    __ LoadP(r7, MemOperand(r7, kNormalOffset));
    __ b(&skip5);
    __ bind(&skip4);
    __ LoadP(r7, MemOperand(r7, kAliasedOffset));
    __ bind(&skip5);
  }

  // r3 = address of new object (tagged)
  // r5 = argument count (smi-tagged)
  // r7 = address of arguments map (tagged)
  // r9 = mapped parameter count (tagged)
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0);
  __ LoadRoot(r11, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r11, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r11, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  // Set up the callee in-object property.
  __ AssertNotSmi(r4);
  __ StoreP(r4, FieldMemOperand(r3, JSSloppyArgumentsObject::kCalleeOffset),
            r0);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(r8);
  __ StoreP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset),
            r0);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, r7 will point there, otherwise
  // it will point to the backing store.
  __ addi(r7, r3, Operand(JSSloppyArgumentsObject::kSize));
  __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  // r3 = address of new object (tagged)
  // r5 = argument count (tagged)
  // r7 = address of parameter map or backing store (tagged)
  // r9 = mapped parameter count (tagged)
  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
  if (CpuFeatures::IsSupported(ISELECT)) {
    // If no map is needed, move the backing store address to r4, where the
    // unmapped-arguments copy loop below expects it.
    __ isel(eq, r4, r7, r4);
    __ beq(&skip_parameter_map);
  } else {
    Label skip6;
    __ bne(&skip6);
    // Move backing store address to r4, because it is
    // expected there when filling in the unmapped arguments.
    __ mr(r4, r7);
    __ b(&skip_parameter_map);
    __ bind(&skip6);
  }

  __ LoadRoot(r8, Heap::kSloppyArgumentsElementsMapRootIndex);
  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
  // Parameter map length = mapped count + 2 (context and backing store slots).
  __ AddSmiLiteral(r8, r9, Smi::FromInt(2), r0);
  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
  __ StoreP(cp, FieldMemOperand(r7, FixedArray::kHeaderSize + 0 * kPointerSize),
            r0);
  // Slot 1 of the parameter map points at the backing store, which starts
  // right after the map's own entries.
  __ SmiToPtrArrayOffset(r8, r9);
  __ add(r8, r8, r7);
  __ addi(r8, r8, Operand(kParameterMapHeaderSize));
  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kHeaderSize + 1 * kPointerSize),
            r0);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop;
  __ mr(r8, r9);
  __ AddSmiLiteral(r11, r5, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0);
  __ sub(r11, r11, r9);
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ SmiToPtrArrayOffset(r4, r8);
  __ add(r4, r4, r7);
  __ addi(r4, r4, Operand(kParameterMapHeaderSize));

  // r4 = address of backing store (tagged)
  // r7 = address of parameter map (tagged)
  // r8 = temporary scratch (a.o., for address calculation)
  // r10 = temporary scratch (a.o., for address calculation)
  // ip = the hole value
  __ SmiUntag(r8);
  __ mtctr(r8);  // loop count (CTR) = mapped parameter count
  __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
  __ add(r10, r4, r8);
  __ add(r8, r7, r8);
  __ addi(r10, r10, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ addi(r8, r8, Operand(kParameterMapHeaderSize - kHeapObjectTag));

  // Per iteration: store the context-slot index (smi in r11) into the
  // parameter map and the hole into the matching backing store slot, walking
  // both arrays downwards via pre-decrement stores.
  __ bind(&parameters_loop);
  __ StorePU(r11, MemOperand(r8, -kPointerSize));
  __ StorePU(ip, MemOperand(r10, -kPointerSize));
  __ AddSmiLiteral(r11, r11, Smi::FromInt(1), r0);
  __ bdnz(&parameters_loop);

  // Restore r8 = argument count (tagged).
  __ LoadP(r8, FieldMemOperand(r3, JSSloppyArgumentsObject::kLengthOffset));

  __ bind(&skip_parameter_map);
  // r3 = address of new object (tagged)
  // r4 = address of backing store (tagged)
  // r8 = argument count (tagged)
  // r9 = mapped parameter count (tagged)
  // r11 = scratch
  // Copy arguments header and remaining slots (if there are any).
  __ LoadRoot(r11, Heap::kFixedArrayMapRootIndex);
  __ StoreP(r11, FieldMemOperand(r4, FixedArray::kMapOffset), r0);
  __ StoreP(r8, FieldMemOperand(r4, FixedArray::kLengthOffset), r0);
  // r11 = number of unmapped arguments; if zero, everything was mapped and
  // we are done.
  __ sub(r11, r8, r9, LeaveOE, SetRC);
  __ Ret(eq, cr0);

  Label arguments_loop;
  __ SmiUntag(r11);
  __ mtctr(r11);  // loop count (CTR) = unmapped argument count

  __ SmiToPtrArrayOffset(r0, r9);
  __ sub(r6, r6, r0);
  __ add(r11, r4, r0);
  __ addi(r11, r11,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));

  // Copy the remaining (unmapped) arguments from the stack into the backing
  // store, walking the stack downwards and the store upwards.
  __ bind(&arguments_loop);
  __ LoadPU(r7, MemOperand(r6, -kPointerSize));
  __ StorePU(r7, MemOperand(r11, kPointerSize));
  __ bdnz(&arguments_loop);

  // Return.
  __ Ret();

  // Do the runtime call to allocate the arguments object.
  // r8 = argument count (tagged)
  __ bind(&runtime);
  __ Push(r4, r6, r8);
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}
| 5125 |
5140 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { | 5126 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { |
5141 // ----------- S t a t e ------------- | 5127 // ----------- S t a t e ------------- |
5142 // -- r4 : function | 5128 // -- r4 : function |
5143 // -- cp : context | 5129 // -- cp : context |
5144 // -- fp : frame pointer | 5130 // -- fp : frame pointer |
5145 // -- lr : return address | 5131 // -- lr : return address |
5146 // ----------------------------------- | 5132 // ----------------------------------- |
5147 __ AssertFunction(r4); | 5133 __ AssertFunction(r4); |
5148 | 5134 |
5149 // For Ignition we need to skip all possible handler/stub frames until | 5135 // For Ignition we need to skip all possible handler/stub frames until |
(...skipping 620 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5770 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, | 5756 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, |
5771 kStackUnwindSpace, NULL, return_value_operand, NULL); | 5757 kStackUnwindSpace, NULL, return_value_operand, NULL); |
5772 } | 5758 } |
5773 | 5759 |
5774 | 5760 |
5775 #undef __ | 5761 #undef __ |
5776 } // namespace internal | 5762 } // namespace internal |
5777 } // namespace v8 | 5763 } // namespace v8 |
5778 | 5764 |
5779 #endif // V8_TARGET_ARCH_PPC | 5765 #endif // V8_TARGET_ARCH_PPC |
OLD | NEW |