Chromium Code Reviews

Side by Side Diff: src/ppc/code-stubs-ppc.cc

Issue 1374423003: Version 4.7.80.4 (cherry-pick) (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@4.7
Patch Set: Created 5 years, 2 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_PPC

 #include "src/base/bits.h"
 #include "src/bootstrapper.h"
 #include "src/code-stubs.h"
 #include "src/codegen.h"
(...skipping 1563 matching lines...)

   // Slow-case: Handle non-smi or out-of-bounds access to arguments
   // by calling the runtime system.
   __ bind(&slow);
   __ push(r4);
   __ TailCallRuntime(Runtime::kArguments, 1, 1);
 }


 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
-  // sp[0] : number of parameters
-  // sp[1] : receiver displacement
-  // sp[2] : function
+  // r4 : function
+  // r5 : number of parameters (tagged)
+  // r6 : parameters pointer
+
+  DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
+  DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count()));
+  DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

   // Check if the calling frame is an arguments adaptor frame.
   Label runtime;
-  __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-  __ LoadP(r5, MemOperand(r6, StandardFrameConstants::kContextOffset));
-  STATIC_ASSERT(StackFrame::ARGUMENTS_ADAPTOR < 0x3fffu);
-  __ CmpSmiLiteral(r5, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
+  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
+  __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
   __ bne(&runtime);

   // Patch the arguments.length and the parameters pointer in the current frame.
-  __ LoadP(r5, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ StoreP(r5, MemOperand(sp, 0 * kPointerSize));
-  __ SmiToPtrArrayOffset(r5, r5);
-  __ add(r6, r6, r5);
+  __ LoadP(r5, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ SmiToPtrArrayOffset(r6, r5);
+  __ add(r6, r6, r7);
   __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
-  __ StoreP(r6, MemOperand(sp, 1 * kPointerSize));

   __ bind(&runtime);
+  __ Push(r4, r6, r5);
   __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
 }
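Note: the three DCHECKs above pin down the new calling convention. Instead of reading the parameter count, receiver displacement, and function off the stack, the stub now receives the function in r4, the tagged parameter count in r5, and the parameters pointer in r6, and the runtime fallback pushes exactly those three registers before the tail call. The register assignments themselves would come from the platform descriptor, presumably defined in src/ppc/interface-descriptors-ppc.cc (another file in this change, not shown here). A minimal sketch of what that definition plausibly looks like, following the usual InitializePlatformSpecific pattern; the signature is an assumption, only the r4/r5/r6 assignment is confirmed by the DCHECKs:

    // Hypothetical sketch; not part of this diff.
    void ArgumentsAccessNewDescriptor::InitializePlatformSpecific(
        CallInterfaceDescriptorData* data) {
      // r4: function, r5: parameter count (tagged), r6: parameters pointer,
      // matching the DCHECKs in the arguments stubs.
      Register registers[] = {r4, r5, r6};
      data->InitializePlatformSpecific(arraysize(registers), registers);
    }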


 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
-  // Stack layout:
-  // sp[0] : number of parameters (tagged)
-  // sp[1] : address of receiver argument
-  // sp[2] : function
+  // r4 : function
+  // r5 : number of parameters (tagged)
+  // r6 : parameters pointer
   // Registers used over whole function:
-  // r9 : allocated object (tagged)
-  // r11 : mapped parameter count (tagged)
+  // r8 : arguments count (tagged)
+  // r9 : mapped parameter count (tagged)

-  __ LoadP(r4, MemOperand(sp, 0 * kPointerSize));
-  // r4 = parameter count (tagged)
+  DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
+  DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count()));
+  DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

   // Check if the calling frame is an arguments adaptor frame.
-  Label runtime;
-  Label adaptor_frame, try_allocate;
-  __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-  __ LoadP(r5, MemOperand(r6, StandardFrameConstants::kContextOffset));
-  STATIC_ASSERT(StackFrame::ARGUMENTS_ADAPTOR < 0x3fffu);
-  __ CmpSmiLiteral(r5, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
+  Label adaptor_frame, try_allocate, runtime;
+  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
+  __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
   __ beq(&adaptor_frame);

   // No adaptor, parameter count = argument count.
-  __ mr(r5, r4);
+  __ mr(r8, r5);
+  __ mr(r9, r5);
   __ b(&try_allocate);

   // We have an adaptor frame. Patch the parameters pointer.
   __ bind(&adaptor_frame);
-  __ LoadP(r5, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ SmiToPtrArrayOffset(r7, r5);
+  __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ SmiToPtrArrayOffset(r6, r8);
   __ add(r6, r6, r7);
   __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
-  __ StoreP(r6, MemOperand(sp, 1 * kPointerSize));

-  // r4 = parameter count (tagged)
-  // r5 = argument count (tagged)
-  // Compute the mapped parameter count = min(r4, r5) in r4.
-  __ cmp(r4, r5);
+  // r8 = argument count (tagged)
+  // r9 = parameter count (tagged)
+  // Compute the mapped parameter count = min(r5, r8) in r9.
+  __ cmp(r5, r8);
   if (CpuFeatures::IsSupported(ISELECT)) {
-    __ isel(lt, r4, r4, r5);
+    __ isel(lt, r9, r5, r8);
   } else {
     Label skip;
+    __ mr(r9, r5);
     __ blt(&skip);
-    __ mr(r4, r5);
+    __ mr(r9, r8);
     __ bind(&skip);
   }

   __ bind(&try_allocate);

   // Compute the sizes of backing store, parameter map, and arguments object.
   // 1. Parameter map, has 2 extra words containing context and backing store.
   const int kParameterMapHeaderSize =
       FixedArray::kHeaderSize + 2 * kPointerSize;
   // If there are no mapped parameters, we do not need the parameter_map.
-  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
+  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
   if (CpuFeatures::IsSupported(ISELECT)) {
-    __ SmiToPtrArrayOffset(r11, r4);
+    __ SmiToPtrArrayOffset(r11, r9);
     __ addi(r11, r11, Operand(kParameterMapHeaderSize));
     __ isel(eq, r11, r0, r11);
   } else {
     Label skip2, skip3;
     __ bne(&skip2);
     __ li(r11, Operand::Zero());
     __ b(&skip3);
     __ bind(&skip2);
-    __ SmiToPtrArrayOffset(r11, r4);
+    __ SmiToPtrArrayOffset(r11, r9);
     __ addi(r11, r11, Operand(kParameterMapHeaderSize));
     __ bind(&skip3);
   }

   // 2. Backing store.
-  __ SmiToPtrArrayOffset(r7, r5);
+  __ SmiToPtrArrayOffset(r7, r8);
   __ add(r11, r11, r7);
   __ addi(r11, r11, Operand(FixedArray::kHeaderSize));

   // 3. Arguments object.
   __ addi(r11, r11, Operand(Heap::kSloppyArgumentsObjectSize));

   // Do the allocation of all three objects in one go.
-  __ Allocate(r11, r3, r6, r7, &runtime, TAG_OBJECT);
+  __ Allocate(r11, r3, r7, r11, &runtime, TAG_OBJECT);

   // r3 = address of new object(s) (tagged)
   // r5 = argument count (smi-tagged)
   // Get the arguments boilerplate from the current native context into r4.
   const int kNormalOffset =
       Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
   const int kAliasedOffset =
       Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);

   __ LoadP(r7,
            MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ LoadP(r7, FieldMemOperand(r7, GlobalObject::kNativeContextOffset));
-  __ cmpi(r4, Operand::Zero());
+  __ cmpi(r9, Operand::Zero());
   if (CpuFeatures::IsSupported(ISELECT)) {
     __ LoadP(r11, MemOperand(r7, kNormalOffset));
     __ LoadP(r7, MemOperand(r7, kAliasedOffset));
     __ isel(eq, r7, r11, r7);
   } else {
     Label skip4, skip5;
     __ bne(&skip4);
     __ LoadP(r7, MemOperand(r7, kNormalOffset));
     __ b(&skip5);
     __ bind(&skip4);
     __ LoadP(r7, MemOperand(r7, kAliasedOffset));
     __ bind(&skip5);
   }

   // r3 = address of new object (tagged)
-  // r4 = mapped parameter count (tagged)
   // r5 = argument count (smi-tagged)
   // r7 = address of arguments map (tagged)
+  // r9 = mapped parameter count (tagged)
   __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0);
-  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
-  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
-  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
+  __ LoadRoot(r11, Heap::kEmptyFixedArrayRootIndex);
+  __ StoreP(r11, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
+  __ StoreP(r11, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

   // Set up the callee in-object property.
   STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
-  __ LoadP(r6, MemOperand(sp, 2 * kPointerSize));
-  __ AssertNotSmi(r6);
+  __ AssertNotSmi(r4);
   const int kCalleeOffset =
       JSObject::kHeaderSize + Heap::kArgumentsCalleeIndex * kPointerSize;
-  __ StoreP(r6, FieldMemOperand(r3, kCalleeOffset), r0);
+  __ StoreP(r4, FieldMemOperand(r3, kCalleeOffset), r0);

   // Use the length (smi tagged) and set that as an in-object property too.
-  __ AssertSmi(r5);
+  __ AssertSmi(r8);
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
   const int kLengthOffset =
       JSObject::kHeaderSize + Heap::kArgumentsLengthIndex * kPointerSize;
-  __ StoreP(r5, FieldMemOperand(r3, kLengthOffset), r0);
+  __ StoreP(r8, FieldMemOperand(r3, kLengthOffset), r0);

   // Set up the elements pointer in the allocated arguments object.
   // If we allocated a parameter map, r7 will point there, otherwise
   // it will point to the backing store.
   __ addi(r7, r3, Operand(Heap::kSloppyArgumentsObjectSize));
   __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

   // r3 = address of new object (tagged)
-  // r4 = mapped parameter count (tagged)
   // r5 = argument count (tagged)
   // r7 = address of parameter map or backing store (tagged)
+  // r9 = mapped parameter count (tagged)
   // Initialize parameter map. If there are no mapped arguments, we're done.
   Label skip_parameter_map;
-  __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
+  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
   if (CpuFeatures::IsSupported(ISELECT)) {
-    __ isel(eq, r6, r7, r6);
+    __ isel(eq, r4, r7, r4);
     __ beq(&skip_parameter_map);
   } else {
     Label skip6;
     __ bne(&skip6);
-    // Move backing store address to r6, because it is
+    // Move backing store address to r4, because it is
     // expected there when filling in the unmapped arguments.
-    __ mr(r6, r7);
+    __ mr(r4, r7);
     __ b(&skip_parameter_map);
     __ bind(&skip6);
   }

-  __ LoadRoot(r9, Heap::kSloppyArgumentsElementsMapRootIndex);
-  __ StoreP(r9, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
-  __ AddSmiLiteral(r9, r4, Smi::FromInt(2), r0);
-  __ StoreP(r9, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
+  __ LoadRoot(r8, Heap::kSloppyArgumentsElementsMapRootIndex);
+  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
+  __ AddSmiLiteral(r8, r9, Smi::FromInt(2), r0);
+  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
   __ StoreP(cp, FieldMemOperand(r7, FixedArray::kHeaderSize + 0 * kPointerSize),
             r0);
-  __ SmiToPtrArrayOffset(r9, r4);
-  __ add(r9, r7, r9);
-  __ addi(r9, r9, Operand(kParameterMapHeaderSize));
-  __ StoreP(r9, FieldMemOperand(r7, FixedArray::kHeaderSize + 1 * kPointerSize),
+  __ SmiToPtrArrayOffset(r8, r9);
+  __ add(r8, r8, r7);
+  __ addi(r8, r8, Operand(kParameterMapHeaderSize));
+  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kHeaderSize + 1 * kPointerSize),
             r0);

   // Copy the parameter slots and the holes in the arguments.
   // We need to fill in mapped_parameter_count slots. They index the context,
   // where parameters are stored in reverse order, at
   //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
   // The mapped parameter thus need to get indices
   //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
   //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
   // We loop from right to left.
-  Label parameters_loop, parameters_test;
-  __ mr(r9, r4);
-  __ LoadP(r11, MemOperand(sp, 0 * kPointerSize));
-  __ AddSmiLiteral(r11, r11, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0);
-  __ sub(r11, r11, r4);
-  __ LoadRoot(r10, Heap::kTheHoleValueRootIndex);
-  __ SmiToPtrArrayOffset(r6, r9);
-  __ add(r6, r7, r6);
-  __ addi(r6, r6, Operand(kParameterMapHeaderSize));
+  Label parameters_loop;
+  __ mr(r8, r9);
+  __ AddSmiLiteral(r11, r5, Smi::FromInt(Context::MIN_CONTEXT_SLOTS), r0);
+  __ sub(r11, r11, r9);
+  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+  __ SmiToPtrArrayOffset(r4, r8);
+  __ add(r4, r4, r7);
+  __ addi(r4, r4, Operand(kParameterMapHeaderSize));

-  // r9 = loop variable (tagged)
-  // r4 = mapping index (tagged)
-  // r6 = address of backing store (tagged)
+  // r4 = address of backing store (tagged)
   // r7 = address of parameter map (tagged)
   // r8 = temporary scratch (a.o., for address calculation)
-  // r10 = the hole value
-  __ b(&parameters_test);
+  // r10 = temporary scratch (a.o., for address calculation)
+  // ip = the hole value
+  __ SmiUntag(r8);
+  __ mtctr(r8);
+  __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
+  __ add(r10, r4, r8);
+  __ add(r8, r7, r8);
+  __ addi(r10, r10, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+  __ addi(r8, r8, Operand(kParameterMapHeaderSize - kHeapObjectTag));

   __ bind(&parameters_loop);
-  __ SubSmiLiteral(r9, r9, Smi::FromInt(1), r0);
-  __ SmiToPtrArrayOffset(r8, r9);
-  __ addi(r8, r8, Operand(kParameterMapHeaderSize - kHeapObjectTag));
-  __ StorePX(r11, MemOperand(r8, r7));
-  __ subi(r8, r8, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
-  __ StorePX(r10, MemOperand(r8, r6));
+  __ StorePU(r11, MemOperand(r8, -kPointerSize));
+  __ StorePU(ip, MemOperand(r10, -kPointerSize));
   __ AddSmiLiteral(r11, r11, Smi::FromInt(1), r0);
-  __ bind(&parameters_test);
-  __ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
-  __ bne(&parameters_loop);
+  __ bdnz(&parameters_loop);
+
+  // Restore r8 = argument count (tagged).
+  __ LoadP(r8, FieldMemOperand(r3, kLengthOffset));

   __ bind(&skip_parameter_map);
-  // r5 = argument count (tagged)
-  // r6 = address of backing store (tagged)
-  // r8 = scratch
+  // r3 = address of new object (tagged)
+  // r4 = address of backing store (tagged)
+  // r8 = argument count (tagged)
+  // r9 = mapped parameter count (tagged)
+  // r11 = scratch
   // Copy arguments header and remaining slots (if there are any).
-  __ LoadRoot(r8, Heap::kFixedArrayMapRootIndex);
-  __ StoreP(r8, FieldMemOperand(r6, FixedArray::kMapOffset), r0);
-  __ StoreP(r5, FieldMemOperand(r6, FixedArray::kLengthOffset), r0);
+  __ LoadRoot(r11, Heap::kFixedArrayMapRootIndex);
+  __ StoreP(r11, FieldMemOperand(r4, FixedArray::kMapOffset), r0);
+  __ StoreP(r8, FieldMemOperand(r4, FixedArray::kLengthOffset), r0);
+  __ sub(r11, r8, r9, LeaveOE, SetRC);
+  __ Ret(eq, cr0);

-  Label arguments_loop, arguments_test;
-  __ mr(r11, r4);
-  __ LoadP(r7, MemOperand(sp, 1 * kPointerSize));
-  __ SmiToPtrArrayOffset(r8, r11);
-  __ sub(r7, r7, r8);
-  __ b(&arguments_test);
+  Label arguments_loop;
+  __ SmiUntag(r11);
+  __ mtctr(r11);
+
+  __ SmiToPtrArrayOffset(r0, r9);
+  __ sub(r6, r6, r0);
+  __ add(r11, r4, r0);
+  __ addi(r11, r11,
+          Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));

   __ bind(&arguments_loop);
-  __ subi(r7, r7, Operand(kPointerSize));
-  __ LoadP(r9, MemOperand(r7, 0));
-  __ SmiToPtrArrayOffset(r8, r11);
-  __ add(r8, r6, r8);
-  __ StoreP(r9, FieldMemOperand(r8, FixedArray::kHeaderSize), r0);
-  __ AddSmiLiteral(r11, r11, Smi::FromInt(1), r0);
+  __ LoadPU(r7, MemOperand(r6, -kPointerSize));
+  __ StorePU(r7, MemOperand(r11, kPointerSize));
+  __ bdnz(&arguments_loop);

-  __ bind(&arguments_test);
-  __ cmp(r11, r5);
-  __ blt(&arguments_loop);
-
-  // Return and remove the on-stack parameters.
-  __ addi(sp, sp, Operand(3 * kPointerSize));
+  // Return.
   __ Ret();

   // Do the runtime call to allocate the arguments object.
-  // r5 = argument count (tagged)
+  // r8 = argument count (tagged)
   __ bind(&runtime);
-  __ StoreP(r5, MemOperand(sp, 0 * kPointerSize));  // Patch argument count.
+  __ Push(r4, r6, r8);
   __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
 }
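Two details of the rewritten fast path are worth spelling out. The parameter map written above is a FixedArray of length mapped_parameter_count + 2: header slot 0 holds the context (cp), header slot 1 holds the address of the backing store, and each remaining slot holds either a Smi context index or the hole. And both copy loops now run on the PowerPC count register rather than on a tagged Smi loop variable with an explicit compare and branch: the trip count is loaded once with mtctr, update-form memory ops (LoadPU/StorePU) advance the pointers as a side effect, and bdnz decrements ctr and branches in one instruction. A minimal sketch of the loop idiom in the same MacroAssembler style (register names here are illustrative, not the stub's actual allocation):

    // Copy 'count' (untagged, known non-zero) pointer-size slots, walking
    // the source down and the destination up, as in the loops above.
    Label loop;
    __ mtctr(count);                                     // trip count -> ctr
    __ bind(&loop);
    __ LoadPU(scratch, MemOperand(src, -kPointerSize));  // pre-decrement load
    __ StorePU(scratch, MemOperand(dst, kPointerSize));  // pre-increment store
    __ bdnz(&loop);                                      // dec ctr, branch if != 0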


 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
   // Return address is in lr.
   Label slow;

   Register receiver = LoadDescriptor::ReceiverRegister();
   Register key = LoadDescriptor::NameRegister();

   // Check that the key is an array index, that is Uint32.
   __ TestIfPositiveSmi(key, r0);
   __ bne(&slow, cr0);

   // Everything is fine, call runtime.
   __ Push(receiver, key);  // Receiver, key.

   // Perform tail call to the entry.
   __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);

   __ bind(&slow);
   PropertyAccessCompiler::TailCallBuiltin(
       masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
 }


 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
-  // sp[0] : number of parameters
-  // sp[4] : receiver displacement
-  // sp[8] : function
+  // r4 : function
+  // r5 : number of parameters (tagged)
+  // r6 : parameters pointer
+
+  DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
+  DCHECK(r5.is(ArgumentsAccessNewDescriptor::parameter_count()));
+  DCHECK(r6.is(ArgumentsAccessNewDescriptor::parameter_pointer()));
+
   // Check if the calling frame is an arguments adaptor frame.
-  Label adaptor_frame, try_allocate, runtime;
-  __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
-  __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
-  STATIC_ASSERT(StackFrame::ARGUMENTS_ADAPTOR < 0x3fffu);
-  __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
-  __ beq(&adaptor_frame);
-
-  // Get the length from the frame.
-  __ LoadP(r4, MemOperand(sp, 0));
-  __ b(&try_allocate);
+  Label try_allocate, runtime;
+  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
+  __ LoadP(r3, MemOperand(r7, StandardFrameConstants::kContextOffset));
+  __ CmpSmiLiteral(r3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
+  __ bne(&try_allocate);

   // Patch the arguments.length and the parameters pointer.
-  __ bind(&adaptor_frame);
-  __ LoadP(r4, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
-  __ StoreP(r4, MemOperand(sp, 0));
-  __ SmiToPtrArrayOffset(r6, r4);
-  __ add(r6, r5, r6);
+  __ LoadP(r5, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
+  __ SmiToPtrArrayOffset(r6, r5);
+  __ add(r6, r6, r7);
   __ addi(r6, r6, Operand(StandardFrameConstants::kCallerSPOffset));
-  __ StoreP(r6, MemOperand(sp, 1 * kPointerSize));

   // Try the new space allocation. Start out with computing the size
   // of the arguments object and the elements array in words.
   Label add_arguments_object;
   __ bind(&try_allocate);
-  __ cmpi(r4, Operand::Zero());
-  __ beq(&add_arguments_object);
-  __ SmiUntag(r4);
-  __ addi(r4, r4, Operand(FixedArray::kHeaderSize / kPointerSize));
+  __ SmiUntag(r11, r5, SetRC);
+  __ beq(&add_arguments_object, cr0);
+  __ addi(r11, r11, Operand(FixedArray::kHeaderSize / kPointerSize));
   __ bind(&add_arguments_object);
-  __ addi(r4, r4, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));
+  __ addi(r11, r11, Operand(Heap::kStrictArgumentsObjectSize / kPointerSize));

   // Do the allocation of both objects in one go.
-  __ Allocate(r4, r3, r5, r6, &runtime,
+  __ Allocate(r11, r3, r7, r8, &runtime,
               static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

   // Get the arguments boilerplate from the current native context.
   __ LoadP(r7,
            MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
   __ LoadP(r7, FieldMemOperand(r7, GlobalObject::kNativeContextOffset));
   __ LoadP(
       r7,
       MemOperand(r7, Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX)));

   __ StoreP(r7, FieldMemOperand(r3, JSObject::kMapOffset), r0);
-  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
-  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
-  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
+  __ LoadRoot(r8, Heap::kEmptyFixedArrayRootIndex);
+  __ StoreP(r8, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
+  __ StoreP(r8, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

   // Get the length (smi tagged) and set that as an in-object property too.
   STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
-  __ LoadP(r4, MemOperand(sp, 0 * kPointerSize));
-  __ AssertSmi(r4);
-  __ StoreP(r4,
+  __ AssertSmi(r5);
+  __ StoreP(r5,
             FieldMemOperand(r3, JSObject::kHeaderSize +
                                     Heap::kArgumentsLengthIndex * kPointerSize),
             r0);

   // If there are no actual arguments, we're done.
-  Label done;
-  __ cmpi(r4, Operand::Zero());
-  __ beq(&done);
-
-  // Get the parameters pointer from the stack.
-  __ LoadP(r5, MemOperand(sp, 1 * kPointerSize));
+  __ SmiUntag(r9, r5, SetRC);
+  __ Ret(eq, cr0);

   // Set up the elements pointer in the allocated arguments object and
   // initialize the header in the elements fixed array.
   __ addi(r7, r3, Operand(Heap::kStrictArgumentsObjectSize));
   __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
-  __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
-  __ StoreP(r6, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
-  __ StoreP(r4, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);
-  // Untag the length for the loop.
-  __ SmiUntag(r4);
+  __ LoadRoot(r8, Heap::kFixedArrayMapRootIndex);
+  __ StoreP(r8, FieldMemOperand(r7, FixedArray::kMapOffset), r0);
+  __ StoreP(r5, FieldMemOperand(r7, FixedArray::kLengthOffset), r0);

   // Copy the fixed array slots.
   Label loop;
   // Set up r7 to point just prior to the first array slot.
   __ addi(r7, r7,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
-  __ mtctr(r4);
+  __ mtctr(r9);
   __ bind(&loop);
-  // Pre-decrement r5 with kPointerSize on each iteration.
+  // Pre-decrement r6 with kPointerSize on each iteration.
   // Pre-decrement in order to skip receiver.
-  __ LoadPU(r6, MemOperand(r5, -kPointerSize));
+  __ LoadPU(r8, MemOperand(r6, -kPointerSize));
   // Pre-increment r7 with kPointerSize on each iteration.
-  __ StorePU(r6, MemOperand(r7, kPointerSize));
+  __ StorePU(r8, MemOperand(r7, kPointerSize));
   __ bdnz(&loop);

-  // Return and remove the on-stack parameters.
-  __ bind(&done);
-  __ addi(sp, sp, Operand(3 * kPointerSize));
+  // Return.
   __ Ret();

   // Do the runtime call to allocate the arguments object.
   __ bind(&runtime);
+  __ Push(r4, r6, r5);
   __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
 }
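The strict-mode stub shows the same PPC idioms in compact form. SmiUntag(r11, r5, SetRC) untags the count and sets condition register field cr0 as a side effect, so beq(&add_arguments_object, cr0) and Ret(eq, cr0) fold the old explicit cmpi against zero into the untag itself. Likewise, SmiToPtrArrayOffset turns a tagged count straight into a byte offset. A worked example, assuming the 64-bit Smi layout where Smi(n) keeps n in the upper 32 bits:

    // Smi(5)        = 5 << 32
    // wanted offset = 5 * kPointerSize = 5 << 3 = 40 bytes
    // One arithmetic right shift by 32 - 3 = 29 does tag removal and scaling
    // at once: (5 << 32) >> 29 == 40, with no separate SmiUntag step.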


 void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Just jump directly to runtime if native RegExp is not selected at compile
   // time or if regexp entry in generated code is turned off runtime switch or
   // at compilation.
 #ifdef V8_INTERPRETED_REGEXP
   __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
(...skipping 697 matching lines...)
   __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
   __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ JumpToJSEntry(ip);

   __ bind(&non_function);
   __ mr(r6, r4);
   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }


-static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
-  __ LoadP(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
-  __ LoadP(vector,
-           FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
-  __ LoadP(vector,
-           FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
-}
-
-
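The static helper deleted above has not disappeared: the trampoline stubs further down now invoke it as __ EmitLoadTypeFeedbackVector(...), so it has presumably been promoted to a MacroAssembler method (in src/ppc/macro-assembler-ppc.h/.cc, which this diff does not show). A sketch of the moved method, assuming it keeps the deleted helper's body: walk from the frame's function slot to the SharedFunctionInfo, and from there to the type feedback vector.

    // Assumed location and signature; the body matches the deleted helper.
    void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
      LoadP(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      LoadP(vector,
            FieldMemOperand(vector, JSFunction::kSharedFunctionInfoOffset));
      LoadP(vector,
            FieldMemOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
    }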
 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
   // r4 - function
   // r6 - slot id
   // r5 - vector
   // r7 - allocation site (loaded from vector[slot])
   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
   __ cmp(r4, r8);
   __ bne(miss);

   __ mov(r3, Operand(arg_count()));
(...skipping 1755 matching lines...)
     __ addi(r4, r4, Operand(1));
   }
   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
   __ slwi(r4, r4, Operand(kPointerSizeLog2));
   __ add(sp, sp, r4);
   __ Ret();
 }


 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   LoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }


 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
   KeyedLoadICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }


 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, r5);
+  __ EmitLoadTypeFeedbackVector(r5);
   CallICStub stub(isolate(), state());
   __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }


 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
   GenerateImpl(masm, true);
(...skipping 205 matching lines...)
   __ bind(&miss);
   KeyedLoadIC::GenerateMiss(masm);

   __ bind(&load_smi_map);
   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
   __ b(&compare_map);
 }


 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }


 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
-  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
+  __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
   VectorKeyedStoreICStub stub(isolate(), state());
   stub.GenerateForTrampoline(masm);
 }


 void VectorStoreICStub::Generate(MacroAssembler* masm) {
   GenerateImpl(masm, false);
 }

(...skipping 1110 matching lines...)
                              kStackUnwindSpace, NULL,
                              MemOperand(fp, 6 * kPointerSize), NULL);
 }


 #undef __
 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_PPC
