| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM | 7 #if V8_TARGET_ARCH_ARM |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 1665 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1676 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); | 1676 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); |
| 1677 } | 1677 } |
| 1678 | 1678 |
| 1679 | 1679 |
| 1680 // Uses registers r0 to r4. | 1680 // Uses registers r0 to r4. |
| 1681 // Expected input (depending on whether args are in registers or on the stack): | 1681 // Expected input (depending on whether args are in registers or on the stack): |
| 1682 // * object: r0 or at sp + 1 * kPointerSize. | 1682 // * object: r0 or at sp + 1 * kPointerSize. |
| 1683 // * function: r1 or at sp. | 1683 // * function: r1 or at sp. |
| 1684 // | 1684 // |
| 1685 // An inlined call site may have been generated before calling this stub. | 1685 // An inlined call site may have been generated before calling this stub. |
| 1686 // In this case the offset to the inline site to patch is passed in r5. | 1686 // In this case the offsets to the inline sites to patch are passed in r5 and r6. |
| 1687 // (See LCodeGen::DoInstanceOfKnownGlobal) | 1687 // (See LCodeGen::DoInstanceOfKnownGlobal) |
| 1688 void InstanceofStub::Generate(MacroAssembler* masm) { | 1688 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 1689 // Call site inlining and patching implies arguments in registers. | 1689 // Call site inlining and patching implies arguments in registers. |
| 1690 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); | 1690 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck()); |
| 1691 // ReturnTrueFalse is only implemented for inlined call sites. | 1691 // ReturnTrueFalse is only implemented for inlined call sites. |
| 1692 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck()); | 1692 ASSERT(!ReturnTrueFalseObject() || HasCallSiteInlineCheck()); |
| 1693 | 1693 |
| 1694 // Fixed register usage throughout the stub: | 1694 // Fixed register usage throughout the stub: |
| 1695 const Register object = r0; // Object (lhs). | 1695 const Register object = r0; // Object (lhs). |
| 1696 Register map = r3; // Map of the object. | 1696 Register map = r3; // Map of the object. |
| 1697 const Register function = r1; // Function (rhs). | 1697 const Register function = r1; // Function (rhs). |
| 1698 const Register prototype = r4; // Prototype of the function. | 1698 const Register prototype = r4; // Prototype of the function. |
| 1699 const Register inline_site = r9; | |
| 1700 const Register scratch = r2; | 1699 const Register scratch = r2; |
| 1701 | 1700 |
| 1702 const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize; | |
| 1703 | |
| 1704 Label slow, loop, is_instance, is_not_instance, not_js_object; | 1701 Label slow, loop, is_instance, is_not_instance, not_js_object; |
| 1705 | 1702 |
| 1706 if (!HasArgsInRegisters()) { | 1703 if (!HasArgsInRegisters()) { |
| 1707 __ ldr(object, MemOperand(sp, 1 * kPointerSize)); | 1704 __ ldr(object, MemOperand(sp, 1 * kPointerSize)); |
| 1708 __ ldr(function, MemOperand(sp, 0)); | 1705 __ ldr(function, MemOperand(sp, 0)); |
| 1709 } | 1706 } |
| 1710 | 1707 |
| 1711 // Check that the left hand is a JS object and load map. | 1708 // Check that the left hand is a JS object and load map. |
| 1712 __ JumpIfSmi(object, ¬_js_object); | 1709 __ JumpIfSmi(object, ¬_js_object); |
| 1713 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); | 1710 __ IsObjectJSObjectType(object, map, scratch, ¬_js_object); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 1735 | 1732 |
| 1736 // Update the global instanceof or call site inlined cache with the current | 1733 // Update the global instanceof or call site inlined cache with the current |
| 1737 // map and function. The cached answer will be set when it is known below. | 1734 // map and function. The cached answer will be set when it is known below. |
| 1738 if (!HasCallSiteInlineCheck()) { | 1735 if (!HasCallSiteInlineCheck()) { |
| 1739 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); | 1736 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); |
| 1740 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); | 1737 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); |
| 1741 } else { | 1738 } else { |
| 1742 ASSERT(HasArgsInRegisters()); | 1739 ASSERT(HasArgsInRegisters()); |
| 1743 // Patch the (relocated) inlined map check. | 1740 // Patch the (relocated) inlined map check. |
| 1744 | 1741 |
| 1745 // The offset was stored in r5 | 1742 // The map_load_offset was stored in r5 |
| 1746 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). | 1743 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). |
| 1747 const Register offset = r5; | 1744 const Register map_load_offset = r5; |
| 1748 __ sub(inline_site, lr, offset); | 1745 __ sub(r9, lr, map_load_offset); |
| 1749 // Get the map location in r5 and patch it. | 1746 // Get the map location in r5 and patch it. |
| 1750 __ GetRelocatedValueLocation(inline_site, offset); | 1747 __ GetRelocatedValueLocation(r9, map_load_offset, scratch); |
| 1751 __ ldr(offset, MemOperand(offset)); | 1748 __ ldr(map_load_offset, MemOperand(map_load_offset)); |
| 1752 __ str(map, FieldMemOperand(offset, Cell::kValueOffset)); | 1749 __ str(map, FieldMemOperand(map_load_offset, Cell::kValueOffset)); |
| 1753 } | 1750 } |
| 1754 | 1751 |
| 1755 // Register mapping: r3 is object map and r4 is function prototype. | 1752 // Register mapping: r3 is object map and r4 is function prototype. |
| 1756 // Get prototype of object into r2. | 1753 // Get prototype of object into r2. |
| 1757 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); | 1754 __ ldr(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); |
| 1758 | 1755 |
| 1759 // We don't need map any more. Use it as a scratch register. | 1756 // We don't need map any more. Use it as a scratch register. |
| 1760 Register scratch2 = map; | 1757 Register scratch2 = map; |
| 1761 map = no_reg; | 1758 map = no_reg; |
| 1762 | 1759 |
| 1763 // Loop through the prototype chain looking for the function prototype. | 1760 // Loop through the prototype chain looking for the function prototype. |
| 1764 __ LoadRoot(scratch2, Heap::kNullValueRootIndex); | 1761 __ LoadRoot(scratch2, Heap::kNullValueRootIndex); |
| 1765 __ bind(&loop); | 1762 __ bind(&loop); |
| 1766 __ cmp(scratch, Operand(prototype)); | 1763 __ cmp(scratch, Operand(prototype)); |
| 1767 __ b(eq, &is_instance); | 1764 __ b(eq, &is_instance); |
| 1768 __ cmp(scratch, scratch2); | 1765 __ cmp(scratch, scratch2); |
| 1769 __ b(eq, &is_not_instance); | 1766 __ b(eq, &is_not_instance); |
| 1770 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); | 1767 __ ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); |
| 1771 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); | 1768 __ ldr(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); |
| 1772 __ jmp(&loop); | 1769 __ jmp(&loop); |
| 1773 | 1770 |
| 1774 __ bind(&is_instance); | 1771 __ bind(&is_instance); |
| 1775 if (!HasCallSiteInlineCheck()) { | 1772 if (!HasCallSiteInlineCheck()) { |
| 1776 __ mov(r0, Operand(Smi::FromInt(0))); | 1773 __ mov(r0, Operand(Smi::FromInt(0))); |
| 1777 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | 1774 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| 1778 } else { | 1775 } else { |
| 1779 // Patch the call site to return true. | 1776 // Patch the call site to return true. |
| 1780 __ LoadRoot(r0, Heap::kTrueValueRootIndex); | 1777 __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
| 1781 __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); | 1778 // The bool_load_offset was stored in r6 |
| 1779 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). |
| 1780 const Register bool_load_offset = r6; |
| 1781 __ sub(r9, lr, bool_load_offset); |
| 1782 // Get the boolean result location in scratch and patch it. | 1782 // Get the boolean result location in scratch and patch it. |
| 1783 __ GetRelocatedValueLocation(inline_site, scratch); | 1783 __ GetRelocatedValueLocation(r9, scratch, scratch2); |
| 1784 __ str(r0, MemOperand(scratch)); | 1784 __ str(r0, MemOperand(scratch)); |
| 1785 | 1785 |
| 1786 if (!ReturnTrueFalseObject()) { | 1786 if (!ReturnTrueFalseObject()) { |
| 1787 __ mov(r0, Operand(Smi::FromInt(0))); | 1787 __ mov(r0, Operand(Smi::FromInt(0))); |
| 1788 } | 1788 } |
| 1789 } | 1789 } |
| 1790 __ Ret(HasArgsInRegisters() ? 0 : 2); | 1790 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 1791 | 1791 |
| 1792 __ bind(&is_not_instance); | 1792 __ bind(&is_not_instance); |
| 1793 if (!HasCallSiteInlineCheck()) { | 1793 if (!HasCallSiteInlineCheck()) { |
| 1794 __ mov(r0, Operand(Smi::FromInt(1))); | 1794 __ mov(r0, Operand(Smi::FromInt(1))); |
| 1795 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); | 1795 __ StoreRoot(r0, Heap::kInstanceofCacheAnswerRootIndex); |
| 1796 } else { | 1796 } else { |
| 1797 // Patch the call site to return false. | 1797 // Patch the call site to return false. |
| 1798 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | 1798 __ LoadRoot(r0, Heap::kFalseValueRootIndex); |
| 1799 __ add(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); | 1799 // The bool_load_offset was stored in r6 |
| 1800 // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). |
| 1801 const Register bool_load_offset = r6; |
| 1802 __ sub(r9, lr, bool_load_offset); |
| 1803 |
| 1800 // Get the boolean result location in scratch and patch it. | 1804 // Get the boolean result location in scratch and patch it. |
| 1801 __ GetRelocatedValueLocation(inline_site, scratch); | 1805 __ GetRelocatedValueLocation(r9, scratch, scratch2); |
| 1802 __ str(r0, MemOperand(scratch)); | 1806 __ str(r0, MemOperand(scratch)); |
| 1803 | 1807 |
| 1804 if (!ReturnTrueFalseObject()) { | 1808 if (!ReturnTrueFalseObject()) { |
| 1805 __ mov(r0, Operand(Smi::FromInt(1))); | 1809 __ mov(r0, Operand(Smi::FromInt(1))); |
| 1806 } | 1810 } |
| 1807 } | 1811 } |
| 1808 __ Ret(HasArgsInRegisters() ? 0 : 2); | 1812 __ Ret(HasArgsInRegisters() ? 0 : 2); |
| 1809 | 1813 |
| 1810 Label object_not_null, object_not_null_or_smi; | 1814 Label object_not_null, object_not_null_or_smi; |
| 1811 __ bind(¬_js_object); | 1815 __ bind(¬_js_object); |
| (...skipping 3255 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5067 MemOperand(fp, 6 * kPointerSize), | 5071 MemOperand(fp, 6 * kPointerSize), |
| 5068 NULL); | 5072 NULL); |
| 5069 } | 5073 } |
| 5070 | 5074 |
| 5071 | 5075 |
| 5072 #undef __ | 5076 #undef __ |
| 5073 | 5077 |
| 5074 } } // namespace v8::internal | 5078 } } // namespace v8::internal |
| 5075 | 5079 |
| 5076 #endif // V8_TARGET_ARCH_ARM | 5080 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |