OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1658 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1669 // Return the generated code. | 1669 // Return the generated code. |
1670 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); | 1670 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); |
1671 } | 1671 } |
1672 | 1672 |
1673 | 1673 |
// Tries to compile a specialized call stub for Math.floor(x) using the VFP3
// round-towards-minus-infinity conversion. Returns Heap::undefined_value()
// to bail out to the regular (generic) call path when the fast path does not
// apply: no VFP3 support, receiver is not a JSObject, or argc != 1.
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- r2 : function name
  //  -- lr : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- sp[argc * 4] : receiver
  // -----------------------------------

  // The fast path relies on the VFP3 coprocessor for the double-to-integer
  // conversion; without it, use the generic call stub.
  if (!CpuFeatures::IsSupported(VFP3)) return Heap::undefined_value();
  CpuFeatures::Scope scope_vfp3(VFP3);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return Heap::undefined_value();

  Label miss, slow;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global call: load the receiver (sp[argc * 4] with argc == 1)
    // and check it against the expected prototype chain.
    __ ldr(r1, MemOperand(sp, 1 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ BranchOnSmi(r1, &miss);

    CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
                    &miss);
  } else {
    // Global call: verify the global receiver and that the property cell
    // still holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into r0.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));

  // If the argument is a smi, it is already an integer: just return it
  // unchanged (conditional Drop/Ret pair executes only on eq, i.e. smi).
  STATIC_ASSERT(kSmiTag == 0);
  __ tst(r0, Operand(kSmiTagMask));
  __ Drop(argc + 1, eq);
  __ Ret(eq);

  // Not a smi: must be a HeapNumber, otherwise go to the slow case.
  __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, &slow, true);

  Label wont_fit_smi, no_vfp_exception, restore_fpscr_and_return;

  // If vfp3 is enabled, we use the fpu rounding with the RM (round towards
  // minus infinity) mode.

  // Load the HeapNumber value.
  // We will need access to the value in the core registers, so we load it
  // with ldrd and move it to the fpu. It also spares a sub instruction for
  // updating the HeapNumber value address, as vldr expects a multiple
  // of 4 offset.
  __ Ldrd(r4, r5, FieldMemOperand(r0, HeapNumber::kValueOffset));
  __ vmov(d1, r4, r5);

  // Backup FPSCR in r3 (restored on every exit path below).
  __ vmrs(r3);
  // Set custom FPSCR:
  // - Set rounding mode to "Round towards Minus Infinity"
  //   (ie bits [23:22] = 0b10).
  // - Clear vfp cumulative exception flags (bits [3:0]).
  // - Make sure Flush-to-zero mode control bit is unset (bit 24; bits
  //   [23:22] are the rounding mode field).
  __ bic(r9, r3,
      Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
  __ orr(r9, r9, Operand(kVFPRoundToMinusInfinityBits));
  __ vmsr(r9);

  // Convert the argument to an integer, rounding per FPSCR (i.e. towards
  // minus infinity == floor).
  __ vcvt_s32_f64(s0, d1, Assembler::FPSCRRounding, al);

  // Use vcvt latency to start checking for special cases.
  // Get the argument exponent and clear the sign bit.
  // r5 holds the high word of the double (sign + exponent + top mantissa).
  __ bic(r6, r5, Operand(HeapNumber::kSignMask));
  __ mov(r6, Operand(r6, LSR, HeapNumber::kMantissaBitsInTopWord));

  // Retrieve FPSCR and check for vfp exceptions raised by the conversion.
  __ vmrs(r9);
  __ tst(r9, Operand(kVFPExceptionMask));
  __ b(&no_vfp_exception, eq);

  // Check for NaN, Infinity, and -Infinity (all-ones exponent).
  // They are invariant through a Math.floor call, so just
  // return the original argument (still in r0).
  __ sub(r7, r6, Operand(HeapNumber::kExponentMask
        >> HeapNumber::kMantissaBitsInTopWord), SetCC);
  __ b(&restore_fpscr_and_return, eq);
  // We had an overflow or underflow in the conversion. Check if we
  // have a big exponent.
  __ cmp(r7, Operand(HeapNumber::kMantissaBits));
  // If greater or equal, the argument is already rounded (too large to have
  // a fractional part) and in r0; otherwise fall back to the slow case.
  __ b(&restore_fpscr_and_return, ge);
  __ b(&slow);

  __ bind(&no_vfp_exception);
  // Move the result back to general purpose register r0.
  __ vmov(r0, s0);
  // Check if the result fits into a smi: adding 0x40000000 sets the sign
  // flag (mi) exactly when the value is outside the 31-bit smi range.
  __ add(r1, r0, Operand(0x40000000), SetCC);
  __ b(&wont_fit_smi, mi);
  // Tag the result.
  STATIC_ASSERT(kSmiTag == 0);
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));

  // Check for -0: the converted integer is 0, but the original double may
  // have been -0.0, which must be returned as the HeapNumber itself.
  __ cmp(r0, Operand(0));
  __ b(&restore_fpscr_and_return, ne);
  // r5 already holds the HeapNumber exponent (high word, incl. sign bit).
  __ tst(r5, Operand(HeapNumber::kSignMask));
  // If our HeapNumber is negative it was -0, so load its address and return.
  // Else r0 is loaded with 0, so we can also just return.
  __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne);

  __ bind(&restore_fpscr_and_return);
  // Restore FPSCR and return.
  __ vmsr(r3);
  __ Drop(argc + 1);
  __ Ret();

  __ bind(&wont_fit_smi);
  __ bind(&slow);
  // Restore FPSCR and fall to slow case.
  __ vmsr(r3);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // r2: function name.
  MaybeObject* obj = GenerateMissBranch();
  if (obj->IsFailure()) return obj;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
1682 | 1817 |
1683 | 1818 |
1684 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, | 1819 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, |
1685 JSObject* holder, | 1820 JSObject* holder, |
1686 JSGlobalPropertyCell* cell, | 1821 JSGlobalPropertyCell* cell, |
1687 JSFunction* function, | 1822 JSFunction* function, |
1688 String* name) { | 1823 String* name) { |
1689 // ----------- S t a t e ------------- | 1824 // ----------- S t a t e ------------- |
1690 // -- r2 : function name | 1825 // -- r2 : function name |
(...skipping 1054 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2745 // Return the generated code. | 2880 // Return the generated code. |
2746 return GetCode(); | 2881 return GetCode(); |
2747 } | 2882 } |
2748 | 2883 |
2749 | 2884 |
2750 #undef __ | 2885 #undef __ |
2751 | 2886 |
2752 } } // namespace v8::internal | 2887 } } // namespace v8::internal |
2753 | 2888 |
2754 #endif // V8_TARGET_ARCH_ARM | 2889 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |