| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1718 matching lines...) |
| 1729   __ j(not_equal, on_not_smis); | 1729   __ j(not_equal, on_not_smis); |
| 1730   __ Integer32ToSmi(second, smi_result); | 1730   __ Integer32ToSmi(second, smi_result); |
| 1731   if (on_success != NULL) { | 1731   if (on_success != NULL) { |
| 1732     __ jmp(on_success); | 1732     __ jmp(on_success); |
| 1733   } else { | 1733   } else { |
| 1734     __ bind(&done); | 1734     __ bind(&done); |
| 1735   } | 1735   } |
| 1736 } | 1736 } |
| 1737 | 1737 |
| 1738 | 1738 |
| 1739 void GenericUnaryOpStub::Generate(MacroAssembler* masm) { | |
| 1740   Label slow, done; | |
| 1741 | |
| 1742   if (op_ == Token::SUB) { | |
| 1743     if (include_smi_code_) { | |
| 1744       // Check whether the value is a smi. | |
| 1745       Label try_float; | |
| 1746       __ JumpIfNotSmi(rax, &try_float); | |
| 1747       if (negative_zero_ == kIgnoreNegativeZero) { | |
| 1748         __ SmiCompare(rax, Smi::FromInt(0)); | |
| 1749         __ j(equal, &done); | |
| 1750       } | |
| 1751       __ SmiNeg(rax, rax, &done); | |
| 1752       __ jmp(&slow);  // zero, if not handled above, and Smi::kMinValue. | |
| 1753 | |
| 1754       // Try floating point case. | |
| 1755       __ bind(&try_float); | |
| 1756     } else if (FLAG_debug_code) { | |
| 1757       __ AbortIfSmi(rax); | |
| 1758     } | |
| 1759 | |
| 1760     __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | |
| 1761                    Heap::kHeapNumberMapRootIndex); | |
| 1762     __ j(not_equal, &slow); | |
| 1763     // Operand is a float, negate its value by flipping sign bit. | |
| 1764     __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | |
| 1765     __ Set(kScratchRegister, 0x01); | |
| 1766     __ shl(kScratchRegister, Immediate(63)); | |
| 1767     __ xor_(rdx, kScratchRegister);  // Flip sign. | |
| 1768     // rdx is value to store. | |
| 1769     if (overwrite_ == UNARY_OVERWRITE) { | |
| 1770       __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx); | |
| 1771     } else { | |
| 1772       __ AllocateHeapNumber(rcx, rbx, &slow); | |
| 1773       // rcx: allocated 'empty' number | |
| 1774       __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | |
| 1775       __ movq(rax, rcx); | |
| 1776     } | |
| 1777   } else if (op_ == Token::BIT_NOT) { | |
| 1778     if (include_smi_code_) { | |
| 1779       Label try_float; | |
| 1780       __ JumpIfNotSmi(rax, &try_float); | |
| 1781       __ SmiNot(rax, rax); | |
| 1782       __ jmp(&done); | |
| 1783       // Try floating point case. | |
| 1784       __ bind(&try_float); | |
| 1785     } else if (FLAG_debug_code) { | |
| 1786       __ AbortIfSmi(rax); | |
| 1787     } | |
| 1788 | |
| 1789     // Check if the operand is a heap number. | |
| 1790     __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), | |
| 1791                    Heap::kHeapNumberMapRootIndex); | |
| 1792     __ j(not_equal, &slow); | |
| 1793 | |
| 1794     // Convert the heap number in rax to an untagged integer in rax. | |
| 1795     IntegerConvert(masm, rax, rax); | |
| 1796 | |
| 1797     // Do the bitwise operation and smi tag the result. | |
| 1798     __ notl(rax); | |
| 1799     __ Integer32ToSmi(rax, rax); | |
| 1800   } | |
| 1801 | |
| 1802   // Return from the stub. | |
| 1803   __ bind(&done); | |
| 1804   __ StubReturn(1); | |
| 1805 | |
| 1806   // Handle the slow case by jumping to the JavaScript builtin. | |
| 1807   __ bind(&slow); | |
| 1808   __ pop(rcx);  // pop return address | |
| 1809   __ push(rax); | |
| 1810   __ push(rcx);  // push return address | |
| 1811   switch (op_) { | |
| 1812     case Token::SUB: | |
| 1813       __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); | |
| 1814       break; | |
| 1815     case Token::BIT_NOT: | |
| 1816       __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); | |
| 1817       break; | |
| 1818     default: | |
| 1819       UNREACHABLE(); | |
| 1820   } | |
| 1821 } | |
| 1822 | |
| 1823 | |
| 1824 void MathPowStub::Generate(MacroAssembler* masm) { | 1739 void MathPowStub::Generate(MacroAssembler* masm) { |
| 1825   // Registers are used as follows: | 1740   // Registers are used as follows: |
| 1826   // rdx = base | 1741   // rdx = base |
| 1827   // rax = exponent | 1742   // rax = exponent |
| 1828   // rcx = temporary, result | 1743   // rcx = temporary, result |
| 1829 | 1744 |
| 1830   Label allocate_return, call_runtime; | 1745   Label allocate_return, call_runtime; |
| 1831 | 1746 |
| 1832   // Load input parameters. | 1747   // Load input parameters. |
| 1833   __ movq(rdx, Operand(rsp, 2 * kPointerSize)); | 1748   __ movq(rdx, Operand(rsp, 2 * kPointerSize)); |
| (...skipping 2981 matching lines...) |
| 4815   // Do a tail call to the rewritten stub. | 4730   // Do a tail call to the rewritten stub. |
| 4816   __ jmp(rdi); | 4731   __ jmp(rdi); |
| 4817 } | 4732 } |
| 4818 | 4733 |
| 4819 | 4734 |
| 4820 #undef __ | 4735 #undef __ |
| 4821 | 4736 |
| 4822 } } // namespace v8::internal | 4737 } } // namespace v8::internal |
| 4823 | 4738 |
| 4824 #endif // V8_TARGET_ARCH_X64 | 4739 #endif // V8_TARGET_ARCH_X64 |