Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 18763003: MIPS: Convert UnaryOpStub to a HydrogenCodeStub. (Closed)
Base URL: https://github.com/v8/v8.git@gbl
Patch Set: Created 7 years, 5 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 209 matching lines...)
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}

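+// The new Hydrogen-based stub takes its single operand in a0; on a miss
+// it bails out to the UnaryOpIC_Miss runtime handler registered below.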
+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { a0 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
#define __ ACCESS_MASM(masm)

+
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* rhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
(...skipping 1337 matching lines...)
      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    __ MultiPopFPU(kCallerSavedFPU);
  }

  __ MultiPop(kJSCallerSaved | ra.bit());
  __ Ret();
}


-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  // Argument is in a0 and v0 at this point, so we can overwrite a0.
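-  // Pass the operand (v0) together with the stub's op, mode and operand
-  // type, all smi-encoded, to the IC utility that patches this stub into
-  // a more specialized one.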
-  __ li(a2, Operand(Smi::FromInt(op_)));
-  __ li(a1, Operand(Smi::FromInt(mode_)));
-  __ li(a0, Operand(Smi::FromInt(operand_type_)));
-  __ Push(v0, a2, a1, a0);
-
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
-  __ bind(&non_smi);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* slow) {
-  __ JumpIfNotSmi(a0, non_smi);
-
-  // The result of negating zero or the smallest negative smi is not a smi.
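-  // a0 & 0x7fffffff is zero only for the tagged values 0 and 0x80000000
-  // (the smi encoding of -2^30); negating either leaves the smi range.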
-  __ And(t0, a0, ~0x80000000);
-  __ Branch(slow, eq, t0, Operand(zero_reg));
-
-  // Return '0 - value'.
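-  // The subu below executes in the Ret's branch delay slot, so v0 is
-  // written before the return takes effect.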
-  __ Ret(USE_DELAY_SLOT);
-  __ subu(v0, zero_reg, a0);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
-                                        Label* non_smi) {
-  __ JumpIfNotSmi(a0, non_smi);
-
-  // Flip bits and revert inverted smi-tag.
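-  // Flipping all bits of a tagged smi t == 2 * n yields 2 * ~n + 1, so
-  // clearing the tag bit afterwards leaves the correctly tagged ~n.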
-  __ Neg(v0, a0);
-  __ And(v0, v0, ~kSmiTagMask);
-  __ Ret();
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, slow, call_builtin;
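-  // Smis are negated inline; heap numbers continue at non_smi; a smi whose
-  // negation is not a smi (0 or -2^30) jumps straight to the generic
-  // builtin; any other type triggers a type transition.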
-  GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
-  // a0 is a heap number. Get a new heap number in a1.
-  if (mode_ == UNARY_OVERWRITE) {
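-    // Negate in place: flipping the sign bit in the upper (exponent) word
-    // of the IEEE-754 double negates the value without touching the rest.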
-    __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
-    __ Xor(a2, a2, Operand(HeapNumber::kSignMask));  // Flip sign.
-    __ Ret(USE_DELAY_SLOT);
-    __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
-  } else {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(a0);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ mov(a1, v0);
-      __ pop(a0);
-    }
-
-    __ bind(&heapnumber_allocated);
-    __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
-    __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
-    __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset));
-    __ Xor(a2, a2, Operand(HeapNumber::kSignMask));  // Flip sign.
-    __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset));
-    __ Ret(USE_DELAY_SLOT);
-    __ mov(v0, a1);
-  }
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(
-    MacroAssembler* masm,
-    Label* slow) {
-  Label impossible;
-
-  EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
-  // Convert the heap number in a0 to an untagged integer in a1.
-  __ ConvertToInt32(a0, a1, a2, a3, f0, slow);
-
-  // Do the bitwise operation and check if the result fits in a smi.
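-  // Adding 0x40000000 stays non-negative exactly for values in the 31-bit
-  // smi range [-2^30, 2^30), so a negative sum means the result overflows.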
-  Label try_float;
-  __ Neg(a1, a1);
-  __ Addu(a2, a1, Operand(0x40000000));
-  __ Branch(&try_float, lt, a2, Operand(zero_reg));
-
-  // Tag the result as a smi and we're done.
-  __ Ret(USE_DELAY_SLOT);  // SmiTag emits one instruction in delay slot.
-  __ SmiTag(v0, a1);
-
-  // Try to store the result in a heap number.
-  __ bind(&try_float);
-  if (mode_ == UNARY_NO_OVERWRITE) {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    // Allocate a new heap number without zapping v0, which we need if it
-    // fails.
-    __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(v0);  // Push the heap number, not the untagged int32.
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ mov(a2, v0);  // Move the new heap number into a2.
-      // Get the heap number into v0, now that the new heap number is in a2.
-      __ pop(v0);
-    }
-
-    // Convert the heap number in v0 to an untagged integer in a1. This
-    // can't go slow-case because it's the same number we already
-    // converted once.
-    __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible);
-    // Redo the bitwise NOT (Xor with -1 flips all bits).
-    __ Xor(a1, a1, -1);
-
-    __ bind(&heapnumber_allocated);
-    __ mov(v0, a2);  // Move newly allocated heap number to v0.
-  }
-
-  // Convert the int32 in a1 to the heap number in v0. a2 is corrupted.
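-  // mtc1 moves the int32 into FPU register f0, cvt.d.w converts the word
-  // to a double, and sdc1 stores the 8-byte value into the number payload.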
-  __ mtc1(a1, f0);
-  __ cvt_d_w(f0, f0);
-  __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
-  __ Ret();
-
-  __ bind(&impossible);
-  if (FLAG_debug_code) {
-    __ stop("Incorrect assumption in bit-not stub");
-  }
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(
-    MacroAssembler* masm) {
-  // Handle the slow case by jumping to the JavaScript builtin.
-  __ push(a0);
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
void BinaryOpStub::Initialize() {
  platform_specific_bit_ = true;  // FPU is a base requirement for V8.
}


void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;

  __ Push(a1, a0);

(...skipping 5964 matching lines...)
  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS
