OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. |
6 #if defined(TARGET_ARCH_ARM) | 6 #if defined(TARGET_ARCH_ARM) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 1563 matching lines...) | |
1574 __ b(fail, NE); | 1574 __ b(fail, NE); |
1575 } else { | 1575 } else { |
1576 UNREACHABLE(); | 1576 UNREACHABLE(); |
1577 } | 1577 } |
1578 } | 1578 } |
1579 } | 1579 } |
1580 __ Bind(&ok); | 1580 __ Bind(&ok); |
1581 } | 1581 } |
1582 | 1582 |
1583 | 1583 |
1584 bool Field::IsUnboxedField() const { | |
Florian Schneider
2014/02/06 12:38:49
Maybe this should still be in object.cc? In this c
srdjan
2014/02/06 16:55:31
I do not like architecture dependent code in share
Cutch
2014/02/06 23:15:38
Done.
| |
1585 bool valid_class = (guarded_cid() == kDoubleCid) || | |
1586 (guarded_cid() == kFloat32x4Cid); | |
1587 return is_unboxing_candidate() && !is_final() && !is_nullable() && | |
1588 valid_class; | |
1589 } | |
1590 | |
1591 | |
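A note for readers following the thread above: the Field::IsUnboxedField() definition added at new lines 1584-1589 lives in this architecture-specific file rather than in the shared object.cc, per the discussion. A minimal sketch restating the conditions it checks, with the intent spelled out in comments (the free-standing function name is mine and purely illustrative; the Field accessors are the ones used in the CL):

    // Sketch only, assuming the Field accessors shown in the diff above.
    static bool CanUseUnboxedFieldSlot(const Field& field) {
      // Only cids whose payload is pointer-free and of fixed size are handled
      // by the ARM unboxed load/store paths in this CL.
      const bool valid_class = (field.guarded_cid() == kDoubleCid) ||
                               (field.guarded_cid() == kFloat32x4Cid);
      // A nullable field may hold null, which has no unboxed representation,
      // so it must keep a tagged-pointer slot.
      return field.is_unboxing_candidate() &&
             !field.is_final() &&
             !field.is_nullable() &&
             valid_class;
    }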
1584 class StoreInstanceFieldSlowPath : public SlowPathCode { | 1592 class StoreInstanceFieldSlowPath : public SlowPathCode { |
1585 public: | 1593 public: |
1586 StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, | 1594 explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction) |
1587 const Class& cls) | 1595 : instruction_(instruction) { } |
1588 : instruction_(instruction), cls_(cls) { } | |
1589 | 1596 |
1590 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 1597 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
1591 __ Comment("StoreInstanceFieldSlowPath"); | 1598 __ Comment("StoreInstanceFieldSlowPath"); |
1592 __ Bind(entry_label()); | 1599 { |
1593 const Code& stub = | 1600 __ Bind(double_entry_label()); |
1594 Code::Handle(StubCode::GetAllocationStubForClass(cls_)); | |
1595 const ExternalLabel label(cls_.ToCString(), stub.EntryPoint()); | |
1596 | 1601 |
1597 LocationSummary* locs = instruction_->locs(); | 1602 const Class& cls = compiler->double_class(); |
1598 locs->live_registers()->Remove(locs->out()); | 1603 const Code& stub = |
1604 Code::Handle(StubCode::GetAllocationStubForClass(cls)); | |
1605 const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); | |
1599 | 1606 |
1600 compiler->SaveLiveRegisters(locs); | 1607 LocationSummary* locs = instruction_->locs(); |
1601 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 1608 locs->live_registers()->Remove(locs->out()); |
1602 &label, | |
1603 PcDescriptors::kOther, | |
1604 locs); | |
1605 __ MoveRegister(locs->temp(0).reg(), R0); | |
1606 compiler->RestoreLiveRegisters(locs); | |
1607 | 1609 |
1608 __ b(exit_label()); | 1610 compiler->SaveLiveRegisters(locs); |
1611 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
1612 &label, | |
1613 PcDescriptors::kOther, | |
1614 locs); | |
1615 __ MoveRegister(locs->temp(0).reg(), R0); | |
1616 compiler->RestoreLiveRegisters(locs); | |
1617 | |
1618 __ b(double_exit_label()); | |
1619 } | |
1620 | |
1621 { | |
1622 __ Bind(float32x4_entry_label()); | |
1623 | |
1624 const Class& cls = compiler->float32x4_class(); | |
1625 const Code& stub = | |
1626 Code::Handle(StubCode::GetAllocationStubForClass(cls)); | |
1627 const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); | |
1628 | |
1629 LocationSummary* locs = instruction_->locs(); | |
1630 locs->live_registers()->Remove(locs->out()); | |
Florian Schneider
2014/02/06 12:38:49
locs->live_registers is already modified in the sa
Cutch
2014/02/06 23:15:38
Done.
| |
1631 | |
1632 compiler->SaveLiveRegisters(locs); | |
1633 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
1634 &label, | |
1635 PcDescriptors::kOther, | |
1636 locs); | |
1637 __ MoveRegister(locs->temp(0).reg(), R0); | |
1638 compiler->RestoreLiveRegisters(locs); | |
1639 | |
1640 __ b(float32x4_exit_label()); | |
1641 } | |
1609 } | 1642 } |
1610 | 1643 |
1644 Label* double_entry_label() { | |
1645 // Use default SlowPathCode label for double. | |
1646 return entry_label(); | |
1647 } | |
1648 Label* double_exit_label() { | |
1649 // Use default SlowPathCode label for double. | |
1650 return exit_label(); | |
1651 } | |
1652 | |
1653 Label* float32x4_entry_label() { return &float32x4_entry_label_; } | |
1654 Label* float32x4_exit_label() { return &float32x4_exit_label_; } | |
1655 | |
1611 private: | 1656 private: |
1657 Label float32x4_entry_label_; | |
1658 Label float32x4_exit_label_; | |
1612 StoreInstanceFieldInstr* instruction_; | 1659 StoreInstanceFieldInstr* instruction_; |
1613 const Class& cls_; | |
1614 }; | 1660 }; |
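The reshaped slow path above folds both allocation stubs into a single SlowPathCode object: the double case keeps using the entry/exit labels the base class already provides, while Float32x4 gets its own label pair, bound inside EmitNativeCode. Condensed to just the label plumbing (member names as in the CL; constructor, instruction_ member and emission code elided):

    class StoreInstanceFieldSlowPath : public SlowPathCode {
     public:
      // Double allocation reuses the base class's labels...
      Label* double_entry_label() { return entry_label(); }
      Label* double_exit_label() { return exit_label(); }
      // ...while Float32x4 allocation gets its own entry/exit pair.
      Label* float32x4_entry_label() { return &float32x4_entry_label_; }
      Label* float32x4_exit_label() { return &float32x4_exit_label_; }
     private:
      Label float32x4_entry_label_;
      Label float32x4_exit_label_;
    };

Both emission blocks operate on the same instruction_->locs(), which is what the review note above is pointing at: locs->live_registers()->Remove(locs->out()) is already applied in the first block, so the second call is redundant.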
1615 | 1661 |
1616 | 1662 |
1617 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { | 1663 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { |
1618 const intptr_t kNumInputs = 2; | 1664 const intptr_t kNumInputs = 2; |
1619 const intptr_t kNumTemps = 0; | 1665 const intptr_t kNumTemps = 0; |
1620 LocationSummary* summary = | 1666 LocationSummary* summary = |
1621 new LocationSummary(kNumInputs, kNumTemps, | 1667 new LocationSummary(kNumInputs, kNumTemps, |
1622 (field().guarded_cid() == kIllegalCid) || (is_initialization_) | 1668 (field().guarded_cid() == kIllegalCid) || (is_initialization_) |
1623 ? LocationSummary::kCallOnSlowPath | 1669 ? LocationSummary::kCallOnSlowPath |
(...skipping 21 matching lines...) | |
1645 } | 1691 } |
1646 | 1692 |
1647 | 1693 |
1648 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1694 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1649 Label skip_store; | 1695 Label skip_store; |
1650 | 1696 |
1651 Register instance_reg = locs()->in(0).reg(); | 1697 Register instance_reg = locs()->in(0).reg(); |
1652 | 1698 |
1653 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1699 if (IsUnboxedStore() && compiler->is_optimizing()) { |
1654 DRegister value = EvenDRegisterOf(locs()->in(1).fpu_reg()); | 1700 DRegister value = EvenDRegisterOf(locs()->in(1).fpu_reg()); |
1701 DRegister value_odd = OddDRegisterOf(locs()->in(1).fpu_reg()); | |
1655 Register temp = locs()->temp(0).reg(); | 1702 Register temp = locs()->temp(0).reg(); |
1656 Register temp2 = locs()->temp(1).reg(); | 1703 Register temp2 = locs()->temp(1).reg(); |
1657 const intptr_t cid = field().UnboxedFieldCid(); | 1704 const intptr_t cid = field().UnboxedFieldCid(); |
1658 | 1705 |
1659 if (is_initialization_) { | 1706 if (is_initialization_) { |
1707 StoreInstanceFieldSlowPath* slow_path = | |
1708 new StoreInstanceFieldSlowPath(this); | |
1709 compiler->AddSlowPathCode(slow_path); | |
1710 | |
1660 const Class* cls = NULL; | 1711 const Class* cls = NULL; |
1712 Label* entry_label = NULL; | |
1713 Label* exit_label = NULL; | |
1661 switch (cid) { | 1714 switch (cid) { |
1662 case kDoubleCid: | 1715 case kDoubleCid: |
1663 cls = &compiler->double_class(); | 1716 cls = &compiler->double_class(); |
1717 entry_label = slow_path->double_entry_label(); | |
1718 exit_label = slow_path->double_exit_label(); | |
1664 break; | 1719 break; |
1665 // TODO(johnmccutchan): Add kFloat32x4Cid here. | 1720 case kFloat32x4Cid: |
1721 cls = &compiler->float32x4_class(); | |
1722 entry_label = slow_path->float32x4_entry_label(); | |
1723 exit_label = slow_path->float32x4_exit_label(); | |
1724 break; | |
1666 default: | 1725 default: |
1667 UNREACHABLE(); | 1726 UNREACHABLE(); |
1668 } | 1727 } |
1669 StoreInstanceFieldSlowPath* slow_path = | 1728 |
1670 new StoreInstanceFieldSlowPath(this, *cls); | |
1671 compiler->AddSlowPathCode(slow_path); | |
1672 __ TryAllocate(*cls, | 1729 __ TryAllocate(*cls, |
1673 slow_path->entry_label(), | 1730 entry_label, |
1674 temp, | 1731 temp, |
1675 temp2); | 1732 temp2); |
1676 __ Bind(slow_path->exit_label()); | 1733 __ Bind(exit_label); |
1677 __ MoveRegister(temp2, temp); | 1734 __ MoveRegister(temp2, temp); |
1678 __ StoreIntoObject(instance_reg, | 1735 __ StoreIntoObject(instance_reg, |
1679 FieldAddress(instance_reg, field().Offset()), | 1736 FieldAddress(instance_reg, field().Offset()), |
1680 temp2); | 1737 temp2); |
1681 } else { | 1738 } else { |
1682 __ ldr(temp, FieldAddress(instance_reg, field().Offset())); | 1739 __ ldr(temp, FieldAddress(instance_reg, field().Offset())); |
1683 } | 1740 } |
1684 switch (cid) { | 1741 switch (cid) { |
1685 case kDoubleCid: | 1742 case kDoubleCid: |
1686 __ StoreDToOffset(value, temp, Double::value_offset() - kHeapObjectTag); | 1743 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); |
1687 // TODO(johnmccutchan): Add kFloat32x4Cid here. | 1744 __ StoreDToOffset(value, temp, Double::value_offset() - kHeapObjectTag); |
1688 break; | 1745 break; |
1746 case kFloat32x4Cid: | |
1747 __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr"); | |
1748 __ StoreDToOffset(value, temp, | |
1749 Float32x4::value_offset() - kHeapObjectTag); | |
1750 __ StoreDToOffset(value_odd, temp, | |
1751 Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag); | |
1752 break; | |
1689 default: | 1753 default: |
1690 UNREACHABLE(); | 1754 UNREACHABLE(); |
1691 } | 1755 } |
1692 | 1756 |
1693 return; | 1757 return; |
1694 } | 1758 } |
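A note on the Float32x4 case added in the switch above: an ARM Q register is an aliased pair of D registers, so the 16-byte Float32x4 payload is moved as two 8-byte transfers, with the even D register holding lanes 0-1 and the odd D register holding lanes 2-3. That is why the second StoreDToOffset targets Float32x4::value_offset() + 2*kWordSize (kWordSize is 4 bytes on ARM32, so the odd half lands 8 bytes in). Annotated sketch of the two stores (same calls as in the diff; the comments are mine):

    // Q(in(1)) aliases {value /* D even: lanes 0,1 */, value_odd /* D odd: lanes 2,3 */}.
    __ StoreDToOffset(value, temp,
                      Float32x4::value_offset() - kHeapObjectTag);                // bytes 0..7
    __ StoreDToOffset(value_odd, temp,
                      Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag);  // bytes 8..15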
1695 | 1759 |
1696 if (IsPotentialUnboxedStore()) { | 1760 if (IsPotentialUnboxedStore()) { |
1697 Register value_reg = locs()->in(1).reg(); | 1761 Register value_reg = locs()->in(1).reg(); |
1698 Register temp = locs()->temp(0).reg(); | 1762 Register temp = locs()->temp(0).reg(); |
1699 Register temp2 = locs()->temp(1).reg(); | 1763 Register temp2 = locs()->temp(1).reg(); |
1700 DRegister fpu_temp = EvenDRegisterOf(locs()->temp(2).fpu_reg()); | 1764 DRegister fpu_temp = EvenDRegisterOf(locs()->temp(2).fpu_reg()); |
1765 DRegister fpu_temp_odd = OddDRegisterOf(locs()->temp(2).fpu_reg()); | |
1701 | 1766 |
1702 Label store_pointer; | 1767 Label store_pointer; |
1703 Label copy_double; | |
1704 Label store_double; | 1768 Label store_double; |
1769 Label store_float32x4; | |
1705 | 1770 |
1706 __ LoadObject(temp, Field::ZoneHandle(field().raw())); | 1771 __ LoadObject(temp, Field::ZoneHandle(field().raw())); |
1707 | 1772 |
1708 __ ldr(temp2, FieldAddress(temp, Field::is_nullable_offset())); | 1773 __ ldr(temp2, FieldAddress(temp, Field::is_nullable_offset())); |
1709 __ CompareImmediate(temp2, kNullCid); | 1774 __ CompareImmediate(temp2, kNullCid); |
1710 __ b(&store_pointer, EQ); | 1775 __ b(&store_pointer, EQ); |
1711 | 1776 |
1712 __ ldrb(temp2, FieldAddress(temp, Field::kind_bits_offset())); | 1777 __ ldrb(temp2, FieldAddress(temp, Field::kind_bits_offset())); |
1713 __ tst(temp2, ShifterOperand(1 << Field::kUnboxingCandidateBit)); | 1778 __ tst(temp2, ShifterOperand(1 << Field::kUnboxingCandidateBit)); |
1714 __ b(&store_pointer, EQ); | 1779 __ b(&store_pointer, EQ); |
1715 | 1780 |
1716 __ ldr(temp2, FieldAddress(temp, Field::guarded_cid_offset())); | 1781 __ ldr(temp2, FieldAddress(temp, Field::guarded_cid_offset())); |
1717 __ CompareImmediate(temp2, kDoubleCid); | 1782 __ CompareImmediate(temp2, kDoubleCid); |
1718 __ b(&store_double, EQ); | 1783 __ b(&store_double, EQ); |
1719 | 1784 |
1785 __ ldr(temp2, FieldAddress(temp, Field::guarded_cid_offset())); | |
1786 __ CompareImmediate(temp2, kFloat32x4Cid); | |
1787 __ b(&store_float32x4, EQ); | |
1788 | |
1720 // Fall through. | 1789 // Fall through. |
1721 __ b(&store_pointer); | 1790 __ b(&store_pointer); |
1722 | 1791 |
1723 __ Bind(&store_double); | |
1724 | |
1725 __ ldr(temp, FieldAddress(instance_reg, field().Offset())); | |
1726 __ CompareImmediate(temp, | |
1727 reinterpret_cast<intptr_t>(Object::null())); | |
1728 __ b(©_double, NE); | |
1729 | |
1730 StoreInstanceFieldSlowPath* slow_path = | 1792 StoreInstanceFieldSlowPath* slow_path = |
1731 new StoreInstanceFieldSlowPath(this, compiler->double_class()); | 1793 new StoreInstanceFieldSlowPath(this); |
1732 compiler->AddSlowPathCode(slow_path); | 1794 compiler->AddSlowPathCode(slow_path); |
1733 | 1795 |
1734 if (!compiler->is_optimizing()) { | 1796 { |
1735 locs()->live_registers()->Add(locs()->in(0)); | 1797 __ Bind(&store_double); |
1736 locs()->live_registers()->Add(locs()->in(1)); | 1798 Label copy_double; |
1799 | |
1800 __ ldr(temp, FieldAddress(instance_reg, field().Offset())); | |
1801 __ CompareImmediate(temp, | |
1802 reinterpret_cast<intptr_t>(Object::null())); | |
1803 __ b(©_double, NE); | |
1804 | |
1805 if (!compiler->is_optimizing()) { | |
1806 locs()->live_registers()->Add(locs()->in(0)); | |
1807 locs()->live_registers()->Add(locs()->in(1)); | |
1808 } | |
1809 | |
1810 __ TryAllocate(compiler->double_class(), | |
1811 slow_path->double_entry_label(), | |
1812 temp, | |
1813 temp2); | |
1814 __ Bind(slow_path->double_exit_label()); | |
1815 __ MoveRegister(temp2, temp); | |
1816 __ StoreIntoObject(instance_reg, | |
1817 FieldAddress(instance_reg, field().Offset()), | |
1818 temp2); | |
1819 __ Bind(©_double); | |
1820 __ LoadDFromOffset(fpu_temp, | |
1821 value_reg, | |
1822 Double::value_offset() - kHeapObjectTag); | |
1823 __ StoreDToOffset(fpu_temp, | |
1824 temp, | |
1825 Double::value_offset() - kHeapObjectTag); | |
1826 __ b(&skip_store); | |
1737 } | 1827 } |
1738 | 1828 |
1739 __ TryAllocate(compiler->double_class(), | 1829 { |
1740 slow_path->entry_label(), | 1830 __ Bind(&store_float32x4); |
1741 temp, | 1831 Label copy_float32x4; |
1742 temp2); | 1832 |
1743 __ Bind(slow_path->exit_label()); | 1833 __ ldr(temp, FieldAddress(instance_reg, field().Offset())); |
1744 __ MoveRegister(temp2, temp); | 1834 __ CompareImmediate(temp, |
1745 __ StoreIntoObject(instance_reg, | 1835 reinterpret_cast<intptr_t>(Object::null())); |
1746 FieldAddress(instance_reg, field().Offset()), | 1836 __ b(©_float32x4, NE); |
1747 temp2); | 1837 |
1748 __ Bind(©_double); | 1838 if (!compiler->is_optimizing()) { |
1749 __ LoadDFromOffset(fpu_temp, | 1839 locs()->live_registers()->Add(locs()->in(0)); |
Florian Schneider
2014/02/06 12:38:49
locs() is already modified in the same way above,
Cutch
2014/02/06 23:15:38
Done.
| |
1750 value_reg, | 1840 locs()->live_registers()->Add(locs()->in(1)); |
1751 Double::value_offset() - kHeapObjectTag); | 1841 } |
1752 __ StoreDToOffset(fpu_temp, temp, Double::value_offset() - kHeapObjectTag); | 1842 |
1753 __ b(&skip_store); | 1843 __ TryAllocate(compiler->float32x4_class(), |
1844 slow_path->float32x4_entry_label(), | |
1845 temp, | |
1846 temp2); | |
1847 __ Bind(slow_path->float32x4_exit_label()); | |
1848 __ MoveRegister(temp2, temp); | |
1849 __ StoreIntoObject(instance_reg, | |
1850 FieldAddress(instance_reg, field().Offset()), | |
1851 temp2); | |
1852 __ Bind(©_float32x4); | |
1853 __ LoadDFromOffset(fpu_temp, value_reg, | |
1854 Float32x4::value_offset() - kHeapObjectTag); | |
1855 __ LoadDFromOffset(fpu_temp_odd, value_reg, | |
1856 Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag); | |
1857 __ StoreDToOffset(fpu_temp, temp, | |
1858 Float32x4::value_offset() - kHeapObjectTag); | |
1859 __ StoreDToOffset(fpu_temp_odd, temp, | |
1860 Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag); | |
1861 __ b(&skip_store); | |
1862 } | |
1863 | |
1754 __ Bind(&store_pointer); | 1864 __ Bind(&store_pointer); |
1755 } | 1865 } |
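The IsPotentialUnboxedStore() path above cannot know the field's state statically, so it re-reads it from the Field object at run time: the values at is_nullable_offset() and guarded_cid_offset() are compared against kNullCid, kDoubleCid and kFloat32x4Cid (plus the kUnboxingCandidateBit test) to choose between the tagged-pointer store and the two unboxed stores. Each unboxed branch then either reuses the box already stored in the field or allocates a fresh one via TryAllocate with the matching slow-path label before copying the payload. A rough C++ restatement of just that dispatch (the classifier function is illustrative, not part of the VM; the cid constants are the VM's class ids):

    enum StoreKind { kStorePointer, kStoreUnboxedDouble, kStoreUnboxedFloat32x4 };

    // Mirrors the branch order emitted above.
    static StoreKind ClassifyFieldStore(bool is_nullable,
                                        bool is_unboxing_candidate,
                                        intptr_t guarded_cid) {
      if (is_nullable) return kStorePointer;             // null has no unboxed form
      if (!is_unboxing_candidate) return kStorePointer;  // kUnboxingCandidateBit clear
      if (guarded_cid == kDoubleCid) return kStoreUnboxedDouble;        // 8-byte payload
      if (guarded_cid == kFloat32x4Cid) return kStoreUnboxedFloat32x4;  // 16-byte payload
      return kStorePointer;                              // anything else: tagged store
    }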
1756 | 1866 |
1757 if (ShouldEmitStoreBarrier()) { | 1867 if (ShouldEmitStoreBarrier()) { |
1758 Register value_reg = locs()->in(1).reg(); | 1868 Register value_reg = locs()->in(1).reg(); |
1759 __ StoreIntoObject(instance_reg, | 1869 __ StoreIntoObject(instance_reg, |
1760 FieldAddress(instance_reg, field().Offset()), | 1870 FieldAddress(instance_reg, field().Offset()), |
1761 value_reg, | 1871 value_reg, |
1762 CanValueBeSmi()); | 1872 CanValueBeSmi()); |
1763 } else { | 1873 } else { |
(...skipping 123 matching lines...) | |
1887 deopt_id(), | 1997 deopt_id(), |
1888 kAllocateObjectWithBoundsCheckRuntimeEntry, | 1998 kAllocateObjectWithBoundsCheckRuntimeEntry, |
1889 3, | 1999 3, |
1890 locs()); | 2000 locs()); |
1891 __ Drop(3); | 2001 __ Drop(3); |
1892 ASSERT(locs()->out().reg() == R0); | 2002 ASSERT(locs()->out().reg() == R0); |
1893 __ Pop(R0); // Pop new instance. | 2003 __ Pop(R0); // Pop new instance. |
1894 } | 2004 } |
1895 | 2005 |
1896 | 2006 |
1897 class BoxDoubleSlowPath : public SlowPathCode { | 2007 class LoadFieldSlowPath : public SlowPathCode { |
1898 public: | 2008 public: |
1899 explicit BoxDoubleSlowPath(Instruction* instruction) | 2009 explicit LoadFieldSlowPath(Instruction* instruction) |
1900 : instruction_(instruction) { } | 2010 : instruction_(instruction) { } |
1901 | 2011 |
1902 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2012 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
1903 __ Comment("BoxDoubleSlowPath"); | 2013 __ Comment("LoadFieldSlowPath"); |
1904 __ Bind(entry_label()); | 2014 { |
1905 const Class& double_class = compiler->double_class(); | 2015 __ Bind(double_entry_label()); |
1906 const Code& stub = | 2016 const Class& double_class = compiler->double_class(); |
1907 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); | 2017 const Code& stub = |
1908 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); | 2018 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
2019 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); | |
1909 | 2020 |
1910 LocationSummary* locs = instruction_->locs(); | 2021 LocationSummary* locs = instruction_->locs(); |
1911 locs->live_registers()->Remove(locs->out()); | 2022 locs->live_registers()->Remove(locs->out()); |
1912 | 2023 |
1913 compiler->SaveLiveRegisters(locs); | 2024 compiler->SaveLiveRegisters(locs); |
1914 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 2025 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
1915 &label, | 2026 &label, |
1916 PcDescriptors::kOther, | 2027 PcDescriptors::kOther, |
1917 locs); | 2028 locs); |
1918 __ MoveRegister(locs->out().reg(), R0); | 2029 __ MoveRegister(locs->out().reg(), R0); |
1919 compiler->RestoreLiveRegisters(locs); | 2030 compiler->RestoreLiveRegisters(locs); |
1920 | 2031 |
1921 __ b(exit_label()); | 2032 __ b(double_exit_label()); |
2033 } | |
2034 { | |
2035 __ Bind(float32x4_entry_label()); | |
2036 const Class& float32x4_class = compiler->float32x4_class(); | |
2037 const Code& stub = | |
2038 Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class)); | |
2039 const ExternalLabel label(float32x4_class.ToCString(), stub.EntryPoint()); | |
2040 | |
2041 LocationSummary* locs = instruction_->locs(); | |
2042 locs->live_registers()->Remove(locs->out()); | |
2043 | |
2044 compiler->SaveLiveRegisters(locs); | |
2045 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
2046 &label, | |
2047 PcDescriptors::kOther, | |
2048 locs); | |
2049 __ MoveRegister(locs->out().reg(), R0); | |
2050 compiler->RestoreLiveRegisters(locs); | |
2051 | |
2052 __ b(float32x4_exit_label()); | |
2053 } | |
1922 } | 2054 } |
1923 | 2055 |
2056 Label* double_entry_label() { | |
2057 // Use default SlowPathCode label for double. | |
2058 return entry_label(); | |
2059 } | |
2060 Label* double_exit_label() { | |
2061 // Use default SlowPathCode label for double. | |
2062 return exit_label(); | |
2063 } | |
2064 | |
2065 Label* float32x4_entry_label() { return &float32x4_entry_label_; } | |
2066 Label* float32x4_exit_label() { return &float32x4_exit_label_; } | |
2067 | |
1924 private: | 2068 private: |
2069 Label float32x4_entry_label_; | |
2070 Label float32x4_exit_label_; | |
1925 Instruction* instruction_; | 2071 Instruction* instruction_; |
1926 }; | 2072 }; |
1927 | 2073 |
1928 | 2074 |
1929 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { | 2075 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { |
1930 const intptr_t kNumInputs = 1; | 2076 const intptr_t kNumInputs = 1; |
1931 const intptr_t kNumTemps = 0; | 2077 const intptr_t kNumTemps = 0; |
1932 LocationSummary* locs = | 2078 LocationSummary* locs = |
1933 new LocationSummary( | 2079 new LocationSummary( |
1934 kNumInputs, kNumTemps, | 2080 kNumInputs, kNumTemps, |
(...skipping 12 matching lines...) | |
1947 } | 2093 } |
1948 locs->set_out(Location::RequiresRegister()); | 2094 locs->set_out(Location::RequiresRegister()); |
1949 return locs; | 2095 return locs; |
1950 } | 2096 } |
1951 | 2097 |
1952 | 2098 |
1953 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2099 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1954 Register instance_reg = locs()->in(0).reg(); | 2100 Register instance_reg = locs()->in(0).reg(); |
1955 if (IsUnboxedLoad() && compiler->is_optimizing()) { | 2101 if (IsUnboxedLoad() && compiler->is_optimizing()) { |
1956 DRegister result = EvenDRegisterOf(locs()->out().fpu_reg()); | 2102 DRegister result = EvenDRegisterOf(locs()->out().fpu_reg()); |
2103 DRegister result_odd = OddDRegisterOf(locs()->out().fpu_reg()); | |
1957 Register temp = locs()->temp(0).reg(); | 2104 Register temp = locs()->temp(0).reg(); |
1958 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2105 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); |
1959 intptr_t cid = field()->UnboxedFieldCid(); | 2106 intptr_t cid = field()->UnboxedFieldCid(); |
1960 switch (cid) { | 2107 switch (cid) { |
1961 case kDoubleCid: | 2108 case kDoubleCid: |
2109 __ Comment("UnboxedDoubleLoadFieldInstr"); | |
1962 __ LoadDFromOffset(result, temp, | 2110 __ LoadDFromOffset(result, temp, |
1963 Double::value_offset() - kHeapObjectTag); | 2111 Double::value_offset() - kHeapObjectTag); |
1964 break; | 2112 break; |
1965 // TODO(johnmccutchan): Add Float32x4 path here. | 2113 case kFloat32x4Cid: |
2114 __ Comment("UnboxedFloat32x4LoadFieldInstr"); | |
2115 __ LoadDFromOffset(result, temp, | |
2116 Float32x4::value_offset() - kHeapObjectTag); | |
2117 __ LoadDFromOffset(result_odd, temp, | |
2118 Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag); | |
2119 break; | |
1966 default: | 2120 default: |
1967 UNREACHABLE(); | 2121 UNREACHABLE(); |
1968 } | 2122 } |
1969 return; | 2123 return; |
1970 } | 2124 } |
1971 | 2125 |
1972 Label done; | 2126 Label done; |
1973 Register result_reg = locs()->out().reg(); | 2127 Register result_reg = locs()->out().reg(); |
1974 if (IsPotentialUnboxedLoad()) { | 2128 if (IsPotentialUnboxedLoad()) { |
1975 Register temp = locs()->temp(1).reg(); | 2129 Register temp = locs()->temp(1).reg(); |
1976 DRegister value = EvenDRegisterOf(locs()->temp(0).fpu_reg()); | 2130 DRegister value = EvenDRegisterOf(locs()->temp(0).fpu_reg()); |
2131 DRegister value_odd = OddDRegisterOf(locs()->temp(0).fpu_reg()); | |
2132 | |
2133 LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this); | |
2134 compiler->AddSlowPathCode(slow_path); | |
1977 | 2135 |
1978 Label load_pointer; | 2136 Label load_pointer; |
1979 Label load_double; | 2137 Label load_double; |
2138 Label load_float32x4; | |
1980 | 2139 |
1981 __ LoadObject(result_reg, Field::ZoneHandle(field()->raw())); | 2140 __ LoadObject(result_reg, Field::ZoneHandle(field()->raw())); |
1982 | 2141 |
1983 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset()); | 2142 FieldAddress field_cid_operand(result_reg, Field::guarded_cid_offset()); |
1984 FieldAddress field_nullability_operand(result_reg, | 2143 FieldAddress field_nullability_operand(result_reg, |
1985 Field::is_nullable_offset()); | 2144 Field::is_nullable_offset()); |
1986 | 2145 |
1987 __ ldr(temp, field_nullability_operand); | 2146 __ ldr(temp, field_nullability_operand); |
1988 __ CompareImmediate(temp, kNullCid); | 2147 __ CompareImmediate(temp, kNullCid); |
1989 __ b(&load_pointer, EQ); | 2148 __ b(&load_pointer, EQ); |
1990 | 2149 |
1991 __ ldr(temp, field_cid_operand); | 2150 __ ldr(temp, field_cid_operand); |
1992 __ CompareImmediate(temp, kDoubleCid); | 2151 __ CompareImmediate(temp, kDoubleCid); |
1993 __ b(&load_double, EQ); | 2152 __ b(&load_double, EQ); |
1994 | 2153 |
2154 __ ldr(temp, field_cid_operand); | |
2155 __ CompareImmediate(temp, kFloat32x4Cid); | |
2156 __ b(&load_float32x4, EQ); | |
2157 | |
1995 // Fall through. | 2158 // Fall through. |
1996 __ b(&load_pointer); | 2159 __ b(&load_pointer); |
1997 | 2160 |
1998 __ Bind(&load_double); | 2161 { |
2162 __ Bind(&load_double); | |
1999 | 2163 |
2000 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | 2164 if (!compiler->is_optimizing()) { |
2001 compiler->AddSlowPathCode(slow_path); | 2165 locs()->live_registers()->Add(locs()->in(0)); |
2166 } | |
2002 | 2167 |
2003 if (!compiler->is_optimizing()) { | 2168 __ TryAllocate(compiler->double_class(), |
2004 locs()->live_registers()->Add(locs()->in(0)); | 2169 slow_path->double_entry_label(), |
2170 result_reg, | |
2171 temp); | |
2172 __ Bind(slow_path->double_exit_label()); | |
2173 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | |
2174 __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag); | |
2175 __ StoreDToOffset(value, | |
2176 result_reg, | |
2177 Double::value_offset() - kHeapObjectTag); | |
2178 __ b(&done); | |
2005 } | 2179 } |
2006 | 2180 |
2007 __ TryAllocate(compiler->double_class(), | 2181 { |
2008 slow_path->entry_label(), | 2182 __ Bind(&load_float32x4); |
2009 result_reg, | |
2010 temp); | |
2011 __ Bind(slow_path->exit_label()); | |
2012 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | |
2013 __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag); | |
2014 __ StoreDToOffset(value, | |
2015 result_reg, | |
2016 Double::value_offset() - kHeapObjectTag); | |
2017 __ b(&done); | |
2018 | 2183 |
2019 // TODO(johnmccutchan): Add Float32x4 path here. | 2184 if (!compiler->is_optimizing()) { |
2185 locs()->live_registers()->Add(locs()->in(0)); | |
2186 } | |
2187 | |
2188 __ TryAllocate(compiler->float32x4_class(), | |
2189 slow_path->float32x4_entry_label(), | |
2190 result_reg, | |
2191 temp); | |
2192 __ Bind(slow_path->float32x4_exit_label()); | |
2193 __ ldr(temp, FieldAddress(instance_reg, offset_in_bytes())); | |
2194 __ LoadDFromOffset(value, temp, | |
2195 Float32x4::value_offset() - kHeapObjectTag); | |
2196 __ LoadDFromOffset(value_odd, temp, | |
2197 Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag); | |
2198 __ StoreDToOffset(value, result_reg, | |
2199 Float32x4::value_offset() - kHeapObjectTag); | |
2200 __ StoreDToOffset(value_odd, result_reg, | |
2201 Float32x4::value_offset() + 2*kWordSize - kHeapObjectTag); | |
2202 __ b(&done); | |
2203 } | |
2020 | 2204 |
2021 __ Bind(&load_pointer); | 2205 __ Bind(&load_pointer); |
2022 } | 2206 } |
2023 __ LoadFromOffset(kWord, result_reg, | 2207 __ LoadFromOffset(kWord, result_reg, |
2024 instance_reg, offset_in_bytes() - kHeapObjectTag); | 2208 instance_reg, offset_in_bytes() - kHeapObjectTag); |
2025 __ Bind(&done); | 2209 __ Bind(&done); |
2026 } | 2210 } |
2027 | 2211 |
2028 | 2212 |
2029 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { | 2213 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { |
(...skipping 787 matching lines...) | |
2817 } else if (right_cid == kSmiCid) { | 3001 } else if (right_cid == kSmiCid) { |
2818 __ tst(left, ShifterOperand(kSmiTagMask)); | 3002 __ tst(left, ShifterOperand(kSmiTagMask)); |
2819 } else { | 3003 } else { |
2820 __ orr(IP, left, ShifterOperand(right)); | 3004 __ orr(IP, left, ShifterOperand(right)); |
2821 __ tst(IP, ShifterOperand(kSmiTagMask)); | 3005 __ tst(IP, ShifterOperand(kSmiTagMask)); |
2822 } | 3006 } |
2823 __ b(deopt, EQ); | 3007 __ b(deopt, EQ); |
2824 } | 3008 } |
2825 | 3009 |
2826 | 3010 |
3011 class BoxDoubleSlowPath : public SlowPathCode { | |
3012 public: | |
3013 explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction) | |
3014 : instruction_(instruction) { } | |
3015 | |
3016 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
3017 __ Comment("BoxDoubleSlowPath"); | |
3018 __ Bind(entry_label()); | |
3019 const Class& double_class = compiler->double_class(); | |
3020 const Code& stub = | |
3021 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); | |
3022 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); | |
3023 | |
3024 LocationSummary* locs = instruction_->locs(); | |
3025 locs->live_registers()->Remove(locs->out()); | |
3026 | |
3027 compiler->SaveLiveRegisters(locs); | |
3028 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
3029 &label, | |
3030 PcDescriptors::kOther, | |
3031 locs); | |
3032 __ MoveRegister(locs->out().reg(), R0); | |
3033 compiler->RestoreLiveRegisters(locs); | |
3034 | |
3035 __ b(exit_label()); | |
3036 } | |
3037 | |
3038 private: | |
3039 BoxDoubleInstr* instruction_; | |
3040 }; | |
3041 | |
3042 | |
2827 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { | 3043 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { |
2828 const intptr_t kNumInputs = 1; | 3044 const intptr_t kNumInputs = 1; |
2829 const intptr_t kNumTemps = 1; | 3045 const intptr_t kNumTemps = 1; |
2830 LocationSummary* summary = | 3046 LocationSummary* summary = |
2831 new LocationSummary(kNumInputs, | 3047 new LocationSummary(kNumInputs, |
2832 kNumTemps, | 3048 kNumTemps, |
2833 LocationSummary::kCallOnSlowPath); | 3049 LocationSummary::kCallOnSlowPath); |
2834 summary->set_in(0, Location::RequiresFpuRegister()); | 3050 summary->set_in(0, Location::RequiresFpuRegister()); |
2835 summary->set_temp(0, Location::RequiresRegister()); | 3051 summary->set_temp(0, Location::RequiresRegister()); |
2836 summary->set_out(Location::RequiresRegister()); | 3052 summary->set_out(Location::RequiresRegister()); |
(...skipping 2050 matching lines...) | |
4887 compiler->GenerateCall(token_pos(), | 5103 compiler->GenerateCall(token_pos(), |
4888 &label, | 5104 &label, |
4889 PcDescriptors::kOther, | 5105 PcDescriptors::kOther, |
4890 locs()); | 5106 locs()); |
4891 __ Drop(2); // Discard type arguments and receiver. | 5107 __ Drop(2); // Discard type arguments and receiver. |
4892 } | 5108 } |
4893 | 5109 |
4894 } // namespace dart | 5110 } // namespace dart |
4895 | 5111 |
4896 #endif // defined TARGET_ARCH_ARM | 5112 #endif // defined TARGET_ARCH_ARM |