| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
| 6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 1722 matching lines...) | |
| 1733 | 1733 |
| 1734 if (is_initialization_) { | 1734 if (is_initialization_) { |
| 1735 const Class* cls = NULL; | 1735 const Class* cls = NULL; |
| 1736 switch (cid) { | 1736 switch (cid) { |
| 1737 case kDoubleCid: | 1737 case kDoubleCid: |
| 1738 cls = &compiler->double_class(); | 1738 cls = &compiler->double_class(); |
| 1739 break; | 1739 break; |
| 1740 default: | 1740 default: |
| 1741 UNREACHABLE(); | 1741 UNREACHABLE(); |
| 1742 } | 1742 } |
| 1743 |
| 1743 StoreInstanceFieldSlowPath* slow_path = | 1744 StoreInstanceFieldSlowPath* slow_path = |
| 1744 new StoreInstanceFieldSlowPath(this, *cls); | 1745 new StoreInstanceFieldSlowPath(this, *cls); |
| 1745 compiler->AddSlowPathCode(slow_path); | 1746 compiler->AddSlowPathCode(slow_path); |
| 1747 |
| 1746 __ TryAllocate(*cls, | 1748 __ TryAllocate(*cls, |
| 1747 slow_path->entry_label(), | 1749 slow_path->entry_label(), |
| 1748 temp, | 1750 temp, |
| 1749 temp2); | 1751 temp2); |
| 1750 __ Bind(slow_path->exit_label()); | 1752 __ Bind(slow_path->exit_label()); |
| 1751 __ mov(temp2, temp); | 1753 __ mov(temp2, temp); |
| 1752 __ StoreIntoObject(instance_reg, | 1754 __ StoreIntoObject(instance_reg, |
| 1753 FieldAddress(instance_reg, field().Offset()), | 1755 FieldAddress(instance_reg, field().Offset()), |
| 1754 temp2); | 1756 temp2); |
| 1755 } else { | 1757 } else { |
| 1756 __ lw(temp, FieldAddress(instance_reg, field().Offset())); | 1758 __ lw(temp, FieldAddress(instance_reg, field().Offset())); |
| 1757 } | 1759 } |
| 1758 switch (cid) { | 1760 switch (cid) { |
| 1759 case kDoubleCid: | 1761 case kDoubleCid: |
| 1760 __ StoreDToOffset(value, temp, Double::value_offset() - kHeapObjectTag); | 1762 __ StoreDToOffset(value, temp, Double::value_offset() - kHeapObjectTag); |
| 1761 break; | 1763 break; |
| 1762 default: | 1764 default: |
| 1763 UNREACHABLE(); | 1765 UNREACHABLE(); |
| 1764 } | 1766 } |
| 1765 return; | 1767 return; |
| 1766 } | 1768 } |
| 1767 | 1769 |
| 1768 if (IsPotentialUnboxedStore()) { | 1770 if (IsPotentialUnboxedStore()) { |
| 1769 Register value_reg = locs()->in(1).reg(); | 1771 Register value_reg = locs()->in(1).reg(); |
| 1770 Register temp = locs()->temp(0).reg(); | 1772 Register temp = locs()->temp(0).reg(); |
| 1771 Register temp2 = locs()->temp(1).reg(); | 1773 Register temp2 = locs()->temp(1).reg(); |
| 1772 DRegister fpu_temp = locs()->temp(2).fpu_reg(); | 1774 DRegister fpu_temp = locs()->temp(2).fpu_reg(); |
| 1773 | 1775 |
| 1774 Label store_pointer; | 1776 Label store_pointer; |
| 1775 Label copy_double; | |
| 1776 Label store_double; | 1777 Label store_double; |
| 1777 | 1778 |
| 1778 __ LoadObject(temp, Field::ZoneHandle(field().raw())); | 1779 __ LoadObject(temp, Field::ZoneHandle(field().raw())); |
| 1779 | 1780 |
| 1780 __ lw(temp2, FieldAddress(temp, Field::is_nullable_offset())); | 1781 __ lw(temp2, FieldAddress(temp, Field::is_nullable_offset())); |
| 1781 __ BranchEqual(temp2, kNullCid, &store_pointer); | 1782 __ BranchEqual(temp2, kNullCid, &store_pointer); |
| 1782 | 1783 |
| 1783 __ lbu(temp2, FieldAddress(temp, Field::kind_bits_offset())); | 1784 __ lbu(temp2, FieldAddress(temp, Field::kind_bits_offset())); |
| 1784 __ andi(CMPRES1, temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1785 __ andi(CMPRES1, temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
| 1785 __ beq(CMPRES1, ZR, &store_pointer); | 1786 __ beq(CMPRES1, ZR, &store_pointer); |
| 1786 | 1787 |
| 1787 __ lw(temp2, FieldAddress(temp, Field::guarded_cid_offset())); | 1788 __ lw(temp2, FieldAddress(temp, Field::guarded_cid_offset())); |
| 1788 __ BranchEqual(temp2, kDoubleCid, &store_double); | 1789 __ BranchEqual(temp2, kDoubleCid, &store_double); |
| 1789 | 1790 |
| 1790 // Fall through. | 1791 // Fall through. |
| 1791 __ b(&store_pointer); | 1792 __ b(&store_pointer); |
| 1792 | 1793 |
| 1793 __ Bind(&store_double); | |
| 1794 | |
| 1795 __ lw(temp, FieldAddress(instance_reg, field().Offset())); | |
| 1796 __ BranchNotEqual(temp, reinterpret_cast<int32_t>(Object::null()), | |
| 1797 &copy_double); | |
| 1798 | |
| 1799 StoreInstanceFieldSlowPath* slow_path = | |
| 1800 new StoreInstanceFieldSlowPath(this, compiler->double_class()); | |
| 1801 compiler->AddSlowPathCode(slow_path); | |
| 1802 | |
| 1803 if (!compiler->is_optimizing()) { | 1794 if (!compiler->is_optimizing()) { |
| 1804 locs()->live_registers()->Add(locs()->in(0)); | 1795 locs()->live_registers()->Add(locs()->in(0)); |
| 1805 locs()->live_registers()->Add(locs()->in(1)); | 1796 locs()->live_registers()->Add(locs()->in(1)); |
| 1806 } | 1797 } |
| 1807 | 1798 |
| 1808 __ TryAllocate(compiler->double_class(), | 1799 { |
| 1809 slow_path->entry_label(), | 1800 __ Bind(&store_double); |
| 1810 temp, | 1801 Label copy_double; |
| 1811 temp2); | |
| 1812 __ Bind(slow_path->exit_label()); | |
| 1813 __ mov(temp2, temp); | |
| 1814 __ StoreIntoObject(instance_reg, | |
| 1815 FieldAddress(instance_reg, field().Offset()), | |
| 1816 temp2); | |
| 1817 | 1802 |
| 1818 __ Bind(&copy_double); | 1803 __ lw(temp, FieldAddress(instance_reg, field().Offset())); |
| 1819 __ LoadDFromOffset(fpu_temp, | 1804 __ BranchNotEqual(temp, reinterpret_cast<int32_t>(Object::null()), |
| 1820 value_reg, | 1805 &copy_double); |
| 1821 Double::value_offset() - kHeapObjectTag); | 1806 |
| 1822 __ StoreDToOffset(fpu_temp, temp, Double::value_offset() - kHeapObjectTag); | 1807 StoreInstanceFieldSlowPath* slow_path = |
| 1823 __ b(&skip_store); | 1808 new StoreInstanceFieldSlowPath(this, compiler->double_class()); |
| 1809 compiler->AddSlowPathCode(slow_path); |
| 1810 |
| 1811 __ TryAllocate(compiler->double_class(), |
| 1812 slow_path->entry_label(), |
| 1813 temp, |
| 1814 temp2); |
| 1815 __ Bind(slow_path->exit_label()); |
| 1816 __ mov(temp2, temp); |
| 1817 __ StoreIntoObject(instance_reg, |
| 1818 FieldAddress(instance_reg, field().Offset()), |
| 1819 temp2); |
| 1820 |
| 1821 __ Bind(&copy_double); |
| 1822 __ LoadDFromOffset(fpu_temp, |
| 1823 value_reg, |
| 1824 Double::value_offset() - kHeapObjectTag); |
| 1825 __ StoreDToOffset(fpu_temp, temp, |
| 1826 Double::value_offset() - kHeapObjectTag); |
| 1827 __ b(&skip_store); |
| 1828 } |
| 1829 |
| 1824 __ Bind(&store_pointer); | 1830 __ Bind(&store_pointer); |
| 1825 } | 1831 } |
| 1826 | 1832 |
| 1827 if (ShouldEmitStoreBarrier()) { | 1833 if (ShouldEmitStoreBarrier()) { |
| 1828 Register value_reg = locs()->in(1).reg(); | 1834 Register value_reg = locs()->in(1).reg(); |
| 1829 __ StoreIntoObject(instance_reg, | 1835 __ StoreIntoObject(instance_reg, |
| 1830 FieldAddress(instance_reg, field().Offset()), | 1836 FieldAddress(instance_reg, field().Offset()), |
| 1831 value_reg, | 1837 value_reg, |
| 1832 CanValueBeSmi()); | 1838 CanValueBeSmi()); |
| 1833 } else { | 1839 } else { |
| (...skipping 226 matching lines...) | |
| 2060 | 2066 |
| 2061 __ lw(temp, field_nullability_operand); | 2067 __ lw(temp, field_nullability_operand); |
| 2062 __ BranchEqual(temp, kNullCid, &load_pointer); | 2068 __ BranchEqual(temp, kNullCid, &load_pointer); |
| 2063 | 2069 |
| 2064 __ lw(temp, field_cid_operand); | 2070 __ lw(temp, field_cid_operand); |
| 2065 __ BranchEqual(temp, kDoubleCid, &load_double); | 2071 __ BranchEqual(temp, kDoubleCid, &load_double); |
| 2066 | 2072 |
| 2067 // Fall through. | 2073 // Fall through. |
| 2068 __ b(&load_pointer); | 2074 __ b(&load_pointer); |
| 2069 | 2075 |
| 2070 __ Bind(&load_double); | |
| 2071 | |
| 2072 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | |
| 2073 compiler->AddSlowPathCode(slow_path); | |
| 2074 | |
| 2075 if (!compiler->is_optimizing()) { | 2076 if (!compiler->is_optimizing()) { |
| 2076 locs()->live_registers()->Add(locs()->in(0)); | 2077 locs()->live_registers()->Add(locs()->in(0)); |
| 2077 } | 2078 } |
| 2078 | 2079 |
| 2079 __ TryAllocate(compiler->double_class(), | 2080 { |
| 2080 slow_path->entry_label(), | 2081 __ Bind(&load_double); |
| 2081 result_reg, | 2082 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); |
| 2082 temp); | 2083 compiler->AddSlowPathCode(slow_path); |
| 2083 __ Bind(slow_path->exit_label()); | 2084 |
| 2084 __ lw(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2085 __ TryAllocate(compiler->double_class(), |
| 2085 __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag); | 2086 slow_path->entry_label(), |
| 2086 __ StoreDToOffset(value, | 2087 result_reg, |
| 2087 result_reg, | 2088 temp); |
| 2088 Double::value_offset() - kHeapObjectTag); | 2089 __ Bind(slow_path->exit_label()); |
| 2089 __ b(&done); | 2090 __ lw(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| 2091 __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag); |
| 2092 __ StoreDToOffset(value, |
| 2093 result_reg, |
| 2094 Double::value_offset() - kHeapObjectTag); |
| 2095 __ b(&done); |
| 2096 } |
| 2097 |
| 2090 __ Bind(&load_pointer); | 2098 __ Bind(&load_pointer); |
| 2091 } | 2099 } |
| 2092 __ lw(result_reg, Address(instance_reg, offset_in_bytes() - kHeapObjectTag)); | 2100 __ lw(result_reg, Address(instance_reg, offset_in_bytes() - kHeapObjectTag)); |
| 2093 __ Bind(&done); | 2101 __ Bind(&done); |
| 2094 } | 2102 } |
| 2095 | 2103 |
| 2096 | 2104 |
| 2097 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { | 2105 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { |
| 2098 const intptr_t kNumInputs = 1; | 2106 const intptr_t kNumInputs = 1; |
| 2099 const intptr_t kNumTemps = 0; | 2107 const intptr_t kNumTemps = 0; |
| (...skipping 2144 matching lines...) | |
| 4244 compiler->GenerateCall(token_pos(), | 4252 compiler->GenerateCall(token_pos(), |
| 4245 &label, | 4253 &label, |
| 4246 PcDescriptors::kOther, | 4254 PcDescriptors::kOther, |
| 4247 locs()); | 4255 locs()); |
| 4248 __ Drop(2); // Discard type arguments and receiver. | 4256 __ Drop(2); // Discard type arguments and receiver. |
| 4249 } | 4257 } |
| 4250 | 4258 |
| 4251 } // namespace dart | 4259 } // namespace dart |
| 4252 | 4260 |
| 4253 #endif // defined TARGET_ARCH_MIPS | 4261 #endif // defined TARGET_ARCH_MIPS |