OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_IA32. |
6 #if defined(TARGET_ARCH_IA32) | 6 #if defined(TARGET_ARCH_IA32) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
11 #include "vm/flow_graph_compiler.h" | 11 #include "vm/flow_graph_compiler.h" |
12 #include "vm/locations.h" | 12 #include "vm/locations.h" |
13 #include "vm/object_store.h" | 13 #include "vm/object_store.h" |
14 #include "vm/parser.h" | 14 #include "vm/parser.h" |
15 #include "vm/stack_frame.h" | 15 #include "vm/stack_frame.h" |
16 #include "vm/stub_code.h" | 16 #include "vm/stub_code.h" |
17 #include "vm/symbols.h" | 17 #include "vm/symbols.h" |
18 #include "vm/il_printer.h" | |
srdjan 2014/02/05 22:20:09: Please use alphabetic order.
Cutch 2014/02/05 23:16:02: Removed.
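For context, a minimal sketch of the ordering srdjan is asking for — purely illustrative, not part of the patch, and assuming the vm/il_printer.h include had been kept rather than removed as Cutch ultimately did — alphabetical order would place it between the existing flow_graph_compiler.h and locations.h includes:

    #include "vm/dart_entry.h"
    #include "vm/flow_graph_compiler.h"
    #include "vm/il_printer.h"   // hypothetical placement if the include were retained
    #include "vm/locations.h"
    #include "vm/object_store.h"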
18 | 19 |
19 #define __ compiler->assembler()-> | 20 #define __ compiler->assembler()-> |
20 | 21 |
21 namespace dart { | 22 namespace dart { |
22 | 23 |
23 DECLARE_FLAG(int, optimization_counter_threshold); | 24 DECLARE_FLAG(int, optimization_counter_threshold); |
24 DECLARE_FLAG(bool, propagate_ic_data); | 25 DECLARE_FLAG(bool, propagate_ic_data); |
25 DECLARE_FLAG(bool, use_osr); | 26 DECLARE_FLAG(bool, use_osr); |
26 DECLARE_FLAG(bool, throw_on_javascript_int_overflow); | 27 DECLARE_FLAG(bool, throw_on_javascript_int_overflow); |
27 | 28 |
(...skipping 1555 matching lines...) | |
1583 __ j(NOT_EQUAL, fail); | 1584 __ j(NOT_EQUAL, fail); |
1584 } else { | 1585 } else { |
1585 UNREACHABLE(); | 1586 UNREACHABLE(); |
1586 } | 1587 } |
1587 } | 1588 } |
1588 } | 1589 } |
1589 __ Bind(&ok); | 1590 __ Bind(&ok); |
1590 } | 1591 } |
1591 | 1592 |
1592 | 1593 |
1594 static void PrintLocationSummary(const char* prefix, LocationSummary* locs) { | |
1595 return; | |
1596 char str[4000]; | |
1597 BufferFormatter f(str, sizeof(str)); | |
1598 locs->PrintTo(&f); | |
1599 printf("%s: [%" Pd "] %s\n", prefix, locs->stack_bitmap()->Length(), str); | |
1600 } | |
1601 | |
1602 | |
1603 static void PrintStack(const char* prefix, FlowGraphCompiler* compiler) { | |
1604 return; | |
1605 printf("%s: StackSize() -> %" Pd "\n", prefix, compiler->StackSize()); | |
1606 } | |
1607 | |
1608 | |
1593 class StoreInstanceFieldSlowPath : public SlowPathCode { | 1609 class StoreInstanceFieldSlowPath : public SlowPathCode { |
1594 public: | 1610 public: |
1595 StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, | 1611 explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction) |
1596 const Class& cls) | 1612 : instruction_(instruction) { } |
1597 : instruction_(instruction), cls_(cls) { } | |
1598 | 1613 |
1599 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 1614 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
1600 __ Comment("StoreInstanceFieldSlowPath"); | 1615 __ Comment("StoreInstanceFieldSlowPath"); |
1601 __ Bind(entry_label()); | 1616 { |
1602 const Code& stub = | 1617 __ Bind(double_entry_label()); |
1603 Code::Handle(StubCode::GetAllocationStubForClass(cls_)); | 1618 const Class& cls = compiler->double_class(); |
1604 const ExternalLabel label(cls_.ToCString(), stub.EntryPoint()); | 1619 const Code& stub = |
1620 Code::Handle(StubCode::GetAllocationStubForClass(cls)); | |
1621 const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); | |
1605 | 1622 |
1606 LocationSummary* locs = instruction_->locs(); | 1623 LocationSummary* locs = instruction_->locs(); |
1607 locs->live_registers()->Remove(locs->out()); | 1624 PrintLocationSummary("A", locs); |
1608 | 1625 PrintStack("A", compiler); |
1609 compiler->SaveLiveRegisters(locs); | 1626 locs->live_registers()->Remove(locs->out()); |
1610 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 1627 PrintLocationSummary("B", locs); |
1611 &label, | 1628 PrintStack("B", compiler); |
1612 PcDescriptors::kOther, | 1629 compiler->SaveLiveRegisters(locs); |
1613 locs); | 1630 PrintLocationSummary("C", locs); |
1614 __ MoveRegister(locs->temp(0).reg(), EAX); | 1631 PrintStack("C", compiler); |
1615 compiler->RestoreLiveRegisters(locs); | 1632 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
1616 | 1633 &label, |
1617 __ jmp(exit_label()); | 1634 PcDescriptors::kOther, |
1635 locs); | |
1636 PrintLocationSummary("D", locs); | |
1637 PrintStack("D", compiler); | |
1638 __ MoveRegister(locs->temp(0).reg(), EAX); | |
1639 compiler->RestoreLiveRegisters(locs); | |
1640 PrintLocationSummary("E", locs); | |
1641 PrintStack("E", compiler); | |
1642 __ jmp(double_exit_label()); | |
1643 } | |
1644 { | |
1645 __ Bind(float32x4_entry_label()); | |
1646 const Class& cls = compiler->float32x4_class(); | |
1647 const Code& stub = | |
1648 Code::Handle(StubCode::GetAllocationStubForClass(cls)); | |
1649 const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); | |
1650 LocationSummary* locs = instruction_->locs(); | |
1651 PrintLocationSummary("F", locs); | |
1652 PrintStack("F", compiler); | |
1653 locs->live_registers()->Remove(locs->out()); | |
1654 PrintLocationSummary("G", locs); | |
1655 PrintStack("G", compiler); | |
1656 compiler->SaveLiveRegisters(locs); | |
1657 PrintLocationSummary("H", locs); | |
1658 PrintStack("H", compiler); | |
1659 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
1660 &label, | |
1661 PcDescriptors::kOther, | |
1662 locs); | |
1663 PrintLocationSummary("I", locs); | |
1664 PrintStack("I", compiler); | |
1665 __ MoveRegister(locs->temp(0).reg(), EAX); | |
1666 compiler->RestoreLiveRegisters(locs); | |
1667 PrintLocationSummary("J", locs); | |
1668 PrintStack("J", compiler); | |
1669 __ jmp(float32x4_exit_label()); | |
1670 } | |
1618 } | 1671 } |
1619 | 1672 |
1673 Label* double_entry_label() { | |
1674 // Use default SlowPathCode label for double. | |
1675 return entry_label(); | |
1676 } | |
1677 Label* double_exit_label() { | |
1678 // Use default SlowPathCode label for double. | |
1679 return exit_label(); | |
1680 } | |
1681 | |
1682 Label* float32x4_entry_label() { return &float32x4_entry_label_; } | |
1683 Label* float32x4_exit_label() { return &float32x4_exit_label_; } | |
1684 | |
1620 private: | 1685 private: |
1686 Label float32x4_entry_label_; | |
1687 Label float32x4_exit_label_; | |
1621 StoreInstanceFieldInstr* instruction_; | 1688 StoreInstanceFieldInstr* instruction_; |
1622 const Class& cls_; | |
1623 }; | 1689 }; |
1624 | 1690 |
1625 | 1691 |
1626 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { | 1692 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { |
1627 const intptr_t kNumInputs = 2; | 1693 const intptr_t kNumInputs = 2; |
1628 const intptr_t kNumTemps = 0; | 1694 const intptr_t kNumTemps = 0; |
1629 LocationSummary* summary = | 1695 LocationSummary* summary = |
1630 new LocationSummary(kNumInputs, kNumTemps, | 1696 new LocationSummary(kNumInputs, kNumTemps, |
1631 (field().guarded_cid() == kIllegalCid) || (is_initialization_) | 1697 (field().guarded_cid() == kIllegalCid) || (is_initialization_) |
1632 ? LocationSummary::kCallOnSlowPath | 1698 ? LocationSummary::kCallOnSlowPath |
(...skipping 26 matching lines...) | |
1659 | 1725 |
1660 Register instance_reg = locs()->in(0).reg(); | 1726 Register instance_reg = locs()->in(0).reg(); |
1661 | 1727 |
1662 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1728 if (IsUnboxedStore() && compiler->is_optimizing()) { |
1663 XmmRegister value = locs()->in(1).fpu_reg(); | 1729 XmmRegister value = locs()->in(1).fpu_reg(); |
1664 Register temp = locs()->temp(0).reg(); | 1730 Register temp = locs()->temp(0).reg(); |
1665 Register temp2 = locs()->temp(1).reg(); | 1731 Register temp2 = locs()->temp(1).reg(); |
1666 const intptr_t cid = field().UnboxedFieldCid(); | 1732 const intptr_t cid = field().UnboxedFieldCid(); |
1667 | 1733 |
1668 if (is_initialization_) { | 1734 if (is_initialization_) { |
1735 StoreInstanceFieldSlowPath* slow_path = | |
1736 new StoreInstanceFieldSlowPath(this); | |
1737 compiler->AddSlowPathCode(slow_path); | |
1738 | |
1669 const Class* cls = NULL; | 1739 const Class* cls = NULL; |
1740 Label* entry_label = NULL; | |
1741 Label* exit_label = NULL; | |
1670 switch (cid) { | 1742 switch (cid) { |
1671 case kDoubleCid: | 1743 case kDoubleCid: |
1672 cls = &compiler->double_class(); | 1744 cls = &compiler->double_class(); |
1745 entry_label = slow_path->double_entry_label(); | |
1746 exit_label = slow_path->double_exit_label(); | |
1673 break; | 1747 break; |
1674 // TODO(johnmccutchan): Add kFloat32x4Cid here. | 1748 case kFloat32x4Cid: |
1749 cls = &compiler->float32x4_class(); | |
1750 entry_label = slow_path->float32x4_entry_label(); | |
1751 exit_label = slow_path->float32x4_exit_label(); | |
1752 break; | |
1675 default: | 1753 default: |
1676 UNREACHABLE(); | 1754 UNREACHABLE(); |
1677 } | 1755 } |
1678 StoreInstanceFieldSlowPath* slow_path = | |
1679 new StoreInstanceFieldSlowPath(this, *cls); | |
1680 compiler->AddSlowPathCode(slow_path); | |
1681 | 1756 |
1682 __ TryAllocate(*cls, | 1757 __ TryAllocate(*cls, |
1683 slow_path->entry_label(), | 1758 entry_label, |
1684 Assembler::kFarJump, | 1759 Assembler::kFarJump, |
1685 temp, | 1760 temp, |
1686 temp2); | 1761 temp2); |
1687 __ Bind(slow_path->exit_label()); | 1762 __ Bind(exit_label); |
1688 __ movl(temp2, temp); | 1763 __ movl(temp2, temp); |
1689 __ StoreIntoObject(instance_reg, | 1764 __ StoreIntoObject(instance_reg, |
1690 FieldAddress(instance_reg, field().Offset()), | 1765 FieldAddress(instance_reg, field().Offset()), |
1691 temp2); | 1766 temp2); |
1692 } else { | 1767 } else { |
1693 __ movl(temp, FieldAddress(instance_reg, field().Offset())); | 1768 __ movl(temp, FieldAddress(instance_reg, field().Offset())); |
1694 } | 1769 } |
1695 switch (cid) { | 1770 switch (cid) { |
1696 case kDoubleCid: | 1771 case kDoubleCid: |
1697 __ movsd(FieldAddress(temp, Double::value_offset()), value); | 1772 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); |
1698 // TODO(johnmccutchan): Add kFloat32x4Cid here. | 1773 __ movsd(FieldAddress(temp, Double::value_offset()), value); |
1774 break; | |
1775 case kFloat32x4Cid: | |
1776 __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr"); | |
1777 __ movups(FieldAddress(temp, Float32x4::value_offset()), value); | |
1699 break; | 1778 break; |
1700 default: | 1779 default: |
1701 UNREACHABLE(); | 1780 UNREACHABLE(); |
1702 } | 1781 } |
1703 return; | 1782 return; |
1704 } | 1783 } |
1705 | 1784 |
1706 if (IsPotentialUnboxedStore()) { | 1785 if (IsPotentialUnboxedStore()) { |
1707 Register value_reg = locs()->in(1).reg(); | 1786 Register value_reg = locs()->in(1).reg(); |
1708 Register temp = locs()->temp(0).reg(); | 1787 Register temp = locs()->temp(0).reg(); |
1709 Register temp2 = locs()->temp(1).reg(); | 1788 Register temp2 = locs()->temp(1).reg(); |
1710 FpuRegister fpu_temp = locs()->temp(2).fpu_reg(); | 1789 FpuRegister fpu_temp = locs()->temp(2).fpu_reg(); |
1711 | 1790 |
1712 Label store_pointer; | 1791 Label store_pointer; |
1713 Label copy_double; | |
1714 Label store_double; | 1792 Label store_double; |
1793 Label store_float32x4; | |
1715 | 1794 |
1716 __ LoadObject(temp, Field::ZoneHandle(field().raw())); | 1795 __ LoadObject(temp, Field::ZoneHandle(field().raw())); |
1717 | 1796 |
1718 __ cmpl(FieldAddress(temp, Field::is_nullable_offset()), | 1797 __ cmpl(FieldAddress(temp, Field::is_nullable_offset()), |
1719 Immediate(kNullCid)); | 1798 Immediate(kNullCid)); |
1720 __ j(EQUAL, &store_pointer); | 1799 __ j(EQUAL, &store_pointer); |
1721 | 1800 |
1722 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); | 1801 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); |
1723 __ testl(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1802 __ testl(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
1724 __ j(ZERO, &store_pointer); | 1803 __ j(ZERO, &store_pointer); |
1725 | 1804 |
1726 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), | 1805 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), |
1727 Immediate(kDoubleCid)); | 1806 Immediate(kDoubleCid)); |
1728 __ j(EQUAL, &store_double); | 1807 __ j(EQUAL, &store_double); |
1729 | 1808 |
1809 __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), | |
1810 Immediate(kFloat32x4Cid)); | |
1811 __ j(EQUAL, &store_float32x4); | |
1812 | |
1730 // Fall through. | 1813 // Fall through. |
1731 __ jmp(&store_pointer); | 1814 __ jmp(&store_pointer); |
1732 | 1815 |
1733 __ Bind(&store_double); | |
1734 | |
1735 const Immediate& raw_null = | |
1736 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1737 __ movl(temp, FieldAddress(instance_reg, field().Offset())); | |
1738 __ cmpl(temp, raw_null); | |
1739 __ j(NOT_EQUAL, ©_double); | |
1740 | |
1741 StoreInstanceFieldSlowPath* slow_path = | 1816 StoreInstanceFieldSlowPath* slow_path = |
1742 new StoreInstanceFieldSlowPath(this, compiler->double_class()); | 1817 new StoreInstanceFieldSlowPath(this); |
1743 compiler->AddSlowPathCode(slow_path); | 1818 compiler->AddSlowPathCode(slow_path); |
1744 | 1819 |
1745 if (!compiler->is_optimizing()) { | 1820 { |
1746 locs()->live_registers()->Add(locs()->in(0)); | 1821 __ Bind(&store_double); |
1747 locs()->live_registers()->Add(locs()->in(1)); | 1822 Label copy_double; |
1823 | |
1824 const Immediate& raw_null = | |
1825 Immediate(reinterpret_cast<intptr_t>(Object::null())); | |
1826 __ movl(temp, FieldAddress(instance_reg, field().Offset())); | |
1827 __ cmpl(temp, raw_null); | |
1828 __ j(NOT_EQUAL, ©_double); | |
1829 | |
1830 if (!compiler->is_optimizing()) { | |
1831 locs()->live_registers()->Add(locs()->in(0)); | |
1832 locs()->live_registers()->Add(locs()->in(1)); | |
1833 } | |
1834 | |
1835 __ TryAllocate(compiler->double_class(), | |
1836 slow_path->double_entry_label(), | |
1837 Assembler::kFarJump, | |
1838 temp, | |
1839 temp2); | |
1840 __ Bind(slow_path->double_exit_label()); | |
1841 __ movl(temp2, temp); | |
1842 __ StoreIntoObject(instance_reg, | |
1843 FieldAddress(instance_reg, field().Offset()), | |
1844 temp2); | |
1845 | |
1846 __ Bind(©_double); | |
1847 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); | |
1848 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); | |
1849 __ jmp(&skip_store); | |
1748 } | 1850 } |
1749 | 1851 |
1750 __ TryAllocate(compiler->double_class(), | 1852 { |
1751 slow_path->entry_label(), | 1853 __ Bind(&store_float32x4); |
1752 Assembler::kFarJump, | 1854 Label copy_float32x4; |
1753 temp, | |
1754 temp2); | |
1755 __ Bind(slow_path->exit_label()); | |
1756 __ movl(temp2, temp); | |
1757 __ StoreIntoObject(instance_reg, | |
1758 FieldAddress(instance_reg, field().Offset()), | |
1759 temp2); | |
1760 | 1855 |
1761 __ Bind(©_double); | 1856 const Immediate& raw_null = |
1762 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); | 1857 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
1763 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); | 1858 __ movl(temp, FieldAddress(instance_reg, field().Offset())); |
1764 __ jmp(&skip_store); | 1859 __ cmpl(temp, raw_null); |
1860 __ j(NOT_EQUAL, ©_float32x4); | |
1861 | |
1862 if (!compiler->is_optimizing()) { | |
1863 locs()->live_registers()->Add(locs()->in(0)); | |
1864 locs()->live_registers()->Add(locs()->in(1)); | |
1865 } | |
1866 | |
1867 __ TryAllocate(compiler->float32x4_class(), | |
1868 slow_path->float32x4_entry_label(), | |
1869 Assembler::kFarJump, | |
1870 temp, | |
1871 temp2); | |
1872 __ Bind(slow_path->float32x4_exit_label()); | |
1873 __ movl(temp2, temp); | |
1874 __ StoreIntoObject(instance_reg, | |
1875 FieldAddress(instance_reg, field().Offset()), | |
1876 temp2); | |
1877 | |
1878 __ Bind(©_float32x4); | |
1879 __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset())); | |
1880 __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp); | |
1881 __ jmp(&skip_store); | |
1882 } | |
1765 __ Bind(&store_pointer); | 1883 __ Bind(&store_pointer); |
1766 } | 1884 } |
1767 | 1885 |
1768 if (ShouldEmitStoreBarrier()) { | 1886 if (ShouldEmitStoreBarrier()) { |
1769 Register value_reg = locs()->in(1).reg(); | 1887 Register value_reg = locs()->in(1).reg(); |
1770 __ StoreIntoObject(instance_reg, | 1888 __ StoreIntoObject(instance_reg, |
1771 FieldAddress(instance_reg, field().Offset()), | 1889 FieldAddress(instance_reg, field().Offset()), |
1772 value_reg, | 1890 value_reg, |
1773 CanValueBeSmi()); | 1891 CanValueBeSmi()); |
1774 } else { | 1892 } else { |
(...skipping 124 matching lines...) | |
1899 deopt_id(), | 2017 deopt_id(), |
1900 kAllocateObjectWithBoundsCheckRuntimeEntry, | 2018 kAllocateObjectWithBoundsCheckRuntimeEntry, |
1901 3, | 2019 3, |
1902 locs()); | 2020 locs()); |
1903 __ Drop(3); | 2021 __ Drop(3); |
1904 ASSERT(locs()->out().reg() == EAX); | 2022 ASSERT(locs()->out().reg() == EAX); |
1905 __ popl(EAX); // Pop new instance. | 2023 __ popl(EAX); // Pop new instance. |
1906 } | 2024 } |
1907 | 2025 |
1908 | 2026 |
1909 class BoxDoubleSlowPath : public SlowPathCode { | 2027 class LoadFieldSlowPath : public SlowPathCode { |
1910 public: | 2028 public: |
1911 explicit BoxDoubleSlowPath(Instruction* instruction) | 2029 explicit LoadFieldSlowPath(Instruction* instruction) |
1912 : instruction_(instruction) { } | 2030 : instruction_(instruction) { } |
1913 | 2031 |
1914 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 2032 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
1915 __ Comment("BoxDoubleSlowPath"); | 2033 __ Comment("LoadFieldSlowPath"); |
1916 __ Bind(entry_label()); | 2034 { |
1917 const Class& double_class = compiler->double_class(); | 2035 __ Bind(double_entry_label()); |
1918 const Code& stub = | 2036 const Class& double_class = compiler->double_class(); |
1919 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); | 2037 const Code& stub = |
1920 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); | 2038 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
2039 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); | |
1921 | 2040 |
1922 LocationSummary* locs = instruction_->locs(); | 2041 LocationSummary* locs = instruction_->locs(); |
1923 locs->live_registers()->Remove(locs->out()); | 2042 locs->live_registers()->Remove(locs->out()); |
1924 | 2043 |
1925 compiler->SaveLiveRegisters(locs); | 2044 compiler->SaveLiveRegisters(locs); |
1926 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 2045 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
1927 &label, | 2046 &label, |
1928 PcDescriptors::kOther, | 2047 PcDescriptors::kOther, |
1929 locs); | 2048 locs); |
1930 __ MoveRegister(locs->out().reg(), EAX); | 2049 __ MoveRegister(locs->out().reg(), EAX); |
1931 compiler->RestoreLiveRegisters(locs); | 2050 compiler->RestoreLiveRegisters(locs); |
1932 | 2051 |
1933 __ jmp(exit_label()); | 2052 __ jmp(double_exit_label()); |
2053 } | |
2054 { | |
2055 __ Bind(float32x4_entry_label()); | |
2056 const Class& float32x4_class = compiler->float32x4_class(); | |
2057 const Code& stub = | |
2058 Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class)); | |
2059 const ExternalLabel label(float32x4_class.ToCString(), stub.EntryPoint()); | |
2060 | |
2061 LocationSummary* locs = instruction_->locs(); | |
2062 locs->live_registers()->Remove(locs->out()); | |
2063 | |
2064 compiler->SaveLiveRegisters(locs); | |
2065 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
2066 &label, | |
2067 PcDescriptors::kOther, | |
2068 locs); | |
2069 __ MoveRegister(locs->out().reg(), EAX); | |
2070 compiler->RestoreLiveRegisters(locs); | |
2071 | |
2072 __ jmp(float32x4_exit_label()); | |
2073 } | |
1934 } | 2074 } |
1935 | 2075 |
2076 Label* double_entry_label() { | |
2077 // Use default SlowPathCode label for double. | |
2078 return entry_label(); | |
2079 } | |
2080 Label* double_exit_label() { | |
2081 // Use default SlowPathCode label for double. | |
2082 return exit_label(); | |
2083 } | |
2084 | |
2085 Label* float32x4_entry_label() { return &float32x4_entry_label_; } | |
2086 Label* float32x4_exit_label() { return &float32x4_exit_label_; } | |
2087 | |
1936 private: | 2088 private: |
2089 Label float32x4_entry_label_; | |
2090 Label float32x4_exit_label_; | |
1937 Instruction* instruction_; | 2091 Instruction* instruction_; |
1938 }; | 2092 }; |
1939 | 2093 |
1940 | 2094 |
1941 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { | 2095 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { |
1942 const intptr_t kNumInputs = 1; | 2096 const intptr_t kNumInputs = 1; |
1943 const intptr_t kNumTemps = 0; | 2097 const intptr_t kNumTemps = 0; |
1944 LocationSummary* locs = | 2098 LocationSummary* locs = |
1945 new LocationSummary( | 2099 new LocationSummary( |
1946 kNumInputs, kNumTemps, | 2100 kNumInputs, kNumTemps, |
(...skipping 17 matching lines...) | |
1964 | 2118 |
1965 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2119 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1966 Register instance_reg = locs()->in(0).reg(); | 2120 Register instance_reg = locs()->in(0).reg(); |
1967 if (IsUnboxedLoad() && compiler->is_optimizing()) { | 2121 if (IsUnboxedLoad() && compiler->is_optimizing()) { |
1968 XmmRegister result = locs()->out().fpu_reg(); | 2122 XmmRegister result = locs()->out().fpu_reg(); |
1969 Register temp = locs()->temp(0).reg(); | 2123 Register temp = locs()->temp(0).reg(); |
1970 __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); | 2124 __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); |
1971 const intptr_t cid = field()->UnboxedFieldCid(); | 2125 const intptr_t cid = field()->UnboxedFieldCid(); |
1972 switch (cid) { | 2126 switch (cid) { |
1973 case kDoubleCid: | 2127 case kDoubleCid: |
2128 __ Comment("UnboxedDoubleLoadFieldInstr"); | |
1974 __ movsd(result, FieldAddress(temp, Double::value_offset())); | 2129 __ movsd(result, FieldAddress(temp, Double::value_offset())); |
1975 break; | 2130 break; |
1976 // TODO(johnmccutchan): Add Float32x4 path here. | 2131 case kFloat32x4Cid: |
2132 __ Comment("UnboxedFloat32x4LoadFieldInstr"); | |
2133 __ movups(result, FieldAddress(temp, Float32x4::value_offset())); | |
2134 break; | |
1977 default: | 2135 default: |
1978 UNREACHABLE(); | 2136 UNREACHABLE(); |
1979 } | 2137 } |
1980 return; | 2138 return; |
1981 } | 2139 } |
1982 | 2140 |
1983 Label done; | 2141 Label done; |
1984 Register result = locs()->out().reg(); | 2142 Register result = locs()->out().reg(); |
1985 if (IsPotentialUnboxedLoad()) { | 2143 if (IsPotentialUnboxedLoad()) { |
1986 Register temp = locs()->temp(1).reg(); | 2144 Register temp = locs()->temp(1).reg(); |
1987 XmmRegister value = locs()->temp(0).fpu_reg(); | 2145 XmmRegister value = locs()->temp(0).fpu_reg(); |
2146 LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this); | |
2147 compiler->AddSlowPathCode(slow_path); | |
1988 | 2148 |
1989 Label load_pointer; | 2149 Label load_pointer; |
1990 Label load_double; | 2150 Label load_double; |
2151 Label load_float32x4; | |
2152 | |
1991 __ LoadObject(result, Field::ZoneHandle(field()->raw())); | 2153 __ LoadObject(result, Field::ZoneHandle(field()->raw())); |
1992 | 2154 |
1993 FieldAddress field_cid_operand(result, Field::guarded_cid_offset()); | 2155 FieldAddress field_cid_operand(result, Field::guarded_cid_offset()); |
1994 FieldAddress field_nullability_operand(result, Field::is_nullable_offset()); | 2156 FieldAddress field_nullability_operand(result, Field::is_nullable_offset()); |
1995 | 2157 |
1996 __ cmpl(field_nullability_operand, Immediate(kNullCid)); | 2158 __ cmpl(field_nullability_operand, Immediate(kNullCid)); |
1997 __ j(EQUAL, &load_pointer); | 2159 __ j(EQUAL, &load_pointer); |
1998 | 2160 |
1999 __ cmpl(field_cid_operand, Immediate(kDoubleCid)); | 2161 __ cmpl(field_cid_operand, Immediate(kDoubleCid)); |
2000 __ j(EQUAL, &load_double); | 2162 __ j(EQUAL, &load_double); |
2001 | 2163 |
2164 __ cmpl(field_cid_operand, Immediate(kFloat32x4Cid)); | |
2165 __ j(EQUAL, &load_float32x4); | |
2166 | |
2002 // Fall through. | 2167 // Fall through. |
2003 __ jmp(&load_pointer); | 2168 __ jmp(&load_pointer); |
2004 | 2169 |
2005 __ Bind(&load_double); | 2170 { |
2006 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | 2171 __ Bind(&load_double); |
2007 compiler->AddSlowPathCode(slow_path); | |
2008 | 2172 |
2009 if (!compiler->is_optimizing()) { | 2173 if (!compiler->is_optimizing()) { |
2010 locs()->live_registers()->Add(locs()->in(0)); | 2174 locs()->live_registers()->Add(locs()->in(0)); |
2175 } | |
2176 | |
2177 __ TryAllocate(compiler->double_class(), | |
2178 slow_path->double_entry_label(), | |
2179 Assembler::kFarJump, | |
2180 result, | |
2181 temp); | |
2182 __ Bind(slow_path->double_exit_label()); | |
2183 __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); | |
2184 __ movsd(value, FieldAddress(temp, Double::value_offset())); | |
2185 __ movsd(FieldAddress(result, Double::value_offset()), value); | |
2186 __ jmp(&done); | |
2011 } | 2187 } |
2012 | 2188 |
2013 __ TryAllocate(compiler->double_class(), | 2189 { |
2014 slow_path->entry_label(), | 2190 __ Bind(&load_float32x4); |
2015 Assembler::kFarJump, | |
2016 result, | |
2017 temp); | |
2018 __ Bind(slow_path->exit_label()); | |
2019 __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); | |
2020 __ movsd(value, FieldAddress(temp, Double::value_offset())); | |
2021 __ movsd(FieldAddress(result, Double::value_offset()), value); | |
2022 __ jmp(&done); | |
2023 | 2191 |
2024 // TODO(johnmccutchan): Add Float32x4 path here. | 2192 if (!compiler->is_optimizing()) { |
2193 locs()->live_registers()->Add(locs()->in(0)); | |
2194 } | |
2195 | |
2196 __ TryAllocate(compiler->float32x4_class(), | |
2197 slow_path->float32x4_entry_label(), | |
2198 Assembler::kFarJump, | |
2199 result, | |
2200 temp); | |
2201 __ Bind(slow_path->float32x4_exit_label()); | |
2202 __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); | |
2203 __ movups(value, FieldAddress(temp, Float32x4::value_offset())); | |
2204 __ movups(FieldAddress(result, Float32x4::value_offset()), value); | |
2205 __ jmp(&done); | |
2206 } | |
2025 | 2207 |
2026 __ Bind(&load_pointer); | 2208 __ Bind(&load_pointer); |
2027 } | 2209 } |
2028 __ movl(result, FieldAddress(instance_reg, offset_in_bytes())); | 2210 __ movl(result, FieldAddress(instance_reg, offset_in_bytes())); |
2029 __ Bind(&done); | 2211 __ Bind(&done); |
2030 } | 2212 } |
2031 | 2213 |
2032 | 2214 |
2033 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { | 2215 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { |
2034 const intptr_t kNumInputs = 1; | 2216 const intptr_t kNumInputs = 1; |
(...skipping 835 matching lines...) | |
2870 } else { | 3052 } else { |
2871 Register temp = locs()->temp(0).reg(); | 3053 Register temp = locs()->temp(0).reg(); |
2872 __ movl(temp, left); | 3054 __ movl(temp, left); |
2873 __ orl(temp, right); | 3055 __ orl(temp, right); |
2874 __ testl(temp, Immediate(kSmiTagMask)); | 3056 __ testl(temp, Immediate(kSmiTagMask)); |
2875 } | 3057 } |
2876 __ j(ZERO, deopt); | 3058 __ j(ZERO, deopt); |
2877 } | 3059 } |
2878 | 3060 |
2879 | 3061 |
3062 class BoxDoubleSlowPath : public SlowPathCode { | |
3063 public: | |
3064 explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction) | |
3065 : instruction_(instruction) { } | |
3066 | |
3067 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
3068 __ Comment("BoxDoubleSlowPath"); | |
3069 __ Bind(entry_label()); | |
3070 const Class& double_class = compiler->double_class(); | |
3071 const Code& stub = | |
3072 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); | |
3073 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); | |
3074 | |
3075 LocationSummary* locs = instruction_->locs(); | |
3076 locs->live_registers()->Remove(locs->out()); | |
3077 | |
3078 compiler->SaveLiveRegisters(locs); | |
3079 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
3080 &label, | |
3081 PcDescriptors::kOther, | |
3082 locs); | |
3083 __ MoveRegister(locs->out().reg(), EAX); | |
3084 compiler->RestoreLiveRegisters(locs); | |
3085 | |
3086 __ jmp(exit_label()); | |
3087 } | |
3088 | |
3089 private: | |
3090 BoxDoubleInstr* instruction_; | |
3091 }; | |
3092 | |
3093 | |
2880 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { | 3094 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { |
2881 const intptr_t kNumInputs = 1; | 3095 const intptr_t kNumInputs = 1; |
2882 const intptr_t kNumTemps = 0; | 3096 const intptr_t kNumTemps = 0; |
2883 LocationSummary* summary = | 3097 LocationSummary* summary = |
2884 new LocationSummary(kNumInputs, | 3098 new LocationSummary(kNumInputs, |
2885 kNumTemps, | 3099 kNumTemps, |
2886 LocationSummary::kCallOnSlowPath); | 3100 LocationSummary::kCallOnSlowPath); |
2887 summary->set_in(0, Location::RequiresFpuRegister()); | 3101 summary->set_in(0, Location::RequiresFpuRegister()); |
2888 summary->set_out(Location::RequiresRegister()); | 3102 summary->set_out(Location::RequiresRegister()); |
2889 return summary; | 3103 return summary; |
(...skipping 56 matching lines...) | |
2946 __ jmp(&done); | 3160 __ jmp(&done); |
2947 __ Bind(&is_smi); | 3161 __ Bind(&is_smi); |
2948 __ movl(temp, value); | 3162 __ movl(temp, value); |
2949 __ SmiUntag(temp); | 3163 __ SmiUntag(temp); |
2950 __ cvtsi2sd(result, temp); | 3164 __ cvtsi2sd(result, temp); |
2951 __ Bind(&done); | 3165 __ Bind(&done); |
2952 } | 3166 } |
2953 } | 3167 } |
2954 | 3168 |
2955 | 3169 |
2956 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const { | |
2957 const intptr_t kNumInputs = 1; | |
2958 const intptr_t kNumTemps = 0; | |
2959 LocationSummary* summary = | |
2960 new LocationSummary(kNumInputs, | |
2961 kNumTemps, | |
2962 LocationSummary::kCallOnSlowPath); | |
2963 summary->set_in(0, Location::RequiresFpuRegister()); | |
2964 summary->set_out(Location::RequiresRegister()); | |
2965 return summary; | |
2966 } | |
2967 | |
2968 | |
2969 class BoxFloat32x4SlowPath : public SlowPathCode { | 3170 class BoxFloat32x4SlowPath : public SlowPathCode { |
2970 public: | 3171 public: |
2971 explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction) | 3172 explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction) |
2972 : instruction_(instruction) { } | 3173 : instruction_(instruction) { } |
2973 | 3174 |
2974 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 3175 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
2975 __ Comment("BoxFloat32x4SlowPath"); | 3176 __ Comment("BoxFloat32x4SlowPath"); |
2976 __ Bind(entry_label()); | 3177 __ Bind(entry_label()); |
2977 const Class& float32x4_class = compiler->float32x4_class(); | 3178 const Class& float32x4_class = compiler->float32x4_class(); |
2978 const Code& stub = | 3179 const Code& stub = |
(...skipping 12 matching lines...) Expand all Loading... | |
2991 compiler->RestoreLiveRegisters(locs); | 3192 compiler->RestoreLiveRegisters(locs); |
2992 | 3193 |
2993 __ jmp(exit_label()); | 3194 __ jmp(exit_label()); |
2994 } | 3195 } |
2995 | 3196 |
2996 private: | 3197 private: |
2997 BoxFloat32x4Instr* instruction_; | 3198 BoxFloat32x4Instr* instruction_; |
2998 }; | 3199 }; |
2999 | 3200 |
3000 | 3201 |
3202 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const { | |
3203 const intptr_t kNumInputs = 1; | |
3204 const intptr_t kNumTemps = 0; | |
3205 LocationSummary* summary = | |
3206 new LocationSummary(kNumInputs, | |
3207 kNumTemps, | |
3208 LocationSummary::kCallOnSlowPath); | |
3209 summary->set_in(0, Location::RequiresFpuRegister()); | |
3210 summary->set_out(Location::RequiresRegister()); | |
3211 return summary; | |
3212 } | |
3213 | |
3214 | |
3001 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3215 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3002 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); | 3216 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); |
3003 compiler->AddSlowPathCode(slow_path); | 3217 compiler->AddSlowPathCode(slow_path); |
3004 | 3218 |
3005 Register out_reg = locs()->out().reg(); | 3219 Register out_reg = locs()->out().reg(); |
3006 XmmRegister value = locs()->in(0).fpu_reg(); | 3220 XmmRegister value = locs()->in(0).fpu_reg(); |
3007 | 3221 |
3008 __ TryAllocate(compiler->float32x4_class(), | 3222 __ TryAllocate(compiler->float32x4_class(), |
3009 slow_path->entry_label(), | 3223 slow_path->entry_label(), |
3010 Assembler::kFarJump, | 3224 Assembler::kFarJump, |
(...skipping 2270 matching lines...) | |
5281 PcDescriptors::kOther, | 5495 PcDescriptors::kOther, |
5282 locs()); | 5496 locs()); |
5283 __ Drop(2); // Discard type arguments and receiver. | 5497 __ Drop(2); // Discard type arguments and receiver. |
5284 } | 5498 } |
5285 | 5499 |
5286 } // namespace dart | 5500 } // namespace dart |
5287 | 5501 |
5288 #undef __ | 5502 #undef __ |
5289 | 5503 |
5290 #endif // defined TARGET_ARCH_IA32 | 5504 #endif // defined TARGET_ARCH_IA32 |