OLD | NEW |
1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 1546 matching lines...)
1557 Field::kUnknownLengthOffset); | 1557 Field::kUnknownLengthOffset); |
1558 | 1558 |
1559 __ ldr(TMP, FieldAddress(value_reg, | 1559 __ ldr(TMP, FieldAddress(value_reg, |
1560 field().guarded_list_length_in_object_offset())); | 1560 field().guarded_list_length_in_object_offset())); |
1561 __ CompareImmediate(TMP, Smi::RawValue(field().guarded_list_length()), PP); | 1561 __ CompareImmediate(TMP, Smi::RawValue(field().guarded_list_length()), PP); |
1562 __ b(deopt, NE); | 1562 __ b(deopt, NE); |
1563 } | 1563 } |
1564 } | 1564 } |
1565 | 1565 |
1566 | 1566 |
1567 class StoreInstanceFieldSlowPath : public SlowPathCode { | 1567 class BoxAllocationSlowPath : public SlowPathCode { |
1568 public: | 1568 public: |
1569 StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, | 1569 BoxAllocationSlowPath(Instruction* instruction, |
1570 const Class& cls) | 1570 const Class& cls, |
1571 : instruction_(instruction), cls_(cls) { } | 1571 Register result) |
| 1572 : instruction_(instruction), |
| 1573 cls_(cls), |
| 1574 result_(result) { } |
1572 | 1575 |
1573 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | 1576 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
1574 Isolate* isolate = compiler->isolate(); | 1577 Isolate* isolate = compiler->isolate(); |
1575 StubCode* stub_code = isolate->stub_code(); | 1578 StubCode* stub_code = isolate->stub_code(); |
1576 | 1579 |
1577 __ Comment("StoreInstanceFieldSlowPath"); | 1580 if (Assembler::EmittingComments()) { |
| 1581 __ Comment("%s slow path allocation of %s", |
| 1582 instruction_->DebugName(), |
| 1583 String::Handle(cls_.PrettyName()).ToCString()); |
| 1584 } |
1578 __ Bind(entry_label()); | 1585 __ Bind(entry_label()); |
1579 | 1586 |
1580 const Code& stub = | 1587 const Code& stub = |
1581 Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_)); | 1588 Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_)); |
1582 const ExternalLabel label(stub.EntryPoint()); | 1589 const ExternalLabel label(stub.EntryPoint()); |
1583 | 1590 |
1584 LocationSummary* locs = instruction_->locs(); | 1591 LocationSummary* locs = instruction_->locs(); |
1585 locs->live_registers()->Remove(locs->temp(0)); | 1592 |
| 1593 locs->live_registers()->Remove(Location::RegisterLocation(result_)); |
1586 | 1594 |
1587 compiler->SaveLiveRegisters(locs); | 1595 compiler->SaveLiveRegisters(locs); |
1588 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | 1596 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
1589 &label, | 1597 &label, |
1590 RawPcDescriptors::kOther, | 1598 RawPcDescriptors::kOther, |
1591 locs); | 1599 locs); |
1592 __ mov(locs->temp(0).reg(), R0); | 1600 __ mov(result_, R0); |
1593 compiler->RestoreLiveRegisters(locs); | 1601 compiler->RestoreLiveRegisters(locs); |
1594 | 1602 |
1595 __ b(exit_label()); | 1603 __ b(exit_label()); |
1596 } | 1604 } |
1597 | 1605 |
| 1606 static void Allocate(FlowGraphCompiler* compiler, |
| 1607 Instruction* instruction, |
| 1608 const Class& cls, |
| 1609 Register result, |
| 1610 Register temp) { |
| 1611 BoxAllocationSlowPath* slow_path = |
| 1612 new BoxAllocationSlowPath(instruction, cls, result); |
| 1613 compiler->AddSlowPathCode(slow_path); |
| 1614 |
| 1615 __ TryAllocate(cls, |
| 1616 slow_path->entry_label(), |
| 1617 result, |
| 1618 temp, |
| 1619 PP); |
| 1620 __ Bind(slow_path->exit_label()); |
| 1621 } |
| 1622 |
1598 private: | 1623 private: |
1599 StoreInstanceFieldInstr* instruction_; | 1624 Instruction* instruction_; |
1600 const Class& cls_; | 1625 const Class& cls_; |
| 1626 Register result_; |
1601 }; | 1627 }; |
1602 | 1628 |
1603 | 1629 |
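Note: BoxAllocationSlowPath folds the former per-type slow paths (StoreInstanceFieldSlowPath above, plus the BoxDouble/BoxFloat32x4/BoxFloat64x2/BoxInt32x4 slow paths removed further down) into one class parameterized by the allocated class and the destination register, with a static Allocate() helper that emits the TryAllocate fast path and binds the slow-path labels for every caller. A rough standalone C++ sketch of the consolidation idea, using hypothetical names and printf stand-ins rather than the VM's types:

    // Sketch only: one slow path parameterized by what used to vary between
    // the duplicated classes, namely the class to allocate and the result register.
    #include <cstdio>
    #include <string>
    #include <utility>

    struct BoxAllocationSlowPathSketch {
      BoxAllocationSlowPathSketch(std::string cls, int result_reg)
          : cls_(std::move(cls)), result_reg_(result_reg) {}

      // Out-of-line path: call the allocation stub, move the result into place.
      void EmitNativeCode() const {
        std::printf("  slow: call AllocationStub(%s); mov r%d, r0\n",
                    cls_.c_str(), result_reg_);
      }

      // Shared fast path: inline allocation attempt, slow path on failure.
      static void Allocate(const std::string& cls, int result_reg, int temp_reg) {
        std::printf("fast: TryAllocate %s -> r%d (temp r%d), else take slow path\n",
                    cls.c_str(), result_reg, temp_reg);
        BoxAllocationSlowPathSketch(cls, result_reg).EmitNativeCode();
      }

      std::string cls_;
      int result_reg_;
    };

    int main() {
      // Each former Box*SlowPath call site reduces to a single call:
      BoxAllocationSlowPathSketch::Allocate("Double", /*result=*/2, /*temp=*/3);
      BoxAllocationSlowPathSketch::Allocate("Float32x4", /*result=*/4, /*temp=*/5);
      return 0;
    }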
| 1630 static void EnsureMutableBox(FlowGraphCompiler* compiler, |
| 1631 StoreInstanceFieldInstr* instruction, |
| 1632 Register box_reg, |
| 1633 const Class& cls, |
| 1634 Register instance_reg, |
| 1635 intptr_t offset, |
| 1636 Register temp) { |
| 1637 Label done; |
| 1638 __ LoadFieldFromOffset(box_reg, instance_reg, offset, PP); |
| 1639 __ CompareObject(box_reg, Object::null_object(), PP); |
| 1640 __ b(&done, NE); |
| 1641 BoxAllocationSlowPath::Allocate( |
| 1642 compiler, instruction, cls, box_reg, temp); |
| 1643 __ mov(temp, box_reg); |
| 1644 __ StoreIntoObjectOffset(instance_reg, offset, temp, PP); |
| 1645 __ Bind(&done); |
| 1646 } |
| 1647 |
| 1648 |
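Note: EnsureMutableBox captures the mutable-box reuse that the store_double/store_float32x4/store_float64x2 blocks below previously spelled out by hand: reuse the box already stored in the field, and only allocate (then publish with a write barrier) when the field is still null. In plain C++ the control flow is roughly the following; the types here are hypothetical, for illustration only:

    // Rough standalone model of EnsureMutableBox (hypothetical types, not VM code).
    struct Box { double value; };
    struct Instance { Box* field = nullptr; };

    Box* EnsureMutableBoxSketch(Instance* instance) {
      Box* box = instance->field;      // __ LoadFieldFromOffset
      if (box != nullptr) return box;  // non-null: reuse the existing box
      box = new Box();                 // BoxAllocationSlowPath::Allocate
      instance->field = box;           // __ StoreIntoObjectOffset (write barrier)
      return box;
    }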
1604 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate, | 1649 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate, |
1605 bool opt) const { | 1650 bool opt) const { |
1606 const intptr_t kNumInputs = 2; | 1651 const intptr_t kNumInputs = 2; |
1607 const intptr_t kNumTemps = | 1652 const intptr_t kNumTemps = |
1608 (IsUnboxedStore() && opt) ? 2 : | 1653 (IsUnboxedStore() && opt) ? 2 : |
1609 ((IsPotentialUnboxedStore()) ? 2 : 0); | 1654 ((IsPotentialUnboxedStore()) ? 2 : 0); |
1610 LocationSummary* summary = new(isolate) LocationSummary( | 1655 LocationSummary* summary = new(isolate) LocationSummary( |
1611 isolate, kNumInputs, kNumTemps, | 1656 isolate, kNumInputs, kNumTemps, |
1612 ((IsUnboxedStore() && opt && is_initialization_) || | 1657 ((IsUnboxedStore() && opt && is_initialization_) || |
1613 IsPotentialUnboxedStore()) | 1658 IsPotentialUnboxedStore()) |
(...skipping 40 matching lines...)
1654 case kFloat32x4Cid: | 1699 case kFloat32x4Cid: |
1655 cls = &compiler->float32x4_class(); | 1700 cls = &compiler->float32x4_class(); |
1656 break; | 1701 break; |
1657 case kFloat64x2Cid: | 1702 case kFloat64x2Cid: |
1658 cls = &compiler->float64x2_class(); | 1703 cls = &compiler->float64x2_class(); |
1659 break; | 1704 break; |
1660 default: | 1705 default: |
1661 UNREACHABLE(); | 1706 UNREACHABLE(); |
1662 } | 1707 } |
1663 | 1708 |
1664 StoreInstanceFieldSlowPath* slow_path = | 1709 BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp, temp2); |
1665 new StoreInstanceFieldSlowPath(this, *cls); | |
1666 compiler->AddSlowPathCode(slow_path); | |
1667 | |
1668 __ TryAllocate(*cls, | |
1669 slow_path->entry_label(), | |
1670 temp, | |
1671 PP); | |
1672 __ Bind(slow_path->exit_label()); | |
1673 __ mov(temp2, temp); | 1710 __ mov(temp2, temp); |
1674 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2, PP); | 1711 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2, PP); |
1675 } else { | 1712 } else { |
1676 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_, PP); | 1713 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_, PP); |
1677 } | 1714 } |
1678 switch (cid) { | 1715 switch (cid) { |
1679 case kDoubleCid: | 1716 case kDoubleCid: |
1680 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); | 1717 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); |
1681 __ StoreDFieldToOffset(value, temp, Double::value_offset(), PP); | 1718 __ StoreDFieldToOffset(value, temp, Double::value_offset(), PP); |
1682 break; | 1719 break; |
(...skipping 49 matching lines...)
1732 // Fall through. | 1769 // Fall through. |
1733 __ b(&store_pointer); | 1770 __ b(&store_pointer); |
1734 | 1771 |
1735 if (!compiler->is_optimizing()) { | 1772 if (!compiler->is_optimizing()) { |
1736 locs()->live_registers()->Add(locs()->in(0)); | 1773 locs()->live_registers()->Add(locs()->in(0)); |
1737 locs()->live_registers()->Add(locs()->in(1)); | 1774 locs()->live_registers()->Add(locs()->in(1)); |
1738 } | 1775 } |
1739 | 1776 |
1740 { | 1777 { |
1741 __ Bind(&store_double); | 1778 __ Bind(&store_double); |
1742 Label copy_double; | 1779 EnsureMutableBox(compiler, |
1743 StoreInstanceFieldSlowPath* slow_path = | 1780 this, |
1744 new StoreInstanceFieldSlowPath(this, compiler->double_class()); | 1781 temp, |
1745 compiler->AddSlowPathCode(slow_path); | 1782 compiler->double_class(), |
1746 | 1783 instance_reg, |
1747 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_, PP); | 1784 offset_in_bytes_, |
1748 __ CompareObject(temp, Object::null_object(), PP); | 1785 temp2); |
1749 __ b(&copy_double, NE); | |
1750 | |
1751 __ TryAllocate(compiler->double_class(), | |
1752 slow_path->entry_label(), | |
1753 temp, | |
1754 PP); | |
1755 __ Bind(slow_path->exit_label()); | |
1756 __ mov(temp2, temp); | |
1757 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2, PP); | |
1758 __ Bind(&copy_double); | |
1759 __ LoadDFieldFromOffset(VTMP, value_reg, Double::value_offset(), PP); | 1786 __ LoadDFieldFromOffset(VTMP, value_reg, Double::value_offset(), PP); |
1760 __ StoreDFieldToOffset(VTMP, temp, Double::value_offset(), PP); | 1787 __ StoreDFieldToOffset(VTMP, temp, Double::value_offset(), PP); |
1761 __ b(&skip_store); | 1788 __ b(&skip_store); |
1762 } | 1789 } |
1763 | 1790 |
1764 { | 1791 { |
1765 __ Bind(&store_float32x4); | 1792 __ Bind(&store_float32x4); |
1766 Label copy_float32x4; | 1793 EnsureMutableBox(compiler, |
1767 StoreInstanceFieldSlowPath* slow_path = | 1794 this, |
1768 new StoreInstanceFieldSlowPath(this, compiler->float32x4_class()); | 1795 temp, |
1769 compiler->AddSlowPathCode(slow_path); | 1796 compiler->float32x4_class(), |
1770 | 1797 instance_reg, |
1771 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_, PP); | 1798 offset_in_bytes_, |
1772 __ CompareObject(temp, Object::null_object(), PP); | 1799 temp2); |
1773 __ b(&copy_float32x4, NE); | |
1774 | |
1775 __ TryAllocate(compiler->float32x4_class(), | |
1776 slow_path->entry_label(), | |
1777 temp, | |
1778 PP); | |
1779 __ Bind(slow_path->exit_label()); | |
1780 __ mov(temp2, temp); | |
1781 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2, PP); | |
1782 __ Bind(&copy_float32x4); | |
1783 __ LoadQFieldFromOffset(VTMP, value_reg, Float32x4::value_offset(), PP); | 1800 __ LoadQFieldFromOffset(VTMP, value_reg, Float32x4::value_offset(), PP); |
1784 __ StoreQFieldToOffset(VTMP, temp, Float32x4::value_offset(), PP); | 1801 __ StoreQFieldToOffset(VTMP, temp, Float32x4::value_offset(), PP); |
1785 __ b(&skip_store); | 1802 __ b(&skip_store); |
1786 } | 1803 } |
1787 | 1804 |
1788 { | 1805 { |
1789 __ Bind(&store_float64x2); | 1806 __ Bind(&store_float64x2); |
1790 Label copy_float64x2; | 1807 EnsureMutableBox(compiler, |
1791 StoreInstanceFieldSlowPath* slow_path = | 1808 this, |
1792 new StoreInstanceFieldSlowPath(this, compiler->float64x2_class()); | 1809 temp, |
1793 compiler->AddSlowPathCode(slow_path); | 1810 compiler->float64x2_class(), |
1794 | 1811 instance_reg, |
1795 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes_, PP); | 1812 offset_in_bytes_, |
1796 __ CompareObject(temp, Object::null_object(), PP); | 1813 temp2); |
1797 __ b(&copy_float64x2, NE); | |
1798 | |
1799 __ TryAllocate(compiler->float64x2_class(), | |
1800 slow_path->entry_label(), | |
1801 temp, | |
1802 PP); | |
1803 __ Bind(slow_path->exit_label()); | |
1804 __ mov(temp2, temp); | |
1805 __ StoreIntoObjectOffset(instance_reg, offset_in_bytes_, temp2, PP); | |
1806 __ Bind(&copy_float64x2); | |
1807 __ LoadQFieldFromOffset(VTMP, value_reg, Float64x2::value_offset(), PP); | 1814 __ LoadQFieldFromOffset(VTMP, value_reg, Float64x2::value_offset(), PP); |
1808 __ StoreQFieldToOffset(VTMP, temp, Float64x2::value_offset(), PP); | 1815 __ StoreQFieldToOffset(VTMP, temp, Float64x2::value_offset(), PP); |
1809 __ b(&skip_store); | 1816 __ b(&skip_store); |
1810 } | 1817 } |
1811 | 1818 |
1812 __ Bind(&store_pointer); | 1819 __ Bind(&store_pointer); |
1813 } | 1820 } |
1814 | 1821 |
1815 if (ShouldEmitStoreBarrier()) { | 1822 if (ShouldEmitStoreBarrier()) { |
1816 const Register value_reg = locs()->in(1).reg(); | 1823 const Register value_reg = locs()->in(1).reg(); |
(...skipping 115 matching lines...)
1932 ASSERT(locs()->in(kLengthPos).reg() == R2); | 1939 ASSERT(locs()->in(kLengthPos).reg() == R2); |
1933 StubCode* stub_code = compiler->isolate()->stub_code(); | 1940 StubCode* stub_code = compiler->isolate()->stub_code(); |
1934 compiler->GenerateCall(token_pos(), | 1941 compiler->GenerateCall(token_pos(), |
1935 &stub_code->AllocateArrayLabel(), | 1942 &stub_code->AllocateArrayLabel(), |
1936 RawPcDescriptors::kOther, | 1943 RawPcDescriptors::kOther, |
1937 locs()); | 1944 locs()); |
1938 ASSERT(locs()->out(0).reg() == R0); | 1945 ASSERT(locs()->out(0).reg() == R0); |
1939 } | 1946 } |
1940 | 1947 |
1941 | 1948 |
1942 class BoxDoubleSlowPath : public SlowPathCode { | |
1943 public: | |
1944 explicit BoxDoubleSlowPath(Instruction* instruction) | |
1945 : instruction_(instruction) { } | |
1946 | |
1947 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
1948 __ Comment("BoxDoubleSlowPath"); | |
1949 __ Bind(entry_label()); | |
1950 Isolate* isolate = compiler->isolate(); | |
1951 StubCode* stub_code = isolate->stub_code(); | |
1952 const Class& double_class = compiler->double_class(); | |
1953 const Code& stub = | |
1954 Code::Handle(isolate, | |
1955 stub_code->GetAllocationStubForClass(double_class)); | |
1956 const ExternalLabel label(stub.EntryPoint()); | |
1957 | |
1958 LocationSummary* locs = instruction_->locs(); | |
1959 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
1960 | |
1961 compiler->SaveLiveRegisters(locs); | |
1962 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
1963 &label, | |
1964 RawPcDescriptors::kOther, | |
1965 locs); | |
1966 __ mov(locs->out(0).reg(), R0); | |
1967 compiler->RestoreLiveRegisters(locs); | |
1968 | |
1969 __ b(exit_label()); | |
1970 } | |
1971 | |
1972 private: | |
1973 Instruction* instruction_; | |
1974 }; | |
1975 | |
1976 | |
1977 class BoxFloat32x4SlowPath : public SlowPathCode { | |
1978 public: | |
1979 explicit BoxFloat32x4SlowPath(Instruction* instruction) | |
1980 : instruction_(instruction) { } | |
1981 | |
1982 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
1983 __ Comment("BoxFloat32x4SlowPath"); | |
1984 __ Bind(entry_label()); | |
1985 Isolate* isolate = compiler->isolate(); | |
1986 StubCode* stub_code = isolate->stub_code(); | |
1987 const Class& float32x4_class = compiler->float32x4_class(); | |
1988 const Code& stub = | |
1989 Code::Handle(isolate, | |
1990 stub_code->GetAllocationStubForClass(float32x4_class)); | |
1991 const ExternalLabel label(stub.EntryPoint()); | |
1992 | |
1993 LocationSummary* locs = instruction_->locs(); | |
1994 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
1995 | |
1996 compiler->SaveLiveRegisters(locs); | |
1997 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
1998 &label, | |
1999 RawPcDescriptors::kOther, | |
2000 locs); | |
2001 __ mov(locs->out(0).reg(), R0); | |
2002 compiler->RestoreLiveRegisters(locs); | |
2003 | |
2004 __ b(exit_label()); | |
2005 } | |
2006 | |
2007 private: | |
2008 Instruction* instruction_; | |
2009 }; | |
2010 | |
2011 | |
2012 class BoxFloat64x2SlowPath : public SlowPathCode { | |
2013 public: | |
2014 explicit BoxFloat64x2SlowPath(Instruction* instruction) | |
2015 : instruction_(instruction) { } | |
2016 | |
2017 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
2018 __ Comment("BoxFloat64x2SlowPath"); | |
2019 __ Bind(entry_label()); | |
2020 Isolate* isolate = compiler->isolate(); | |
2021 StubCode* stub_code = isolate->stub_code(); | |
2022 const Class& float64x2_class = compiler->float64x2_class(); | |
2023 const Code& stub = | |
2024 Code::Handle(isolate, | |
2025 stub_code->GetAllocationStubForClass(float64x2_class)); | |
2026 const ExternalLabel label(stub.EntryPoint()); | |
2027 | |
2028 LocationSummary* locs = instruction_->locs(); | |
2029 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
2030 | |
2031 compiler->SaveLiveRegisters(locs); | |
2032 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
2033 &label, | |
2034 RawPcDescriptors::kOther, | |
2035 locs); | |
2036 __ mov(locs->out(0).reg(), R0); | |
2037 compiler->RestoreLiveRegisters(locs); | |
2038 | |
2039 __ b(exit_label()); | |
2040 } | |
2041 | |
2042 private: | |
2043 Instruction* instruction_; | |
2044 }; | |
2045 | |
2046 | |
2047 LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate, | 1949 LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate, |
2048 bool opt) const { | 1950 bool opt) const { |
2049 const intptr_t kNumInputs = 1; | 1951 const intptr_t kNumInputs = 1; |
2050 const intptr_t kNumTemps = | 1952 const intptr_t kNumTemps = |
2051 (IsUnboxedLoad() && opt) ? 1 : | 1953 (IsUnboxedLoad() && opt) ? 1 : |
2052 ((IsPotentialUnboxedLoad()) ? 1 : 0); | 1954 ((IsPotentialUnboxedLoad()) ? 1 : 0); |
2053 LocationSummary* locs = new(isolate) LocationSummary( | 1955 LocationSummary* locs = new(isolate) LocationSummary( |
2054 isolate, kNumInputs, kNumTemps, | 1956 isolate, kNumInputs, kNumTemps, |
2055 (opt && !IsPotentialUnboxedLoad()) | 1957 (opt && !IsPotentialUnboxedLoad()) |
2056 ? LocationSummary::kNoCall | 1958 ? LocationSummary::kNoCall |
(...skipping 70 matching lines...)
2127 | 2029 |
2128 // Fall through. | 2030 // Fall through. |
2129 __ b(&load_pointer); | 2031 __ b(&load_pointer); |
2130 | 2032 |
2131 if (!compiler->is_optimizing()) { | 2033 if (!compiler->is_optimizing()) { |
2132 locs()->live_registers()->Add(locs()->in(0)); | 2034 locs()->live_registers()->Add(locs()->in(0)); |
2133 } | 2035 } |
2134 | 2036 |
2135 { | 2037 { |
2136 __ Bind(&load_double); | 2038 __ Bind(&load_double); |
2137 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | 2039 BoxAllocationSlowPath::Allocate(compiler, |
2138 compiler->AddSlowPathCode(slow_path); | 2040 this, |
2139 | 2041 compiler->double_class(), |
2140 __ TryAllocate(compiler->double_class(), | 2042 result_reg, |
2141 slow_path->entry_label(), | 2043 temp); |
2142 result_reg, | |
2143 PP); | |
2144 __ Bind(slow_path->exit_label()); | |
2145 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); | 2044 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); |
2146 __ LoadDFieldFromOffset(VTMP, temp, Double::value_offset(), PP); | 2045 __ LoadDFieldFromOffset(VTMP, temp, Double::value_offset(), PP); |
2147 __ StoreDFieldToOffset(VTMP, result_reg, Double::value_offset(), PP); | 2046 __ StoreDFieldToOffset(VTMP, result_reg, Double::value_offset(), PP); |
2148 __ b(&done); | 2047 __ b(&done); |
2149 } | 2048 } |
2150 | 2049 |
2151 { | 2050 { |
2152 __ Bind(&load_float32x4); | 2051 __ Bind(&load_float32x4); |
2153 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); | 2052 BoxAllocationSlowPath::Allocate(compiler, |
2154 compiler->AddSlowPathCode(slow_path); | 2053 this, |
2155 | 2054 compiler->float32x4_class(), |
2156 __ TryAllocate(compiler->float32x4_class(), | 2055 result_reg, |
2157 slow_path->entry_label(), | 2056 temp); |
2158 result_reg, | |
2159 PP); | |
2160 __ Bind(slow_path->exit_label()); | |
2161 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); | 2057 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); |
2162 __ LoadQFieldFromOffset(VTMP, temp, Float32x4::value_offset(), PP); | 2058 __ LoadQFieldFromOffset(VTMP, temp, Float32x4::value_offset(), PP); |
2163 __ StoreQFieldToOffset(VTMP, result_reg, Float32x4::value_offset(), PP); | 2059 __ StoreQFieldToOffset(VTMP, result_reg, Float32x4::value_offset(), PP); |
2164 __ b(&done); | 2060 __ b(&done); |
2165 } | 2061 } |
2166 | 2062 |
2167 { | 2063 { |
2168 __ Bind(&load_float64x2); | 2064 __ Bind(&load_float64x2); |
2169 BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this); | 2065 BoxAllocationSlowPath::Allocate(compiler, |
2170 compiler->AddSlowPathCode(slow_path); | 2066 this, |
2171 | 2067 compiler->float64x2_class(), |
2172 __ TryAllocate(compiler->float64x2_class(), | 2068 result_reg, |
2173 slow_path->entry_label(), | 2069 temp); |
2174 result_reg, | |
2175 PP); | |
2176 __ Bind(slow_path->exit_label()); | |
2177 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); | 2070 __ LoadFieldFromOffset(temp, instance_reg, offset_in_bytes(), PP); |
2178 __ LoadQFieldFromOffset(VTMP, temp, Float64x2::value_offset(), PP); | 2071 __ LoadQFieldFromOffset(VTMP, temp, Float64x2::value_offset(), PP); |
2179 __ StoreQFieldToOffset(VTMP, result_reg, Float64x2::value_offset(), PP); | 2072 __ StoreQFieldToOffset(VTMP, result_reg, Float64x2::value_offset(), PP); |
2180 __ b(&done); | 2073 __ b(&done); |
2181 } | 2074 } |
2182 | 2075 |
2183 __ Bind(&load_pointer); | 2076 __ Bind(&load_pointer); |
2184 } | 2077 } |
2185 __ LoadFieldFromOffset(result_reg, instance_reg, offset_in_bytes(), PP); | 2078 __ LoadFieldFromOffset(result_reg, instance_reg, offset_in_bytes(), PP); |
2186 __ Bind(&done); | 2079 __ Bind(&done); |
(...skipping 772 matching lines...)
2959 __ orr(TMP, left, Operand(right)); | 2852 __ orr(TMP, left, Operand(right)); |
2960 __ tsti(TMP, kSmiTagMask); | 2853 __ tsti(TMP, kSmiTagMask); |
2961 } | 2854 } |
2962 __ b(deopt, EQ); | 2855 __ b(deopt, EQ); |
2963 } | 2856 } |
2964 | 2857 |
2965 | 2858 |
2966 LocationSummary* BoxDoubleInstr::MakeLocationSummary(Isolate* isolate, | 2859 LocationSummary* BoxDoubleInstr::MakeLocationSummary(Isolate* isolate, |
2967 bool opt) const { | 2860 bool opt) const { |
2968 const intptr_t kNumInputs = 1; | 2861 const intptr_t kNumInputs = 1; |
2969 const intptr_t kNumTemps = 0; | 2862 const intptr_t kNumTemps = 1; |
2970 LocationSummary* summary = new(isolate) LocationSummary( | 2863 LocationSummary* summary = new(isolate) LocationSummary( |
2971 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); | 2864 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); |
2972 summary->set_in(0, Location::RequiresFpuRegister()); | 2865 summary->set_in(0, Location::RequiresFpuRegister()); |
| 2866 summary->set_temp(0, Location::RequiresRegister()); |
2973 summary->set_out(0, Location::RequiresRegister()); | 2867 summary->set_out(0, Location::RequiresRegister()); |
2974 return summary; | 2868 return summary; |
2975 } | 2869 } |
2976 | 2870 |
2977 | 2871 |
2978 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2872 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2979 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); | |
2980 compiler->AddSlowPathCode(slow_path); | |
2981 | |
2982 const Register out_reg = locs()->out(0).reg(); | 2873 const Register out_reg = locs()->out(0).reg(); |
| 2874 const Register temp_reg = locs()->temp(0).reg(); |
2983 const VRegister value = locs()->in(0).fpu_reg(); | 2875 const VRegister value = locs()->in(0).fpu_reg(); |
2984 | 2876 |
2985 __ TryAllocate(compiler->double_class(), | 2877 BoxAllocationSlowPath::Allocate( |
2986 slow_path->entry_label(), | 2878 compiler, this, compiler->double_class(), out_reg, temp_reg); |
2987 out_reg, | |
2988 PP); | |
2989 __ Bind(slow_path->exit_label()); | |
2990 __ StoreDFieldToOffset(value, out_reg, Double::value_offset(), PP); | 2879 __ StoreDFieldToOffset(value, out_reg, Double::value_offset(), PP); |
2991 } | 2880 } |
2992 | 2881 |
2993 | 2882 |
2994 LocationSummary* UnboxDoubleInstr::MakeLocationSummary(Isolate* isolate, | 2883 LocationSummary* UnboxDoubleInstr::MakeLocationSummary(Isolate* isolate, |
2995 bool opt) const { | 2884 bool opt) const { |
2996 const intptr_t kNumInputs = 1; | 2885 const intptr_t kNumInputs = 1; |
2997 const intptr_t kNumTemps = 0; | 2886 const intptr_t kNumTemps = 0; |
2998 LocationSummary* summary = new(isolate) LocationSummary( | 2887 LocationSummary* summary = new(isolate) LocationSummary( |
2999 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 2888 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
(...skipping 36 matching lines...)
3036 __ scvtfd(result, TMP); | 2925 __ scvtfd(result, TMP); |
3037 __ Bind(&done); | 2926 __ Bind(&done); |
3038 } | 2927 } |
3039 } | 2928 } |
3040 } | 2929 } |
3041 | 2930 |
3042 | 2931 |
3043 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, | 2932 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, |
3044 bool opt) const { | 2933 bool opt) const { |
3045 const intptr_t kNumInputs = 1; | 2934 const intptr_t kNumInputs = 1; |
3046 const intptr_t kNumTemps = 0; | 2935 const intptr_t kNumTemps = 1; |
3047 LocationSummary* summary = new(isolate) LocationSummary( | 2936 LocationSummary* summary = new(isolate) LocationSummary( |
3048 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); | 2937 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); |
3049 summary->set_in(0, Location::RequiresFpuRegister()); | 2938 summary->set_in(0, Location::RequiresFpuRegister()); |
| 2939 summary->set_temp(0, Location::RequiresRegister()); |
3050 summary->set_out(0, Location::RequiresRegister()); | 2940 summary->set_out(0, Location::RequiresRegister()); |
3051 return summary; | 2941 return summary; |
3052 } | 2942 } |
3053 | 2943 |
3054 | 2944 |
3055 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2945 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3056 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); | |
3057 compiler->AddSlowPathCode(slow_path); | |
3058 | |
3059 const Register out_reg = locs()->out(0).reg(); | 2946 const Register out_reg = locs()->out(0).reg(); |
| 2947 const Register temp_reg = locs()->temp(0).reg(); |
3060 const VRegister value = locs()->in(0).fpu_reg(); | 2948 const VRegister value = locs()->in(0).fpu_reg(); |
3061 | 2949 |
3062 __ TryAllocate(compiler->float32x4_class(), | 2950 BoxAllocationSlowPath::Allocate( |
3063 slow_path->entry_label(), | 2951 compiler, this, compiler->float32x4_class(), out_reg, temp_reg); |
3064 out_reg, | |
3065 PP); | |
3066 __ Bind(slow_path->exit_label()); | |
3067 | |
3068 __ StoreQFieldToOffset(value, out_reg, Float32x4::value_offset(), PP); | 2952 __ StoreQFieldToOffset(value, out_reg, Float32x4::value_offset(), PP); |
3069 } | 2953 } |
3070 | 2954 |
3071 | 2955 |
3072 LocationSummary* UnboxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, | 2956 LocationSummary* UnboxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, |
3073 bool opt) const { | 2957 bool opt) const { |
3074 const intptr_t kNumInputs = 1; | 2958 const intptr_t kNumInputs = 1; |
3075 const intptr_t kNumTemps = 0; | 2959 const intptr_t kNumTemps = 0; |
3076 LocationSummary* summary = new(isolate) LocationSummary( | 2960 LocationSummary* summary = new(isolate) LocationSummary( |
3077 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 2961 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
(...skipping 16 matching lines...)
3094 __ b(deopt, NE); | 2978 __ b(deopt, NE); |
3095 } | 2979 } |
3096 | 2980 |
3097 __ LoadQFieldFromOffset(result, value, Float32x4::value_offset(), PP); | 2981 __ LoadQFieldFromOffset(result, value, Float32x4::value_offset(), PP); |
3098 } | 2982 } |
3099 | 2983 |
3100 | 2984 |
3101 LocationSummary* BoxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, | 2985 LocationSummary* BoxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, |
3102 bool opt) const { | 2986 bool opt) const { |
3103 const intptr_t kNumInputs = 1; | 2987 const intptr_t kNumInputs = 1; |
3104 const intptr_t kNumTemps = 0; | 2988 const intptr_t kNumTemps = 1; |
3105 LocationSummary* summary = new(isolate) LocationSummary( | 2989 LocationSummary* summary = new(isolate) LocationSummary( |
3106 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); | 2990 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); |
3107 summary->set_in(0, Location::RequiresFpuRegister()); | 2991 summary->set_in(0, Location::RequiresFpuRegister()); |
| 2992 summary->set_temp(0, Location::RequiresRegister()); |
3108 summary->set_out(0, Location::RequiresRegister()); | 2993 summary->set_out(0, Location::RequiresRegister()); |
3109 return summary; | 2994 return summary; |
3110 } | 2995 } |
3111 | 2996 |
3112 | 2997 |
3113 void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2998 void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3114 BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this); | |
3115 compiler->AddSlowPathCode(slow_path); | |
3116 | |
3117 const Register out_reg = locs()->out(0).reg(); | 2999 const Register out_reg = locs()->out(0).reg(); |
| 3000 const Register temp_reg = locs()->temp(0).reg(); |
3118 const VRegister value = locs()->in(0).fpu_reg(); | 3001 const VRegister value = locs()->in(0).fpu_reg(); |
3119 | 3002 |
3120 __ TryAllocate(compiler->float64x2_class(), | 3003 BoxAllocationSlowPath::Allocate( |
3121 slow_path->entry_label(), | 3004 compiler, this, compiler->float64x2_class(), out_reg, temp_reg); |
3122 out_reg, | |
3123 PP); | |
3124 __ Bind(slow_path->exit_label()); | |
3125 | |
3126 __ StoreQFieldToOffset(value, out_reg, Float64x2::value_offset(), PP); | 3005 __ StoreQFieldToOffset(value, out_reg, Float64x2::value_offset(), PP); |
3127 } | 3006 } |
3128 | 3007 |
3129 | 3008 |
3130 LocationSummary* UnboxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, | 3009 LocationSummary* UnboxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, |
3131 bool opt) const { | 3010 bool opt) const { |
3132 const intptr_t kNumInputs = 1; | 3011 const intptr_t kNumInputs = 1; |
3133 const intptr_t kNumTemps = 0; | 3012 const intptr_t kNumTemps = 0; |
3134 LocationSummary* summary = new(isolate) LocationSummary( | 3013 LocationSummary* summary = new(isolate) LocationSummary( |
3135 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3014 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
(...skipping 16 matching lines...)
3152 __ b(deopt, NE); | 3031 __ b(deopt, NE); |
3153 } | 3032 } |
3154 | 3033 |
3155 __ LoadQFieldFromOffset(result, value, Float64x2::value_offset(), PP); | 3034 __ LoadQFieldFromOffset(result, value, Float64x2::value_offset(), PP); |
3156 } | 3035 } |
3157 | 3036 |
3158 | 3037 |
3159 LocationSummary* BoxInt32x4Instr::MakeLocationSummary(Isolate* isolate, | 3038 LocationSummary* BoxInt32x4Instr::MakeLocationSummary(Isolate* isolate, |
3160 bool opt) const { | 3039 bool opt) const { |
3161 const intptr_t kNumInputs = 1; | 3040 const intptr_t kNumInputs = 1; |
3162 const intptr_t kNumTemps = 0; | 3041 const intptr_t kNumTemps = 1; |
3163 LocationSummary* summary = new(isolate) LocationSummary( | 3042 LocationSummary* summary = new(isolate) LocationSummary( |
3164 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); | 3043 isolate, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); |
3165 summary->set_in(0, Location::RequiresFpuRegister()); | 3044 summary->set_in(0, Location::RequiresFpuRegister()); |
| 3045 summary->set_temp(0, Location::RequiresRegister()); |
3166 summary->set_out(0, Location::RequiresRegister()); | 3046 summary->set_out(0, Location::RequiresRegister()); |
3167 return summary; | 3047 return summary; |
3168 } | 3048 } |
3169 | 3049 |
3170 | 3050 |
3171 class BoxInt32x4SlowPath : public SlowPathCode { | |
3172 public: | |
3173 explicit BoxInt32x4SlowPath(BoxInt32x4Instr* instruction) | |
3174 : instruction_(instruction) { } | |
3175 | |
3176 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { | |
3177 __ Comment("BoxInt32x4SlowPath"); | |
3178 __ Bind(entry_label()); | |
3179 Isolate* isolate = compiler->isolate(); | |
3180 StubCode* stub_code = isolate->stub_code(); | |
3181 const Class& int32x4_class = compiler->int32x4_class(); | |
3182 const Code& stub = | |
3183 Code::Handle(isolate, | |
3184 stub_code->GetAllocationStubForClass(int32x4_class)); | |
3185 const ExternalLabel label(stub.EntryPoint()); | |
3186 | |
3187 LocationSummary* locs = instruction_->locs(); | |
3188 ASSERT(!locs->live_registers()->Contains(locs->out(0))); | |
3189 | |
3190 compiler->SaveLiveRegisters(locs); | |
3191 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. | |
3192 &label, | |
3193 RawPcDescriptors::kOther, | |
3194 locs); | |
3195 __ mov(locs->out(0).reg(), R0); | |
3196 compiler->RestoreLiveRegisters(locs); | |
3197 | |
3198 __ b(exit_label()); | |
3199 } | |
3200 | |
3201 private: | |
3202 BoxInt32x4Instr* instruction_; | |
3203 }; | |
3204 | |
3205 | |
3206 void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3051 void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3207 BoxInt32x4SlowPath* slow_path = new BoxInt32x4SlowPath(this); | |
3208 compiler->AddSlowPathCode(slow_path); | |
3209 | |
3210 const Register out_reg = locs()->out(0).reg(); | 3052 const Register out_reg = locs()->out(0).reg(); |
| 3053 const Register temp_reg = locs()->temp(0).reg(); |
3211 const VRegister value = locs()->in(0).fpu_reg(); | 3054 const VRegister value = locs()->in(0).fpu_reg(); |
3212 | 3055 BoxAllocationSlowPath::Allocate( |
3213 __ TryAllocate(compiler->int32x4_class(), | 3056 compiler, this, compiler->int32x4_class(), out_reg, temp_reg); |
3214 slow_path->entry_label(), | |
3215 out_reg, | |
3216 PP); | |
3217 __ Bind(slow_path->exit_label()); | |
3218 | |
3219 __ StoreQFieldToOffset(value, out_reg, Int32x4::value_offset(), PP); | 3057 __ StoreQFieldToOffset(value, out_reg, Int32x4::value_offset(), PP); |
3220 } | 3058 } |
3221 | 3059 |
3222 | 3060 |
3223 LocationSummary* UnboxInt32x4Instr::MakeLocationSummary(Isolate* isolate, | 3061 LocationSummary* UnboxInt32x4Instr::MakeLocationSummary(Isolate* isolate, |
3224 bool opt) const { | 3062 bool opt) const { |
3225 const intptr_t kNumInputs = 1; | 3063 const intptr_t kNumInputs = 1; |
3226 const intptr_t kNumTemps = 0; | 3064 const intptr_t kNumTemps = 0; |
3227 LocationSummary* summary = new(isolate) LocationSummary( | 3065 LocationSummary* summary = new(isolate) LocationSummary( |
3228 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3066 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); |
(...skipping 2245 matching lines...)
5474 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); | 5312 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); |
5475 #if defined(DEBUG) | 5313 #if defined(DEBUG) |
5476 __ LoadImmediate(R4, kInvalidObjectPointer, kNoPP); | 5314 __ LoadImmediate(R4, kInvalidObjectPointer, kNoPP); |
5477 __ LoadImmediate(R5, kInvalidObjectPointer, kNoPP); | 5315 __ LoadImmediate(R5, kInvalidObjectPointer, kNoPP); |
5478 #endif | 5316 #endif |
5479 } | 5317 } |
5480 | 5318 |
5481 } // namespace dart | 5319 } // namespace dart |
5482 | 5320 |
5483 #endif // defined TARGET_ARCH_ARM64 | 5321 #endif // defined TARGET_ARCH_ARM64 |