OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 1474 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1485 UNREACHABLE(); | 1485 UNREACHABLE(); |
1486 } | 1486 } |
1487 } | 1487 } |
1488 } | 1488 } |
1489 __ Bind(&ok); | 1489 __ Bind(&ok); |
1490 } | 1490 } |
1491 | 1491 |
1492 | 1492 |
// Slow path for StoreInstanceFieldInstr: when the fast-path TryAllocate of a
// box object (e.g. a Double to hold an unboxed field value) fails, this path
// calls the class's allocation stub to allocate it in the runtime.
// The target box class is passed in by the caller via |cls| so the same slow
// path can serve multiple boxed types (Double today; Float32x4 planned per
// TODOs in the callers).
class StoreInstanceFieldSlowPath : public SlowPathCode {
 public:
  // |instruction| is the store whose LocationSummary describes the live
  // registers to preserve; |cls| is the class to allocate. Note: cls_ is a
  // reference member — callers must pass a Class handle that outlives this
  // slow-path object (both call sites pass zone/VM-lifetime handles).
  StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction,
                             const Class& cls)
      : instruction_(instruction), cls_(cls) { }

  // Emits the out-of-line allocation code: save live registers, call the
  // allocation stub for cls_, move the result into the instruction's temp
  // register, restore registers, and jump back to the fast path.
  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("StoreInstanceFieldSlowPath");
    __ Bind(entry_label());
    const Code& stub =
        Code::Handle(StubCode::GetAllocationStubForClass(cls_));
    const ExternalLabel label(cls_.ToCString(), stub.EntryPoint());

    LocationSummary* locs = instruction_->locs();
    // The output register is about to be (re)defined by this path, so it need
    // not be preserved across the stub call.
    locs->live_registers()->Remove(locs->out());

    compiler->SaveLiveRegisters(locs);
    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                           &label,
                           PcDescriptors::kOther,
                           locs);
    // The allocation stub returns the new object in RAX; hand it to the
    // instruction's first temp register before restoring clobbered registers.
    __ MoveRegister(locs->temp(0).reg(), RAX);
    compiler->RestoreLiveRegisters(locs);

    __ jmp(exit_label());
  }

 private:
  StoreInstanceFieldInstr* instruction_;  // Store being compiled (not owned).
  const Class& cls_;                      // Box class to allocate.
};
1523 | 1524 |
1524 | 1525 |
1525 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { | 1526 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { |
1526 const intptr_t kNumInputs = 2; | 1527 const intptr_t kNumInputs = 2; |
1527 const intptr_t kNumTemps = 0; | 1528 const intptr_t kNumTemps = 0; |
1528 LocationSummary* summary = | 1529 LocationSummary* summary = |
1529 new LocationSummary(kNumInputs, kNumTemps, | 1530 new LocationSummary(kNumInputs, kNumTemps, |
1530 (field().guarded_cid() == kIllegalCid) || (is_initialization_) | 1531 (field().guarded_cid() == kIllegalCid) || (is_initialization_) |
1531 ? LocationSummary::kCallOnSlowPath | 1532 ? LocationSummary::kCallOnSlowPath |
(...skipping 23 matching lines...) Expand all Loading... |
1555 | 1556 |
1556 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1557 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1557 Label skip_store; | 1558 Label skip_store; |
1558 | 1559 |
1559 Register instance_reg = locs()->in(0).reg(); | 1560 Register instance_reg = locs()->in(0).reg(); |
1560 | 1561 |
1561 if (IsUnboxedStore() && compiler->is_optimizing()) { | 1562 if (IsUnboxedStore() && compiler->is_optimizing()) { |
1562 XmmRegister value = locs()->in(1).fpu_reg(); | 1563 XmmRegister value = locs()->in(1).fpu_reg(); |
1563 Register temp = locs()->temp(0).reg(); | 1564 Register temp = locs()->temp(0).reg(); |
1564 Register temp2 = locs()->temp(1).reg(); | 1565 Register temp2 = locs()->temp(1).reg(); |
| 1566 const intptr_t cid = field().UnboxedFieldCid(); |
1565 | 1567 |
1566 if (is_initialization_) { | 1568 if (is_initialization_) { |
| 1569 const Class* cls = NULL; |
| 1570 switch (cid) { |
| 1571 case kDoubleCid: |
| 1572 cls = &compiler->double_class(); |
| 1573 break; |
| 1574 // TODO(johnmccutchan): Add kFloat32x4Cid here. |
| 1575 default: |
| 1576 UNREACHABLE(); |
| 1577 } |
1567 StoreInstanceFieldSlowPath* slow_path = | 1578 StoreInstanceFieldSlowPath* slow_path = |
1568 new StoreInstanceFieldSlowPath(this); | 1579 new StoreInstanceFieldSlowPath(this, *cls); |
1569 compiler->AddSlowPathCode(slow_path); | 1580 compiler->AddSlowPathCode(slow_path); |
1570 | 1581 |
1571 __ TryAllocate(compiler->double_class(), | 1582 __ TryAllocate(*cls, |
1572 slow_path->entry_label(), | 1583 slow_path->entry_label(), |
1573 Assembler::kFarJump, | 1584 Assembler::kFarJump, |
1574 temp, | 1585 temp, |
1575 PP); | 1586 PP); |
1576 __ Bind(slow_path->exit_label()); | 1587 __ Bind(slow_path->exit_label()); |
1577 __ movq(temp2, temp); | 1588 __ movq(temp2, temp); |
1578 __ StoreIntoObject(instance_reg, | 1589 __ StoreIntoObject(instance_reg, |
1579 FieldAddress(instance_reg, field().Offset()), | 1590 FieldAddress(instance_reg, field().Offset()), |
1580 temp2); | 1591 temp2); |
1581 } else { | 1592 } else { |
1582 __ movq(temp, FieldAddress(instance_reg, field().Offset())); | 1593 __ movq(temp, FieldAddress(instance_reg, field().Offset())); |
1583 } | 1594 } |
1584 __ movsd(FieldAddress(temp, Double::value_offset()), value); | 1595 switch (cid) { |
| 1596 case kDoubleCid: |
| 1597 __ movsd(FieldAddress(temp, Double::value_offset()), value); |
| 1598 // TODO(johnmccutchan): Add kFloat32x4Cid here. |
| 1599 break; |
| 1600 default: |
| 1601 UNREACHABLE(); |
| 1602 } |
1585 return; | 1603 return; |
1586 } | 1604 } |
1587 | 1605 |
1588 if (IsPotentialUnboxedStore()) { | 1606 if (IsPotentialUnboxedStore()) { |
1589 Register value_reg = locs()->in(1).reg(); | 1607 Register value_reg = locs()->in(1).reg(); |
1590 Register temp = locs()->temp(0).reg(); | 1608 Register temp = locs()->temp(0).reg(); |
1591 Register temp2 = locs()->temp(1).reg(); | 1609 Register temp2 = locs()->temp(1).reg(); |
1592 FpuRegister fpu_temp = locs()->temp(2).fpu_reg(); | 1610 FpuRegister fpu_temp = locs()->temp(2).fpu_reg(); |
1593 | 1611 |
1594 Label store_pointer, copy_payload; | 1612 Label store_pointer; |
| 1613 Label copy_double; |
| 1614 Label store_double; |
| 1615 |
1595 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP); | 1616 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP); |
1596 __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()), | 1617 |
1597 Immediate(kDoubleCid)); | |
1598 __ j(NOT_EQUAL, &store_pointer); | |
1599 __ cmpq(FieldAddress(temp, Field::is_nullable_offset()), | 1618 __ cmpq(FieldAddress(temp, Field::is_nullable_offset()), |
1600 Immediate(kNullCid)); | 1619 Immediate(kNullCid)); |
1601 __ j(EQUAL, &store_pointer); | 1620 __ j(EQUAL, &store_pointer); |
| 1621 |
1602 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); | 1622 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); |
1603 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); | 1623 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); |
1604 __ j(ZERO, &store_pointer); | 1624 __ j(ZERO, &store_pointer); |
1605 | 1625 |
| 1626 __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()), |
| 1627 Immediate(kDoubleCid)); |
| 1628 __ j(EQUAL, &store_double); |
| 1629 |
| 1630 // Fall through. |
| 1631 __ jmp(&store_pointer); |
| 1632 |
| 1633 __ Bind(&store_double); |
| 1634 |
1606 __ movq(temp, FieldAddress(instance_reg, field().Offset())); | 1635 __ movq(temp, FieldAddress(instance_reg, field().Offset())); |
1607 __ CompareObject(temp, Object::null_object(), PP); | 1636 __ CompareObject(temp, Object::null_object(), PP); |
1608 __ j(NOT_EQUAL, ©_payload); | 1637 __ j(NOT_EQUAL, ©_double); |
1609 | 1638 |
1610 StoreInstanceFieldSlowPath* slow_path = | 1639 StoreInstanceFieldSlowPath* slow_path = |
1611 new StoreInstanceFieldSlowPath(this); | 1640 new StoreInstanceFieldSlowPath(this, compiler->double_class()); |
1612 compiler->AddSlowPathCode(slow_path); | 1641 compiler->AddSlowPathCode(slow_path); |
1613 | 1642 |
1614 if (!compiler->is_optimizing()) { | 1643 if (!compiler->is_optimizing()) { |
1615 locs()->live_registers()->Add(locs()->in(0)); | 1644 locs()->live_registers()->Add(locs()->in(0)); |
1616 locs()->live_registers()->Add(locs()->in(1)); | 1645 locs()->live_registers()->Add(locs()->in(1)); |
1617 } | 1646 } |
1618 __ TryAllocate(compiler->double_class(), | 1647 __ TryAllocate(compiler->double_class(), |
1619 slow_path->entry_label(), | 1648 slow_path->entry_label(), |
1620 Assembler::kFarJump, | 1649 Assembler::kFarJump, |
1621 temp, | 1650 temp, |
1622 PP); | 1651 PP); |
1623 __ Bind(slow_path->exit_label()); | 1652 __ Bind(slow_path->exit_label()); |
1624 __ movq(temp2, temp); | 1653 __ movq(temp2, temp); |
1625 __ StoreIntoObject(instance_reg, | 1654 __ StoreIntoObject(instance_reg, |
1626 FieldAddress(instance_reg, field().Offset()), | 1655 FieldAddress(instance_reg, field().Offset()), |
1627 temp2); | 1656 temp2); |
1628 | 1657 |
1629 __ Bind(©_payload); | 1658 __ Bind(©_double); |
1630 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); | 1659 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); |
1631 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); | 1660 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); |
1632 __ jmp(&skip_store); | 1661 __ jmp(&skip_store); |
1633 __ Bind(&store_pointer); | 1662 __ Bind(&store_pointer); |
1634 } | 1663 } |
1635 | 1664 |
1636 if (ShouldEmitStoreBarrier()) { | 1665 if (ShouldEmitStoreBarrier()) { |
1637 Register value_reg = locs()->in(1).reg(); | 1666 Register value_reg = locs()->in(1).reg(); |
1638 __ StoreIntoObject(instance_reg, | 1667 __ StoreIntoObject(instance_reg, |
1639 FieldAddress(instance_reg, field().Offset()), | 1668 FieldAddress(instance_reg, field().Offset()), |
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1825 return locs; | 1854 return locs; |
1826 } | 1855 } |
1827 | 1856 |
1828 | 1857 |
// Emits x64 code for loading an instance field, handling three cases:
//  1. A statically-known unboxed field (optimized code): load the box object
//     pointer, then read the raw payload (dispatching on the field's cid).
//  2. A potentially-unboxed field: check the Field metadata at run time and,
//     if the field currently holds an unboxed double, re-box its value into a
//     freshly allocated Double (via BoxDoubleSlowPath on allocation failure).
//  3. Plain pointer load (the fall-through / default case).
void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register instance_reg = locs()->in(0).reg();
  if (IsUnboxedLoad() && compiler->is_optimizing()) {
    // Case 1: field is known-unboxed; result goes directly into an XMM reg.
    XmmRegister result = locs()->out().fpu_reg();
    Register temp = locs()->temp(0).reg();
    // temp <- pointer to the box object stored in the field slot.
    __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
    intptr_t cid = field()->UnboxedFieldCid();
    switch (cid) {
      case kDoubleCid:
        __ movsd(result, FieldAddress(temp, Double::value_offset()));
        break;
      // TODO(johnmccutchan): Add Float32x4 path here.
      default:
        UNREACHABLE();
    }
    return;
  }

  Label done;
  Register result = locs()->out().reg();
  if (IsPotentialUnboxedLoad()) {
    // Case 2: decide boxed vs. unboxed at run time from the Field object.
    Register temp = locs()->temp(1).reg();
    XmmRegister value = locs()->temp(0).fpu_reg();

    Label load_pointer;
    Label load_double;

    // result temporarily holds the Field object while inspecting its state.
    __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP);

    // A nullable field is never stored unboxed — take the pointer path.
    __ cmpq(FieldAddress(result, Field::is_nullable_offset()),
            Immediate(kNullCid));
    __ j(EQUAL, &load_pointer);

    __ cmpq(FieldAddress(result, Field::guarded_cid_offset()),
            Immediate(kDoubleCid));
    __ j(EQUAL, &load_double);

    // Fall through.
    __ jmp(&load_pointer);

    __ Bind(&load_double);

    // Allocate a fresh Double to box the field's raw value; the slow path
    // handles allocation failure.
    BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
    compiler->AddSlowPathCode(slow_path);

    if (!compiler->is_optimizing()) {
      // Unoptimized code must keep the instance register live across the
      // slow-path call.
      locs()->live_registers()->Add(locs()->in(0));
    }

    __ TryAllocate(compiler->double_class(),
                   slow_path->entry_label(),
                   Assembler::kFarJump,
                   result,
                   PP);
    __ Bind(slow_path->exit_label());
    // Copy the unboxed payload from the field's storage box into the new
    // Double (result): field slot -> temp -> XMM value -> result's payload.
    __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
    __ movsd(value, FieldAddress(temp, Double::value_offset()));
    __ movsd(FieldAddress(result, Double::value_offset()), value);
    __ jmp(&done);

    // TODO(johnmccutchan): Add Float32x4 path here.

    __ Bind(&load_pointer);
  }
  // Case 3 (and fall-through from case 2): plain tagged-pointer load.
  __ movq(result, FieldAddress(instance_reg, offset_in_bytes()));
  __ Bind(&done);
}
1878 | 1925 |
1879 | 1926 |
1880 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { | 1927 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { |
1881 const intptr_t kNumInputs = 1; | 1928 const intptr_t kNumInputs = 1; |
1882 const intptr_t kNumTemps = 0; | 1929 const intptr_t kNumTemps = 0; |
(...skipping 3071 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4954 PcDescriptors::kOther, | 5001 PcDescriptors::kOther, |
4955 locs()); | 5002 locs()); |
4956 __ Drop(2); // Discard type arguments and receiver. | 5003 __ Drop(2); // Discard type arguments and receiver. |
4957 } | 5004 } |
4958 | 5005 |
4959 } // namespace dart | 5006 } // namespace dart |
4960 | 5007 |
4961 #undef __ | 5008 #undef __ |
4962 | 5009 |
4963 #endif // defined TARGET_ARCH_X64 | 5010 #endif // defined TARGET_ARCH_X64 |
OLD | NEW |