Side by Side Diff: runtime/vm/intermediate_language_x64.cc

Issue 150063004: Support reusable boxes for Float32x4 fields (Closed)
Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 10 months ago
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 1474 matching lines...)
1485 UNREACHABLE(); 1485 UNREACHABLE();
1486 } 1486 }
1487 } 1487 }
1488 } 1488 }
1489 __ Bind(&ok); 1489 __ Bind(&ok);
1490 } 1490 }
1491 1491
1492 1492
1493 class StoreInstanceFieldSlowPath : public SlowPathCode { 1493 class StoreInstanceFieldSlowPath : public SlowPathCode {
1494 public: 1494 public:
1495 StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, 1495 explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction)
1496 const Class& cls) 1496 : instruction_(instruction) { }
1497 : instruction_(instruction), cls_(cls) { }
1498 1497
1499 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 1498 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
1500 __ Comment("StoreInstanceFieldSlowPath"); 1499 __ Comment("StoreInstanceFieldSlowPath");
1501 __ Bind(entry_label()); 1500 {
1502 const Code& stub = 1501 __ Bind(double_entry_label());
1503 Code::Handle(StubCode::GetAllocationStubForClass(cls_)); 1502 const Class& cls = compiler->double_class();
1504 const ExternalLabel label(cls_.ToCString(), stub.EntryPoint()); 1503 const Code& stub =
1504 Code::Handle(StubCode::GetAllocationStubForClass(cls));
1505 const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
1505 1506
1506 LocationSummary* locs = instruction_->locs(); 1507 LocationSummary* locs = instruction_->locs();
1507 locs->live_registers()->Remove(locs->out()); 1508 locs->live_registers()->Remove(locs->out());
1508 1509
1509 compiler->SaveLiveRegisters(locs); 1510 compiler->SaveLiveRegisters(locs);
1510 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. 1511 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
1511 &label, 1512 &label,
1512 PcDescriptors::kOther, 1513 PcDescriptors::kOther,
1513 locs); 1514 locs);
1514 __ MoveRegister(locs->temp(0).reg(), RAX); 1515 __ MoveRegister(locs->temp(0).reg(), RAX);
1515 compiler->RestoreLiveRegisters(locs); 1516 compiler->RestoreLiveRegisters(locs);
1516 1517
1517 __ jmp(exit_label()); 1518 __ jmp(double_exit_label());
1519 }
1520 {
1521 __ Bind(float32x4_entry_label());
1522 const Class& cls = compiler->float32x4_class();
1523 const Code& stub =
1524 Code::Handle(StubCode::GetAllocationStubForClass(cls));
1525 const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
1526
1527 LocationSummary* locs = instruction_->locs();
1528 locs->live_registers()->Remove(locs->out());
1529
1530 compiler->SaveLiveRegisters(locs);
1531 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
1532 &label,
1533 PcDescriptors::kOther,
1534 locs);
1535 __ MoveRegister(locs->temp(0).reg(), RAX);
1536 compiler->RestoreLiveRegisters(locs);
1537
1538 __ jmp(float32x4_exit_label());
1539 }
1518 } 1540 }
1519 1541
1542 Label* double_entry_label() {
1543 // Use default SlowPathCode label for double.
1544 return entry_label();
1545 }
1546 Label* double_exit_label() {
1547 // Use default SlowPathCode label for double.
1548 return exit_label();
1549 }
1550
1551 Label* float32x4_entry_label() { return &float32x4_entry_label_; }
1552 Label* float32x4_exit_label() { return &float32x4_exit_label_; }
1553
1520 private: 1554 private:
1555 Label float32x4_entry_label_;
1556 Label float32x4_exit_label_;
1521 StoreInstanceFieldInstr* instruction_; 1557 StoreInstanceFieldInstr* instruction_;
1522 const Class& cls_;
1523 }; 1558 };
1524 1559
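Note: the refactored slow path above serves two allocation targets from one object. The double case reuses SlowPathCode's default entry/exit labels, while Float32x4 gets its own label pair. Below is a minimal standalone sketch of that label-routing pattern; Label, SlowPathCode, and the printf "emission" are simplified stand-ins for the Dart VM's real types, not its actual API.

    // Minimal sketch of the dual-label slow-path pattern in the hunk above.
    // Label and SlowPathCode are simplified stand-ins, not the VM's API.
    #include <cstdio>

    struct Label { const char* name; };  // stands in for an assembler label

    class SlowPathCode {
     public:
      virtual ~SlowPathCode() {}
      virtual void EmitNativeCode() = 0;
      Label* entry_label() { return &entry_label_; }
      Label* exit_label() { return &exit_label_; }
     private:
      Label entry_label_{"entry"};
      Label exit_label_{"exit"};
    };

    class StoreInstanceFieldSlowPath : public SlowPathCode {
     public:
      void EmitNativeCode() override {
        // One slow path hosts two allocation stubs; the fast path jumps to
        // whichever entry label matches the field's guarded class id.
        std::printf("bind %s: call Double allocation stub\n",
                    double_entry_label()->name);
        std::printf("jmp %s\n", double_exit_label()->name);
        std::printf("bind %s: call Float32x4 allocation stub\n",
                    float32x4_entry_label()->name);
        std::printf("jmp %s\n", float32x4_exit_label()->name);
      }
      // The double case reuses the base class's default label pair.
      Label* double_entry_label() { return entry_label(); }
      Label* double_exit_label() { return exit_label(); }
      Label* float32x4_entry_label() { return &float32x4_entry_label_; }
      Label* float32x4_exit_label() { return &float32x4_exit_label_; }
     private:
      Label float32x4_entry_label_{"f32x4_entry"};
      Label float32x4_exit_label_{"f32x4_exit"};
    };

    int main() {
      StoreInstanceFieldSlowPath path;
      path.EmitNativeCode();
      return 0;
    }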
1525 1560
1526 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const { 1561 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const {
1527 const intptr_t kNumInputs = 2; 1562 const intptr_t kNumInputs = 2;
1528 const intptr_t kNumTemps = 0; 1563 const intptr_t kNumTemps = 0;
1529 LocationSummary* summary = 1564 LocationSummary* summary =
1530 new LocationSummary(kNumInputs, kNumTemps, 1565 new LocationSummary(kNumInputs, kNumTemps,
1531 (field().guarded_cid() == kIllegalCid) || (is_initialization_) 1566 (field().guarded_cid() == kIllegalCid) || (is_initialization_)
1532 ? LocationSummary::kCallOnSlowPath 1567 ? LocationSummary::kCallOnSlowPath
(...skipping 26 matching lines...)
1559 1594
1560 Register instance_reg = locs()->in(0).reg(); 1595 Register instance_reg = locs()->in(0).reg();
1561 1596
1562 if (IsUnboxedStore() && compiler->is_optimizing()) { 1597 if (IsUnboxedStore() && compiler->is_optimizing()) {
1563 XmmRegister value = locs()->in(1).fpu_reg(); 1598 XmmRegister value = locs()->in(1).fpu_reg();
1564 Register temp = locs()->temp(0).reg(); 1599 Register temp = locs()->temp(0).reg();
1565 Register temp2 = locs()->temp(1).reg(); 1600 Register temp2 = locs()->temp(1).reg();
1566 const intptr_t cid = field().UnboxedFieldCid(); 1601 const intptr_t cid = field().UnboxedFieldCid();
1567 1602
1568 if (is_initialization_) { 1603 if (is_initialization_) {
1604 StoreInstanceFieldSlowPath* slow_path =
1605 new StoreInstanceFieldSlowPath(this);
1606 compiler->AddSlowPathCode(slow_path);
1607
1569 const Class* cls = NULL; 1608 const Class* cls = NULL;
1609 Label* entry_label = NULL;
1610 Label* exit_label = NULL;
1570 switch (cid) { 1611 switch (cid) {
1571 case kDoubleCid: 1612 case kDoubleCid:
1572 cls = &compiler->double_class(); 1613 cls = &compiler->double_class();
1614 entry_label = slow_path->double_entry_label();
1615 exit_label = slow_path->double_exit_label();
1573 break; 1616 break;
1574 // TODO(johnmccutchan): Add kFloat32x4Cid here. 1617 case kFloat32x4Cid:
1618 cls = &compiler->float32x4_class();
1619 entry_label = slow_path->float32x4_entry_label();
1620 exit_label = slow_path->float32x4_exit_label();
1621 break;
1575 default: 1622 default:
1576 UNREACHABLE(); 1623 UNREACHABLE();
1577 } 1624 }
1578 StoreInstanceFieldSlowPath* slow_path =
1579 new StoreInstanceFieldSlowPath(this, *cls);
1580 compiler->AddSlowPathCode(slow_path);
1581 1625
1582 __ TryAllocate(*cls, 1626 __ TryAllocate(*cls,
1583 slow_path->entry_label(), 1627 entry_label,
1584 Assembler::kFarJump, 1628 Assembler::kFarJump,
1585 temp, 1629 temp,
1586 PP); 1630 PP);
1587 __ Bind(slow_path->exit_label()); 1631 __ Bind(exit_label);
1588 __ movq(temp2, temp); 1632 __ movq(temp2, temp);
1589 __ StoreIntoObject(instance_reg, 1633 __ StoreIntoObject(instance_reg,
1590 FieldAddress(instance_reg, field().Offset()), 1634 FieldAddress(instance_reg, field().Offset()),
1591 temp2); 1635 temp2);
1592 } else { 1636 } else {
1593 __ movq(temp, FieldAddress(instance_reg, field().Offset())); 1637 __ movq(temp, FieldAddress(instance_reg, field().Offset()));
1594 } 1638 }
1595 switch (cid) { 1639 switch (cid) {
1596 case kDoubleCid: 1640 case kDoubleCid:
1597 __ movsd(FieldAddress(temp, Double::value_offset()), value); 1641 __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
1598 // TODO(johnmccutchan): Add kFloat32x4Cid here. 1642 __ movsd(FieldAddress(temp, Double::value_offset()), value);
1599 break; 1643 break;
1644 case kFloat32x4Cid:
1645 __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
1646 __ movups(FieldAddress(temp, Float32x4::value_offset()), value);
1647 break;
1600 default: 1648 default:
1601 UNREACHABLE(); 1649 UNREACHABLE();
1602 } 1650 }
1603 return; 1651 return;
1604 } 1652 }
1605 1653
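Note: in the unboxed fast path above, the store width follows the class id: movsd writes the 8-byte double payload, while movups writes all 16 bytes of a Float32x4 with no alignment requirement. A hedged intrinsics sketch of the same two stores follows; the box layouts are illustrative, not the VM's object layout.

    // Sketch: the two store widths used above, expressed with SSE intrinsics.
    // DoubleBox and Float32x4Box are illustrative stand-ins for the payloads
    // of the VM's Double and Float32x4 boxes, not their real definitions.
    #include <emmintrin.h>  // _mm_set_sd, _mm_store_sd
    #include <xmmintrin.h>  // _mm_set_ps, _mm_storeu_ps
    #include <cstdio>

    struct DoubleBox    { double value; };    // 8-byte payload
    struct Float32x4Box { float value[4]; };  // 16-byte payload

    int main() {
      DoubleBox d;
      Float32x4Box f;
      // movsd: one 64-bit lane, as in the kDoubleCid case.
      _mm_store_sd(&d.value, _mm_set_sd(3.5));
      // movups: all four 32-bit lanes, unaligned, as in the kFloat32x4Cid case.
      _mm_storeu_ps(f.value, _mm_set_ps(4.f, 3.f, 2.f, 1.f));
      std::printf("%g | %g %g %g %g\n", d.value,
                  f.value[0], f.value[1], f.value[2], f.value[3]);
      return 0;
    }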
1606 if (IsPotentialUnboxedStore()) { 1654 if (IsPotentialUnboxedStore()) {
1607 Register value_reg = locs()->in(1).reg(); 1655 Register value_reg = locs()->in(1).reg();
1608 Register temp = locs()->temp(0).reg(); 1656 Register temp = locs()->temp(0).reg();
1609 Register temp2 = locs()->temp(1).reg(); 1657 Register temp2 = locs()->temp(1).reg();
1610 FpuRegister fpu_temp = locs()->temp(2).fpu_reg(); 1658 FpuRegister fpu_temp = locs()->temp(2).fpu_reg();
1611 1659
1612 Label store_pointer; 1660 Label store_pointer;
1613 Label copy_double;
1614 Label store_double; 1661 Label store_double;
1662 Label store_float32x4;
1615 1663
1616 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP); 1664 __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP);
1617 1665
1618 __ cmpq(FieldAddress(temp, Field::is_nullable_offset()), 1666 __ cmpq(FieldAddress(temp, Field::is_nullable_offset()),
1619 Immediate(kNullCid)); 1667 Immediate(kNullCid));
1620 __ j(EQUAL, &store_pointer); 1668 __ j(EQUAL, &store_pointer);
1621 1669
1622 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset())); 1670 __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset()));
1623 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit)); 1671 __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
1624 __ j(ZERO, &store_pointer); 1672 __ j(ZERO, &store_pointer);
1625 1673
1626 __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()), 1674 __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()),
1627 Immediate(kDoubleCid)); 1675 Immediate(kDoubleCid));
1628 __ j(EQUAL, &store_double); 1676 __ j(EQUAL, &store_double);
1629 1677
1678 __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()),
1679 Immediate(kFloat32x4Cid));
1680 __ j(EQUAL, &store_float32x4);
1681
1630 // Fall through. 1682 // Fall through.
1631 __ jmp(&store_pointer); 1683 __ jmp(&store_pointer);
1632 1684
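Note: the guard checks above pick a store kind from the field's metadata: nullable fields and non-candidates take the pointer store, otherwise the guarded class id selects the unboxed representation. A small sketch of that dispatch; Field and its flags here are toy stand-ins for the VM's Field object.

    // Sketch of the field-guard dispatch emitted above. Field is a toy
    // stand-in; in the VM these are bit tests on the Field object's raw
    // metadata, not virtual calls.
    #include <cstdio>

    enum ClassId { kNullCid, kDoubleCid, kFloat32x4Cid, kOtherCid };

    struct Field {
      ClassId guarded_cid;
      ClassId nullable_cid;     // kNullCid means "may hold null"
      bool unboxing_candidate;  // Field::kUnboxingCandidateBit
    };

    const char* StoreKind(const Field& f) {
      if (f.nullable_cid == kNullCid) return "store_pointer";
      if (!f.unboxing_candidate) return "store_pointer";
      if (f.guarded_cid == kDoubleCid) return "store_double";
      if (f.guarded_cid == kFloat32x4Cid) return "store_float32x4";
      return "store_pointer";  // fall through
    }

    int main() {
      Field f{kFloat32x4Cid, kOtherCid, true};
      std::printf("%s\n", StoreKind(f));  // prints store_float32x4
      return 0;
    }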
1633 __ Bind(&store_double);
1634
1635 __ movq(temp, FieldAddress(instance_reg, field().Offset()));
1636 __ CompareObject(temp, Object::null_object(), PP);
1637 __ j(NOT_EQUAL, &copy_double);
1638
1639 StoreInstanceFieldSlowPath* slow_path = 1685 StoreInstanceFieldSlowPath* slow_path =
1640 new StoreInstanceFieldSlowPath(this, compiler->double_class()); 1686 new StoreInstanceFieldSlowPath(this);
1641 compiler->AddSlowPathCode(slow_path); 1687 compiler->AddSlowPathCode(slow_path);
1642 1688
1643 if (!compiler->is_optimizing()) { 1689 {
1644 locs()->live_registers()->Add(locs()->in(0)); 1690 __ Bind(&store_double);
1645 locs()->live_registers()->Add(locs()->in(1)); 1691 Label copy_double;
1692
1693 __ movq(temp, FieldAddress(instance_reg, field().Offset()));
1694 __ CompareObject(temp, Object::null_object(), PP);
1695 __ j(NOT_EQUAL, &copy_double);
1696
1697 if (!compiler->is_optimizing()) {
1698 locs()->live_registers()->Add(locs()->in(0));
1699 locs()->live_registers()->Add(locs()->in(1));
1700 }
1701 __ TryAllocate(compiler->double_class(),
1702 slow_path->double_entry_label(),
1703 Assembler::kFarJump,
1704 temp,
1705 PP);
1706 __ Bind(slow_path->double_exit_label());
1707 __ movq(temp2, temp);
1708 __ StoreIntoObject(instance_reg,
1709 FieldAddress(instance_reg, field().Offset()),
1710 temp2);
1711
1712 __ Bind(&copy_double);
1713 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
1714 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
1715 __ jmp(&skip_store);
1646 } 1716 }
1647 __ TryAllocate(compiler->double_class(),
1648 slow_path->entry_label(),
1649 Assembler::kFarJump,
1650 temp,
1651 PP);
1652 __ Bind(slow_path->exit_label());
1653 __ movq(temp2, temp);
1654 __ StoreIntoObject(instance_reg,
1655 FieldAddress(instance_reg, field().Offset()),
1656 temp2);
1657 1717
1658 __ Bind(&copy_double); 1718 {
1659 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); 1719 __ Bind(&store_float32x4);
1660 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); 1720 Label copy_float32x4;
1661 __ jmp(&skip_store); 1721
1722 __ movq(temp, FieldAddress(instance_reg, field().Offset()));
1723 __ CompareObject(temp, Object::null_object(), PP);
1724 __ j(NOT_EQUAL, &copy_float32x4);
1725
1726 if (!compiler->is_optimizing()) {
1727 locs()->live_registers()->Add(locs()->in(0));
1728 locs()->live_registers()->Add(locs()->in(1));
1729 }
1730 __ TryAllocate(compiler->float32x4_class(),
1731 slow_path->float32x4_entry_label(),
1732 Assembler::kFarJump,
1733 temp,
1734 PP);
1735 __ Bind(slow_path->float32x4_exit_label());
1736 __ movq(temp2, temp);
1737 __ StoreIntoObject(instance_reg,
1738 FieldAddress(instance_reg, field().Offset()),
1739 temp2);
1740
1741 __ Bind(&copy_float32x4);
1742 __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset()));
1743 __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp);
1744 __ jmp(&skip_store);
1745 }
1746
1662 __ Bind(&store_pointer); 1747 __ Bind(&store_pointer);
1663 } 1748 }
1664 1749
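Note: this hunk is the point of the CL. When the field already holds a box of the guarded class, the new code copies the unboxed value into that box in place (the copy_double / copy_float32x4 paths) and only allocates when the field is still null. A sketch of that reuse-or-allocate flow, with toy stand-in types and the write barrier elided:

    // Sketch of the reuse-or-allocate store flow emitted above. All types
    // and helpers are simplified stand-ins, not the VM's real API.
    #include <cstdio>

    struct Float32x4Box { float value[4]; };

    struct Instance {
      Float32x4Box* field;  // boxed field slot; null until the first store
    };

    Float32x4Box* AllocateBox() {  // stands in for TryAllocate + slow path
      std::puts("allocating a new box");
      return new Float32x4Box{};   // leaked in this toy; GC-managed in the VM
    }

    void StoreUnboxed(Instance* obj, const float v[4]) {
      Float32x4Box* box = obj->field;
      if (box == nullptr) {  // first store: allocate and install the box
        box = AllocateBox();
        obj->field = box;    // StoreIntoObject's write barrier elided here
      }
      // Every later store lands here: mutate the existing box in place,
      // so repeated stores to the field allocate nothing.
      for (int i = 0; i < 4; i++) box->value[i] = v[i];
    }

    int main() {
      Instance obj{nullptr};
      const float a[4] = {1, 2, 3, 4};
      StoreUnboxed(&obj, a);  // allocates
      StoreUnboxed(&obj, a);  // reuses the box
      return 0;
    }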
1665 if (ShouldEmitStoreBarrier()) { 1750 if (ShouldEmitStoreBarrier()) {
1666 Register value_reg = locs()->in(1).reg(); 1751 Register value_reg = locs()->in(1).reg();
1667 __ StoreIntoObject(instance_reg, 1752 __ StoreIntoObject(instance_reg,
1668 FieldAddress(instance_reg, field().Offset()), 1753 FieldAddress(instance_reg, field().Offset()),
1669 value_reg, 1754 value_reg,
1670 CanValueBeSmi()); 1755 CanValueBeSmi());
1671 } else { 1756 } else {
(...skipping 120 matching lines...)
1792 deopt_id(), 1877 deopt_id(),
1793 kAllocateObjectWithBoundsCheckRuntimeEntry, 1878 kAllocateObjectWithBoundsCheckRuntimeEntry,
1794 3, 1879 3,
1795 locs()); 1880 locs());
1796 __ Drop(3); 1881 __ Drop(3);
1797 ASSERT(locs()->out().reg() == RAX); 1882 ASSERT(locs()->out().reg() == RAX);
1798 __ popq(RAX); // Pop new instance. 1883 __ popq(RAX); // Pop new instance.
1799 } 1884 }
1800 1885
1801 1886
1802 class BoxDoubleSlowPath : public SlowPathCode { 1887 class LoadFieldSlowPath : public SlowPathCode {
1803 public: 1888 public:
1804 explicit BoxDoubleSlowPath(Instruction* instruction) 1889 explicit LoadFieldSlowPath(Instruction* instruction)
1805 : instruction_(instruction) { } 1890 : instruction_(instruction) { }
1806 1891
1807 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 1892 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
1808 __ Comment("BoxDoubleSlowPath"); 1893 __ Comment("LoadFieldSlowPath");
1809 __ Bind(entry_label()); 1894 {
1810 const Class& double_class = compiler->double_class(); 1895 __ Bind(double_entry_label());
1811 const Code& stub = 1896 const Class& double_class = compiler->double_class();
1812 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); 1897 const Code& stub =
1813 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); 1898 Code::Handle(StubCode::GetAllocationStubForClass(double_class));
1899 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
1814 1900
1815 LocationSummary* locs = instruction_->locs(); 1901 LocationSummary* locs = instruction_->locs();
1816 locs->live_registers()->Remove(locs->out()); 1902 locs->live_registers()->Remove(locs->out());
1817 1903
1818 compiler->SaveLiveRegisters(locs); 1904 compiler->SaveLiveRegisters(locs);
1819 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. 1905 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
1820 &label, 1906 &label,
1821 PcDescriptors::kOther, 1907 PcDescriptors::kOther,
1822 locs); 1908 locs);
1823 __ MoveRegister(locs->out().reg(), RAX); 1909 __ MoveRegister(locs->out().reg(), RAX);
1824 compiler->RestoreLiveRegisters(locs); 1910 compiler->RestoreLiveRegisters(locs);
1825 1911
1826 __ jmp(exit_label()); 1912 __ jmp(double_exit_label());
1913 }
1914 {
1915 __ Bind(float32x4_entry_label());
1916 const Class& float32x4_class = compiler->float32x4_class();
1917 const Code& stub =
1918 Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class));
1919 const ExternalLabel label(float32x4_class.ToCString(), stub.EntryPoint());
1920
1921 LocationSummary* locs = instruction_->locs();
1922 locs->live_registers()->Remove(locs->out());
1923
1924 compiler->SaveLiveRegisters(locs);
1925 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
1926 &label,
1927 PcDescriptors::kOther,
1928 locs);
1929 __ MoveRegister(locs->out().reg(), RAX);
1930 compiler->RestoreLiveRegisters(locs);
1931
1932 __ jmp(float32x4_exit_label());
1933 }
1827 } 1934 }
1828 1935
1936 Label* double_entry_label() {
1937 // Use default SlowPathCode label for double.
1938 return entry_label();
1939 }
1940 Label* double_exit_label() {
1941 // Use default SlowPathCode label for double.
1942 return exit_label();
1943 }
1944
1945 Label* float32x4_entry_label() { return &float32x4_entry_label_; }
1946 Label* float32x4_exit_label() { return &float32x4_exit_label_; }
1947
1829 private: 1948 private:
1949 Label float32x4_entry_label_;
1950 Label float32x4_exit_label_;
1830 Instruction* instruction_; 1951 Instruction* instruction_;
1831 }; 1952 };
1832 1953
1833 1954
1834 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const { 1955 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const {
1835 const intptr_t kNumInputs = 1; 1956 const intptr_t kNumInputs = 1;
1836 const intptr_t kNumTemps = 0; 1957 const intptr_t kNumTemps = 0;
1837 LocationSummary* locs = 1958 LocationSummary* locs =
1838 new LocationSummary( 1959 new LocationSummary(
1839 kNumInputs, kNumTemps, 1960 kNumInputs, kNumTemps,
(...skipping 17 matching lines...)
1857 1978
1858 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1979 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1859 Register instance_reg = locs()->in(0).reg(); 1980 Register instance_reg = locs()->in(0).reg();
1860 if (IsUnboxedLoad() && compiler->is_optimizing()) { 1981 if (IsUnboxedLoad() && compiler->is_optimizing()) {
1861 XmmRegister result = locs()->out().fpu_reg(); 1982 XmmRegister result = locs()->out().fpu_reg();
1862 Register temp = locs()->temp(0).reg(); 1983 Register temp = locs()->temp(0).reg();
1863 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 1984 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
1864 intptr_t cid = field()->UnboxedFieldCid(); 1985 intptr_t cid = field()->UnboxedFieldCid();
1865 switch (cid) { 1986 switch (cid) {
1866 case kDoubleCid: 1987 case kDoubleCid:
1988 __ Comment("UnboxedDoubleLoadFieldInstr");
1867 __ movsd(result, FieldAddress(temp, Double::value_offset())); 1989 __ movsd(result, FieldAddress(temp, Double::value_offset()));
1868 break; 1990 break;
1869 // TODO(johnmccutchan): Add Float32x4 path here. 1991 case kFloat32x4Cid:
1992 __ Comment("UnboxedFloat32x4LoadFieldInstr");
1993 __ movups(result, FieldAddress(temp, Float32x4::value_offset()));
1994 break;
1870 default: 1995 default:
1871 UNREACHABLE(); 1996 UNREACHABLE();
1872 } 1997 }
1873 return; 1998 return;
1874 } 1999 }
1875 2000
1876 Label done; 2001 Label done;
1877 Register result = locs()->out().reg(); 2002 Register result = locs()->out().reg();
1878 if (IsPotentialUnboxedLoad()) { 2003 if (IsPotentialUnboxedLoad()) {
1879 Register temp = locs()->temp(1).reg(); 2004 Register temp = locs()->temp(1).reg();
1880 XmmRegister value = locs()->temp(0).fpu_reg(); 2005 XmmRegister value = locs()->temp(0).fpu_reg();
2006 LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this);
2007 compiler->AddSlowPathCode(slow_path);
1881 2008
1882 Label load_pointer; 2009 Label load_pointer;
1883 Label load_double; 2010 Label load_double;
2011 Label load_float32x4;
1884 2012
1885 __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP); 2013 __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP);
1886 2014
1887 __ cmpq(FieldAddress(result, Field::is_nullable_offset()), 2015 __ cmpq(FieldAddress(result, Field::is_nullable_offset()),
1888 Immediate(kNullCid)); 2016 Immediate(kNullCid));
1889 __ j(EQUAL, &load_pointer); 2017 __ j(EQUAL, &load_pointer);
1890 2018
1891 __ cmpq(FieldAddress(result, Field::guarded_cid_offset()), 2019 __ cmpq(FieldAddress(result, Field::guarded_cid_offset()),
1892 Immediate(kDoubleCid)); 2020 Immediate(kDoubleCid));
1893 __ j(EQUAL, &load_double); 2021 __ j(EQUAL, &load_double);
1894 2022
2023 __ cmpq(FieldAddress(result, Field::guarded_cid_offset()),
2024 Immediate(kFloat32x4Cid));
2025 __ j(EQUAL, &load_float32x4);
2026
1895 // Fall through. 2027 // Fall through.
1896 __ jmp(&load_pointer); 2028 __ jmp(&load_pointer);
1897 2029
1898 __ Bind(&load_double); 2030 {
2031 __ Bind(&load_double);
1899 2032
1900 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); 2033 if (!compiler->is_optimizing()) {
1901 compiler->AddSlowPathCode(slow_path); 2034 locs()->live_registers()->Add(locs()->in(0));
2035 }
1902 2036
1903 if (!compiler->is_optimizing()) { 2037 __ TryAllocate(compiler->double_class(),
1904 locs()->live_registers()->Add(locs()->in(0)); 2038 slow_path->double_entry_label(),
2039 Assembler::kFarJump,
2040 result,
2041 PP);
2042 __ Bind(slow_path->double_exit_label());
2043 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2044 __ movsd(value, FieldAddress(temp, Double::value_offset()));
2045 __ movsd(FieldAddress(result, Double::value_offset()), value);
2046 __ jmp(&done);
1905 } 2047 }
2048 {
2049 __ Bind(&load_float32x4);
1906 2050
1907 __ TryAllocate(compiler->double_class(), 2051 if (!compiler->is_optimizing()) {
1908 slow_path->entry_label(), 2052 locs()->live_registers()->Add(locs()->in(0));
1909 Assembler::kFarJump, 2053 }
1910 result,
1911 PP);
1912 __ Bind(slow_path->exit_label());
1913 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
1914 __ movsd(value, FieldAddress(temp, Double::value_offset()));
1915 __ movsd(FieldAddress(result, Double::value_offset()), value);
1916 __ jmp(&done);
1917 2054
1918 // TODO(johnmccutchan): Add Float32x4 path here. 2055 __ TryAllocate(compiler->float32x4_class(),
2056 slow_path->float32x4_entry_label(),
2057 Assembler::kFarJump,
2058 result,
2059 PP);
2060 __ Bind(slow_path->float32x4_exit_label());
2061 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2062 __ movups(value, FieldAddress(temp, Float32x4::value_offset()));
2063 __ movups(FieldAddress(result, Float32x4::value_offset()), value);
2064 __ jmp(&done);
2065 }
1919 2066
1920 __ Bind(&load_pointer); 2067 __ Bind(&load_pointer);
1921 } 2068 }
1922 __ movq(result, FieldAddress(instance_reg, offset_in_bytes())); 2069 __ movq(result, FieldAddress(instance_reg, offset_in_bytes()));
1923 __ Bind(&done); 2070 __ Bind(&done);
1924 } 2071 }
1925 2072
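Note: on the load side there is no box reuse; each potentially unboxed load materializes a fresh result box. TryAllocate attempts an inline bump allocation and only jumps to the slow path's stub call when that fails. A toy sketch of the fast/slow split; the bump allocator and sizes are illustrative, not the VM's new-space layout.

    // Toy sketch of TryAllocate's bump-pointer fast path with a slow-path
    // fallback, as used in the load sequence above.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    struct Heap {
      uint8_t* top;  // next free byte
      uint8_t* end;  // end of the nursery
    };

    void* SlowPathAllocate(Heap* heap, std::size_t size) {
      // Stands in for the far jump to the allocation stub (GC, refill, retry).
      std::puts("slow path: allocation stub");
      return std::malloc(size);
    }

    void* TryAllocate(Heap* heap, std::size_t size) {
      uint8_t* result = heap->top;
      if (result + size > heap->end) {
        return SlowPathAllocate(heap, size);  // kFarJump to the entry label
      }
      heap->top = result + size;  // bump: the common, call-free case
      return result;
    }

    int main() {
      static uint8_t nursery[64];
      Heap heap{nursery, nursery + sizeof(nursery)};
      for (int i = 0; i < 5; i++) {
        // 16 bytes, roughly one Float32x4 payload; the fifth request
        // overflows the toy nursery and takes the slow path.
        void* box = TryAllocate(&heap, 16);
        std::printf("box %d at %p\n", i, box);
      }
      return 0;
    }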
1926 2073
1927 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const { 2074 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const {
1928 const intptr_t kNumInputs = 1; 2075 const intptr_t kNumInputs = 1;
(...skipping 949 matching lines...) Expand 10 before | Expand all | Expand 10 after
2878 } else { 3025 } else {
2879 Register temp = locs()->temp(0).reg(); 3026 Register temp = locs()->temp(0).reg();
2880 __ movq(temp, left); 3027 __ movq(temp, left);
2881 __ orq(temp, right); 3028 __ orq(temp, right);
2882 __ testq(temp, Immediate(kSmiTagMask)); 3029 __ testq(temp, Immediate(kSmiTagMask));
2883 } 3030 }
2884 __ j(ZERO, deopt); 3031 __ j(ZERO, deopt);
2885 } 3032 }
2886 3033
2887 3034
3035 class BoxDoubleSlowPath : public SlowPathCode {
3036 public:
3037 explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction)
3038 : instruction_(instruction) { }
3039
3040 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
3041 __ Comment("BoxDoubleSlowPath");
3042 __ Bind(entry_label());
3043 const Class& double_class = compiler->double_class();
3044 const Code& stub =
3045 Code::Handle(StubCode::GetAllocationStubForClass(double_class));
3046 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
3047
3048 LocationSummary* locs = instruction_->locs();
3049 locs->live_registers()->Remove(locs->out());
3050
3051 compiler->SaveLiveRegisters(locs);
3052 compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
3053 &label,
3054 PcDescriptors::kOther,
3055 locs);
3056 __ MoveRegister(locs->out().reg(), RAX);
3057 compiler->RestoreLiveRegisters(locs);
3058
3059 __ jmp(exit_label());
3060 }
3061
3062 private:
3063 BoxDoubleInstr* instruction_;
3064 };
3065
3066
2888 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { 3067 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const {
2889 const intptr_t kNumInputs = 1; 3068 const intptr_t kNumInputs = 1;
2890 const intptr_t kNumTemps = 0; 3069 const intptr_t kNumTemps = 0;
2891 LocationSummary* summary = 3070 LocationSummary* summary =
2892 new LocationSummary(kNumInputs, 3071 new LocationSummary(kNumInputs,
2893 kNumTemps, 3072 kNumTemps,
2894 LocationSummary::kCallOnSlowPath); 3073 LocationSummary::kCallOnSlowPath);
2895 summary->set_in(0, Location::RequiresFpuRegister()); 3074 summary->set_in(0, Location::RequiresFpuRegister());
2896 summary->set_out(Location::RequiresRegister()); 3075 summary->set_out(Location::RequiresRegister());
2897 return summary; 3076 return summary;
(...skipping 51 matching lines...)
2949 __ movsd(result, FieldAddress(value, Double::value_offset())); 3128 __ movsd(result, FieldAddress(value, Double::value_offset()));
2950 __ jmp(&done); 3129 __ jmp(&done);
2951 __ Bind(&is_smi); 3130 __ Bind(&is_smi);
2952 __ SmiUntag(value); 3131 __ SmiUntag(value);
2953 __ cvtsi2sd(result, value); 3132 __ cvtsi2sd(result, value);
2954 __ Bind(&done); 3133 __ Bind(&done);
2955 } 3134 }
2956 } 3135 }
2957 3136
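Note: the smi branch above untags before converting. A smi keeps its payload shifted left past a zero tag bit, so SmiUntag is an arithmetic right shift and cvtsi2sd then widens the integer to a double. A sketch under the 1-bit smi tag assumption used by this x64 port; the helper names are illustrative, not the VM's API.

    // Sketch of the smi branch of the unbox sequence above: untag
    // (arithmetic shift right past the tag bit), then convert to double,
    // mirroring SmiUntag + cvtsi2sd.
    #include <cstdint>
    #include <cstdio>

    constexpr int kSmiTagSize = 1;  // assumption: low bit 0 marks a smi
    constexpr intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

    bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }
    intptr_t SmiTag(intptr_t value) { return value << kSmiTagSize; }
    intptr_t SmiUntag(intptr_t raw) { return raw >> kSmiTagSize; }  // sarq

    int main() {
      intptr_t raw = SmiTag(42);  // a tagged small integer
      if (IsSmi(raw)) {
        double result = static_cast<double>(SmiUntag(raw));  // cvtsi2sd
        std::printf("unboxed %g\n", result);                 // prints 42
      }
      return 0;
    }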
2958 3137
2959 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const {
2960 const intptr_t kNumInputs = 1;
2961 const intptr_t kNumTemps = 0;
2962 LocationSummary* summary =
2963 new LocationSummary(kNumInputs,
2964 kNumTemps,
2965 LocationSummary::kCallOnSlowPath);
2966 summary->set_in(0, Location::RequiresFpuRegister());
2967 summary->set_out(Location::RequiresRegister());
2968 return summary;
2969 }
2970
2971
2972 class BoxFloat32x4SlowPath : public SlowPathCode { 3138 class BoxFloat32x4SlowPath : public SlowPathCode {
2973 public: 3139 public:
2974 explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction) 3140 explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction)
2975 : instruction_(instruction) { } 3141 : instruction_(instruction) { }
2976 3142
2977 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { 3143 virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
2978 __ Comment("BoxFloat32x4SlowPath"); 3144 __ Comment("BoxFloat32x4SlowPath");
2979 __ Bind(entry_label()); 3145 __ Bind(entry_label());
2980 const Class& float32x4_class = compiler->float32x4_class(); 3146 const Class& float32x4_class = compiler->float32x4_class();
2981 const Code& stub = 3147 const Code& stub =
(...skipping 12 matching lines...)
2994 compiler->RestoreLiveRegisters(locs); 3160 compiler->RestoreLiveRegisters(locs);
2995 3161
2996 __ jmp(exit_label()); 3162 __ jmp(exit_label());
2997 } 3163 }
2998 3164
2999 private: 3165 private:
3000 BoxFloat32x4Instr* instruction_; 3166 BoxFloat32x4Instr* instruction_;
3001 }; 3167 };
3002 3168
3003 3169
3170 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const {
3171 const intptr_t kNumInputs = 1;
3172 const intptr_t kNumTemps = 0;
3173 LocationSummary* summary =
3174 new LocationSummary(kNumInputs,
3175 kNumTemps,
3176 LocationSummary::kCallOnSlowPath);
3177 summary->set_in(0, Location::RequiresFpuRegister());
3178 summary->set_out(Location::RequiresRegister());
3179 return summary;
3180 }
3181
3182
3004 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { 3183 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
3005 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); 3184 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this);
3006 compiler->AddSlowPathCode(slow_path); 3185 compiler->AddSlowPathCode(slow_path);
3007 3186
3008 Register out_reg = locs()->out().reg(); 3187 Register out_reg = locs()->out().reg();
3009 XmmRegister value = locs()->in(0).fpu_reg(); 3188 XmmRegister value = locs()->in(0).fpu_reg();
3010 3189
3011 __ TryAllocate(compiler->float32x4_class(), 3190 __ TryAllocate(compiler->float32x4_class(),
3012 slow_path->entry_label(), 3191 slow_path->entry_label(),
3013 Assembler::kFarJump, 3192 Assembler::kFarJump,
(...skipping 1987 matching lines...)
5001 PcDescriptors::kOther, 5180 PcDescriptors::kOther,
5002 locs()); 5181 locs());
5003 __ Drop(2); // Discard type arguments and receiver. 5182 __ Drop(2); // Discard type arguments and receiver.
5004 } 5183 }
5005 5184
5006 } // namespace dart 5185 } // namespace dart
5007 5186
5008 #undef __ 5187 #undef __
5009 5188
5010 #endif // defined TARGET_ARCH_X64 5189 #endif // defined TARGET_ARCH_X64