Chromium Code Reviews

Diff: runtime/vm/intermediate_language_x64.cc (shown as a unified diff: "-" removed, "+" added, unprefixed lines unchanged)

Issue 150063004: Support reusable boxes for Float32x4 fields (Closed)
Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 10 months ago
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_X64.
 #if defined(TARGET_ARCH_X64)

 #include "vm/intermediate_language.h"

 #include "vm/dart_entry.h"
(...skipping 1472 matching lines...)
       __ j(NOT_EQUAL, fail);
     } else {
       UNREACHABLE();
     }
   }
   }
   __ Bind(&ok);
 }


+bool Field::IsUnboxedField() const {
+  bool valid_class = (guarded_cid() == kDoubleCid) ||
+                     (guarded_cid() == kFloat32x4Cid);
+  return is_unboxing_candidate() && !is_final() && !is_nullable() &&
+         valid_class;
+}
+
+
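The hunk above introduces the predicate that gates the whole optimization: a field is eligible only when it is an unboxing candidate, is neither final nor nullable, and is guarded as exactly kDoubleCid or kFloat32x4Cid. What the rest of the patch builds on this is a "reusable box" store protocol: the first store into such a field allocates a box object, and every later store overwrites that box's payload in place instead of allocating again. A minimal stand-alone sketch of the idea (plain C++, not VM code; GC and write barriers deliberately elided):

    #include <cstdio>

    struct Float32x4Box {        // stand-in for a heap-allocated Float32x4 box
      float value[4];            // payload, cf. Float32x4::value_offset()
    };

    struct Instance {
      Float32x4Box* field = nullptr;   // the field slot holds a box pointer
    };

    void StoreUnboxed(Instance* obj, const float v[4]) {
      if (obj->field == nullptr) {
        obj->field = new Float32x4Box();   // first store: allocate the box
      }                                    // (the VM needs a write barrier here)
      for (int i = 0; i < 4; ++i) {
        obj->field->value[i] = v[i];       // later stores: overwrite in place
      }
    }

    int main() {
      Instance obj;
      const float a[4] = {1, 2, 3, 4};
      const float b[4] = {5, 6, 7, 8};
      StoreUnboxed(&obj, a);
      Float32x4Box* first = obj.field;
      StoreUnboxed(&obj, b);
      std::printf("box reused: %d\n", first == obj.field);  // prints 1
      return 0;
    }

This also suggests why the predicate rejects other fields: a nullable slot may hold null rather than a box, and a final field is presumably written only once, leaving nothing to reuse.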
 class StoreInstanceFieldSlowPath : public SlowPathCode {
  public:
-  StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction,
-                             const Class& cls)
-      : instruction_(instruction), cls_(cls) { }
+  explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction)
+      : instruction_(instruction) { }

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
     __ Comment("StoreInstanceFieldSlowPath");
-    __ Bind(entry_label());
-    const Code& stub =
-        Code::Handle(StubCode::GetAllocationStubForClass(cls_));
-    const ExternalLabel label(cls_.ToCString(), stub.EntryPoint());
+    {
+      __ Bind(double_entry_label());
+      const Class& cls = compiler->double_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(cls));
+      const ExternalLabel label(cls.ToCString(), stub.EntryPoint());

       LocationSummary* locs = instruction_->locs();
       locs->live_registers()->Remove(locs->out());

       compiler->SaveLiveRegisters(locs);
       compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                              &label,
                              PcDescriptors::kOther,
                              locs);
       __ MoveRegister(locs->temp(0).reg(), RAX);
       compiler->RestoreLiveRegisters(locs);

-    __ jmp(exit_label());
+      __ jmp(double_exit_label());
+    }
+    {
+      __ Bind(float32x4_entry_label());
+      const Class& cls = compiler->float32x4_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(cls));
+      const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
+
+      LocationSummary* locs = instruction_->locs();
+      locs->live_registers()->Remove(locs->out());
+
+      compiler->SaveLiveRegisters(locs);
+      compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                             &label,
+                             PcDescriptors::kOther,
+                             locs);
+      __ MoveRegister(locs->temp(0).reg(), RAX);
+      compiler->RestoreLiveRegisters(locs);
+
+      __ jmp(float32x4_exit_label());
+    }
   }

+  Label* double_entry_label() {
+    // Use default SlowPathCode label for double.
+    return entry_label();
+  }
+  Label* double_exit_label() {
+    // Use default SlowPathCode label for double.
+    return exit_label();
+  }
+
+  Label* float32x4_entry_label() { return &float32x4_entry_label_; }
+  Label* float32x4_exit_label() { return &float32x4_exit_label_; }
+
  private:
+  Label float32x4_entry_label_;
+  Label float32x4_exit_label_;
   StoreInstanceFieldInstr* instruction_;
-  const Class& cls_;
 };
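The constructor change above is the heart of the patch: the slow path no longer captures a single allocation class, because one deferred code blob now hosts two allocation calls, one per supported class id, and the in-line fast path chooses between them by jumping to the matching label pair. The double pair simply aliases the entry/exit labels inherited from SlowPathCode, while Float32x4 carries a private pair. Stripped of assembler detail, the shape is roughly this (a sketch, not the VM's actual SlowPathCode API):

    struct Label { int position = -1; };       // placeholder label type

    class SlowPathCode {                       // simplified base class
     public:
      Label* entry_label() { return &entry_label_; }
      Label* exit_label() { return &exit_label_; }
     private:
      Label entry_label_;
      Label exit_label_;
    };

    class StoreSlowPath : public SlowPathCode {
     public:
      // Double reuses the default pair; Float32x4 adds its own.
      Label* double_entry_label() { return entry_label(); }
      Label* double_exit_label() { return exit_label(); }
      Label* float32x4_entry_label() { return &float32x4_entry_label_; }
      Label* float32x4_exit_label() { return &float32x4_exit_label_; }
     private:
      Label float32x4_entry_label_;
      Label float32x4_exit_label_;
    };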


 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps = 0;
   LocationSummary* summary =
       new LocationSummary(kNumInputs, kNumTemps,
           (field().guarded_cid() == kIllegalCid) || (is_initialization_)
           ? LocationSummary::kCallOnSlowPath
(...skipping 26 matching lines...)

   Register instance_reg = locs()->in(0).reg();

   if (IsUnboxedStore() && compiler->is_optimizing()) {
     XmmRegister value = locs()->in(1).fpu_reg();
     Register temp = locs()->temp(0).reg();
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();

     if (is_initialization_) {
+      StoreInstanceFieldSlowPath* slow_path =
+          new StoreInstanceFieldSlowPath(this);
+      compiler->AddSlowPathCode(slow_path);
+
       const Class* cls = NULL;
+      Label* entry_label = NULL;
+      Label* exit_label = NULL;
       switch (cid) {
         case kDoubleCid:
           cls = &compiler->double_class();
+          entry_label = slow_path->double_entry_label();
+          exit_label = slow_path->double_exit_label();
           break;
-        // TODO(johnmccutchan): Add kFloat32x4Cid here.
+        case kFloat32x4Cid:
+          cls = &compiler->float32x4_class();
+          entry_label = slow_path->float32x4_entry_label();
+          exit_label = slow_path->float32x4_exit_label();
+          break;
         default:
           UNREACHABLE();
       }
-      StoreInstanceFieldSlowPath* slow_path =
-          new StoreInstanceFieldSlowPath(this, *cls);
-      compiler->AddSlowPathCode(slow_path);

       __ TryAllocate(*cls,
-                     slow_path->entry_label(),
+                     entry_label,
                      Assembler::kFarJump,
                      temp,
                      PP);
-      __ Bind(slow_path->exit_label());
+      __ Bind(exit_label);
       __ movq(temp2, temp);
       __ StoreIntoObject(instance_reg,
                          FieldAddress(instance_reg, field().Offset()),
                          temp2);
     } else {
       __ movq(temp, FieldAddress(instance_reg, field().Offset()));
     }
     switch (cid) {
       case kDoubleCid:
-        __ movsd(FieldAddress(temp, Double::value_offset()), value);
-        // TODO(johnmccutchan): Add kFloat32x4Cid here.
+        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ movsd(FieldAddress(temp, Double::value_offset()), value);
         break;
+      case kFloat32x4Cid:
+        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ movups(FieldAddress(temp, Float32x4::value_offset()), value);
+        break;
       default:
         UNREACHABLE();
     }
     return;
   }
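In the optimized store above, the two cases differ only in the box class and the width of the payload copy: movsd writes the 8-byte double payload, while movups writes the full 16-byte Float32x4 payload and tolerates unaligned addresses. For illustration only, the same two copies expressed with SSE intrinsics (the VM emits the instructions directly; this is not VM code):

    #include <emmintrin.h>  // SSE2: __m128, __m128d, and the store intrinsics

    // movsd: store the low 8-byte double into the box payload.
    void StoreDoublePayload(double* payload, __m128d v) {
      _mm_store_sd(payload, v);
    }

    // movups: store all 16 bytes, unaligned-safe, into the box payload.
    void StoreFloat32x4Payload(float* payload, __m128 v) {
      _mm_storeu_ps(payload, v);
    }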

   if (IsPotentialUnboxedStore()) {
     Register value_reg = locs()->in(1).reg();
     Register temp = locs()->temp(0).reg();
     Register temp2 = locs()->temp(1).reg();
     FpuRegister fpu_temp = locs()->temp(2).fpu_reg();

     Label store_pointer;
-    Label copy_double;
     Label store_double;
+    Label store_float32x4;

     __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP);

     __ cmpq(FieldAddress(temp, Field::is_nullable_offset()),
             Immediate(kNullCid));
     __ j(EQUAL, &store_pointer);

     __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset()));
     __ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
     __ j(ZERO, &store_pointer);

     __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()),
             Immediate(kDoubleCid));
     __ j(EQUAL, &store_double);

+    __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()),
+            Immediate(kFloat32x4Cid));
+    __ j(EQUAL, &store_float32x4);
+
     // Fall through.
     __ jmp(&store_pointer);

-    __ Bind(&store_double);
-
-    __ movq(temp, FieldAddress(instance_reg, field().Offset()));
-    __ CompareObject(temp, Object::null_object(), PP);
-    __ j(NOT_EQUAL, &copy_double);
-
     StoreInstanceFieldSlowPath* slow_path =
-        new StoreInstanceFieldSlowPath(this, compiler->double_class());
+        new StoreInstanceFieldSlowPath(this);
     compiler->AddSlowPathCode(slow_path);

-    if (!compiler->is_optimizing()) {
-      locs()->live_registers()->Add(locs()->in(0));
-      locs()->live_registers()->Add(locs()->in(1));
+    {
+      __ Bind(&store_double);
+      Label copy_double;
+
+      __ movq(temp, FieldAddress(instance_reg, field().Offset()));
+      __ CompareObject(temp, Object::null_object(), PP);
+      __ j(NOT_EQUAL, &copy_double);
+
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+        locs()->live_registers()->Add(locs()->in(1));
+      }
+      __ TryAllocate(compiler->double_class(),
+                     slow_path->double_entry_label(),
+                     Assembler::kFarJump,
+                     temp,
+                     PP);
+      __ Bind(slow_path->double_exit_label());
+      __ movq(temp2, temp);
+      __ StoreIntoObject(instance_reg,
+                         FieldAddress(instance_reg, field().Offset()),
+                         temp2);
+
+      __ Bind(&copy_double);
+      __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
+      __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
+      __ jmp(&skip_store);
     }
-    __ TryAllocate(compiler->double_class(),
-                   slow_path->entry_label(),
-                   Assembler::kFarJump,
-                   temp,
-                   PP);
-    __ Bind(slow_path->exit_label());
-    __ movq(temp2, temp);
-    __ StoreIntoObject(instance_reg,
-                       FieldAddress(instance_reg, field().Offset()),
-                       temp2);

-    __ Bind(&copy_double);
-    __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
-    __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
-    __ jmp(&skip_store);
+    {
+      __ Bind(&store_float32x4);
+      Label copy_float32x4;
+
+      __ movq(temp, FieldAddress(instance_reg, field().Offset()));
+      __ CompareObject(temp, Object::null_object(), PP);
+      __ j(NOT_EQUAL, &copy_float32x4);
+
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+        locs()->live_registers()->Add(locs()->in(1));
+      }
+      __ TryAllocate(compiler->float32x4_class(),
+                     slow_path->float32x4_entry_label(),
+                     Assembler::kFarJump,
+                     temp,
+                     PP);
+      __ Bind(slow_path->float32x4_exit_label());
+      __ movq(temp2, temp);
+      __ StoreIntoObject(instance_reg,
+                         FieldAddress(instance_reg, field().Offset()),
+                         temp2);
+
+      __ Bind(&copy_float32x4);
+      __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset()));
+      __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp);
+      __ jmp(&skip_store);
+    }
+
     __ Bind(&store_pointer);
   }
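Because this path runs when the field's state is not known statically, the emitted code above re-reads the Field object's guard metadata at run time and falls back to an ordinary tagged store whenever the unboxed preconditions fail; when they hold, it either enters the shared slow path to allocate a box (slot still null) or jumps straight to the copy label that reuses the existing box. The dispatch, restated as ordinary C++ over an illustrative mirror of the metadata (the struct is not the VM's Field layout; the members correspond to Field::is_nullable_offset(), Field::kUnboxingCandidateBit, and Field::guarded_cid_offset()):

    #include <cstdint>

    enum class StoreKind { kPointer, kDouble, kFloat32x4 };

    struct FieldGuard {            // illustrative mirror of Field metadata
      bool is_nullable;
      bool is_unboxing_candidate;
      int64_t guarded_cid;
    };

    StoreKind SelectStoreKind(const FieldGuard& f,
                              int64_t double_cid,
                              int64_t float32x4_cid) {
      if (f.is_nullable) return StoreKind::kPointer;
      if (!f.is_unboxing_candidate) return StoreKind::kPointer;
      if (f.guarded_cid == double_cid) return StoreKind::kDouble;
      if (f.guarded_cid == float32x4_cid) return StoreKind::kFloat32x4;
      return StoreKind::kPointer;  // fall through
    }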

   if (ShouldEmitStoreBarrier()) {
     Register value_reg = locs()->in(1).reg();
     __ StoreIntoObject(instance_reg,
                        FieldAddress(instance_reg, field().Offset()),
                        value_reg,
                        CanValueBeSmi());
   } else {
(...skipping 120 matching lines...)
                                 deopt_id(),
                                 kAllocateObjectWithBoundsCheckRuntimeEntry,
                                 3,
                                 locs());
   __ Drop(3);
   ASSERT(locs()->out().reg() == RAX);
   __ popq(RAX);  // Pop new instance.
 }

-class BoxDoubleSlowPath : public SlowPathCode {
+class LoadFieldSlowPath : public SlowPathCode {
  public:
-  explicit BoxDoubleSlowPath(Instruction* instruction)
+  explicit LoadFieldSlowPath(Instruction* instruction)
       : instruction_(instruction) { }

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
-    __ Comment("BoxDoubleSlowPath");
-    __ Bind(entry_label());
-    const Class& double_class = compiler->double_class();
-    const Code& stub =
-        Code::Handle(StubCode::GetAllocationStubForClass(double_class));
-    const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+    __ Comment("LoadFieldSlowPath");
+    {
+      __ Bind(double_entry_label());
+      const Class& double_class = compiler->double_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(double_class));
+      const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());

       LocationSummary* locs = instruction_->locs();
       locs->live_registers()->Remove(locs->out());

       compiler->SaveLiveRegisters(locs);
       compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                              &label,
                              PcDescriptors::kOther,
                              locs);
       __ MoveRegister(locs->out().reg(), RAX);
       compiler->RestoreLiveRegisters(locs);

-    __ jmp(exit_label());
+      __ jmp(double_exit_label());
+    }
+    {
+      __ Bind(float32x4_entry_label());
+      const Class& float32x4_class = compiler->float32x4_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class));
+      const ExternalLabel label(float32x4_class.ToCString(), stub.EntryPoint());
+
+      LocationSummary* locs = instruction_->locs();
+      locs->live_registers()->Remove(locs->out());
+
+      compiler->SaveLiveRegisters(locs);
+      compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                             &label,
+                             PcDescriptors::kOther,
+                             locs);
+      __ MoveRegister(locs->out().reg(), RAX);
+      compiler->RestoreLiveRegisters(locs);
+
+      __ jmp(float32x4_exit_label());
+    }
   }

+  Label* double_entry_label() {
+    // Use default SlowPathCode label for double.
+    return entry_label();
+  }
+  Label* double_exit_label() {
+    // Use default SlowPathCode label for double.
+    return exit_label();
+  }
+
+  Label* float32x4_entry_label() { return &float32x4_entry_label_; }
+  Label* float32x4_exit_label() { return &float32x4_exit_label_; }
+
  private:
+  Label float32x4_entry_label_;
+  Label float32x4_exit_label_;
   Instruction* instruction_;
 };


 LocationSummary* LoadFieldInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
   LocationSummary* locs =
       new LocationSummary(
           kNumInputs, kNumTemps,
(...skipping 17 matching lines...)

 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Register instance_reg = locs()->in(0).reg();
   if (IsUnboxedLoad() && compiler->is_optimizing()) {
     XmmRegister result = locs()->out().fpu_reg();
     Register temp = locs()->temp(0).reg();
     __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
     intptr_t cid = field()->UnboxedFieldCid();
     switch (cid) {
       case kDoubleCid:
+        __ Comment("UnboxedDoubleLoadFieldInstr");
         __ movsd(result, FieldAddress(temp, Double::value_offset()));
         break;
-      // TODO(johnmccutchan): Add Float32x4 path here.
+      case kFloat32x4Cid:
+        __ Comment("UnboxedFloat32x4LoadFieldInstr");
+        __ movups(result, FieldAddress(temp, Float32x4::value_offset()));
+        break;
       default:
         UNREACHABLE();
     }
     return;
   }

   Label done;
   Register result = locs()->out().reg();
   if (IsPotentialUnboxedLoad()) {
     Register temp = locs()->temp(1).reg();
     XmmRegister value = locs()->temp(0).fpu_reg();
+    LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this);
+    compiler->AddSlowPathCode(slow_path);

     Label load_pointer;
     Label load_double;
+    Label load_float32x4;

     __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP);

     __ cmpq(FieldAddress(result, Field::is_nullable_offset()),
             Immediate(kNullCid));
     __ j(EQUAL, &load_pointer);

     __ cmpq(FieldAddress(result, Field::guarded_cid_offset()),
             Immediate(kDoubleCid));
     __ j(EQUAL, &load_double);

+    __ cmpq(FieldAddress(result, Field::guarded_cid_offset()),
+            Immediate(kFloat32x4Cid));
+    __ j(EQUAL, &load_float32x4);
+
     // Fall through.
     __ jmp(&load_pointer);

-    __ Bind(&load_double);
-
-    BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
-    compiler->AddSlowPathCode(slow_path);
-
-    if (!compiler->is_optimizing()) {
-      locs()->live_registers()->Add(locs()->in(0));
-    }
-
-    __ TryAllocate(compiler->double_class(),
-                   slow_path->entry_label(),
-                   Assembler::kFarJump,
-                   result,
-                   PP);
-    __ Bind(slow_path->exit_label());
-    __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
-    __ movsd(value, FieldAddress(temp, Double::value_offset()));
-    __ movsd(FieldAddress(result, Double::value_offset()), value);
-    __ jmp(&done);
-
-    // TODO(johnmccutchan): Add Float32x4 path here.
+    {
+      __ Bind(&load_double);
+
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+      }
+
+      __ TryAllocate(compiler->double_class(),
+                     slow_path->double_entry_label(),
+                     Assembler::kFarJump,
+                     result,
+                     PP);
+      __ Bind(slow_path->double_exit_label());
+      __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
+      __ movsd(value, FieldAddress(temp, Double::value_offset()));
+      __ movsd(FieldAddress(result, Double::value_offset()), value);
+      __ jmp(&done);
+    }
+    {
+      __ Bind(&load_float32x4);
+
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+      }
+
+      __ TryAllocate(compiler->float32x4_class(),
+                     slow_path->float32x4_entry_label(),
+                     Assembler::kFarJump,
+                     result,
+                     PP);
+      __ Bind(slow_path->float32x4_exit_label());
+      __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
+      __ movups(value, FieldAddress(temp, Float32x4::value_offset()));
+      __ movups(FieldAddress(result, Float32x4::value_offset()), value);
+      __ jmp(&done);
+    }

     __ Bind(&load_pointer);
   }
   __ movq(result, FieldAddress(instance_reg, offset_in_bytes()));
   __ Bind(&done);
 }
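The load path above is the mirror image of the store: the field's box is private to the instance, since later stores overwrite it in place, so a load of a potentially unboxed value allocates a fresh box for the result and copies the payload out, rather than returning the field's own box (which could otherwise be observed mutating through an aliased result). A compact sketch of that copy-out rule (plain C++; the TryAllocate fast path and LoadFieldSlowPath fallback are collapsed into a single allocation):

    struct Float32x4Box { float value[4]; };

    struct Instance { Float32x4Box* field = nullptr; };

    // The caller receives its own box; the field's box stays private.
    Float32x4Box* LoadUnboxed(const Instance* obj) {
      Float32x4Box* result = new Float32x4Box();  // TryAllocate / slow path
      for (int i = 0; i < 4; ++i) {
        result->value[i] = obj->field->value[i];  // cf. the movups pair above
      }
      return result;
    }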


 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 1;
(...skipping 949 matching lines...)
   } else {
     Register temp = locs()->temp(0).reg();
     __ movq(temp, left);
     __ orq(temp, right);
     __ testq(temp, Immediate(kSmiTagMask));
   }
   __ j(ZERO, deopt);
 }


+class BoxDoubleSlowPath : public SlowPathCode {
+ public:
+  explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction)
+      : instruction_(instruction) { }
+
+  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    __ Comment("BoxDoubleSlowPath");
+    __ Bind(entry_label());
+    const Class& double_class = compiler->double_class();
+    const Code& stub =
+        Code::Handle(StubCode::GetAllocationStubForClass(double_class));
+    const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+
+    LocationSummary* locs = instruction_->locs();
+    locs->live_registers()->Remove(locs->out());
+
+    compiler->SaveLiveRegisters(locs);
+    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                           &label,
+                           PcDescriptors::kOther,
+                           locs);
+    __ MoveRegister(locs->out().reg(), RAX);
+    compiler->RestoreLiveRegisters(locs);
+
+    __ jmp(exit_label());
+  }
+
+ private:
+  BoxDoubleInstr* instruction_;
+};
+
+
 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
   LocationSummary* summary =
       new LocationSummary(kNumInputs,
                           kNumTemps,
                           LocationSummary::kCallOnSlowPath);
   summary->set_in(0, Location::RequiresFpuRegister());
   summary->set_out(Location::RequiresRegister());
   return summary;
(...skipping 51 matching lines...)
     __ movsd(result, FieldAddress(value, Double::value_offset()));
     __ jmp(&done);
     __ Bind(&is_smi);
     __ SmiUntag(value);
     __ cvtsi2sd(result, value);
     __ Bind(&done);
   }
 }


-LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const {
-  const intptr_t kNumInputs = 1;
-  const intptr_t kNumTemps = 0;
-  LocationSummary* summary =
-      new LocationSummary(kNumInputs,
-                          kNumTemps,
-                          LocationSummary::kCallOnSlowPath);
-  summary->set_in(0, Location::RequiresFpuRegister());
-  summary->set_out(Location::RequiresRegister());
-  return summary;
-}
-
-
 class BoxFloat32x4SlowPath : public SlowPathCode {
  public:
   explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction)
       : instruction_(instruction) { }

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
     __ Comment("BoxFloat32x4SlowPath");
     __ Bind(entry_label());
     const Class& float32x4_class = compiler->float32x4_class();
     const Code& stub =
(...skipping 12 matching lines...)
     compiler->RestoreLiveRegisters(locs);

     __ jmp(exit_label());
   }

  private:
   BoxFloat32x4Instr* instruction_;
 };


+LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const {
+  const intptr_t kNumInputs = 1;
+  const intptr_t kNumTemps = 0;
+  LocationSummary* summary =
+      new LocationSummary(kNumInputs,
+                          kNumTemps,
+                          LocationSummary::kCallOnSlowPath);
+  summary->set_in(0, Location::RequiresFpuRegister());
+  summary->set_out(Location::RequiresRegister());
+  return summary;
+}
+
+
 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
   BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this);
   compiler->AddSlowPathCode(slow_path);

   Register out_reg = locs()->out().reg();
   XmmRegister value = locs()->in(0).fpu_reg();

   __ TryAllocate(compiler->float32x4_class(),
                  slow_path->entry_label(),
                  Assembler::kFarJump,
(...skipping 1987 matching lines...)
                          PcDescriptors::kOther,
                          locs());
   __ Drop(2);  // Discard type arguments and receiver.
 }

 }  // namespace dart

 #undef __

 #endif  // defined TARGET_ARCH_X64