Chromium Code Reviews

Unified Diff: runtime/vm/intermediate_language_ia32.cc

Issue 136753012: Refactor unboxed fields in preparation for reusable SIMD boxes (Closed)
Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 10 months ago
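This patch threads each field's unboxed representation through the IA32 store and load paths instead of hard-coding doubles: the slow path now takes its box class as a constructor argument, and every site derives that class from field().UnboxedFieldCid(). A condensed sketch of the selection pattern the patch repeats at each site; the commented kFloat32x4Cid arm is hypothetical (only reserved by the TODO(johnmccutchan) markers), and float32x4_class() is an assumed FlowGraphCompiler accessor:

    // Map an unboxed-field class id to the class whose box we allocate.
    const intptr_t cid = field().UnboxedFieldCid();
    const Class* cls = NULL;
    switch (cid) {
      case kDoubleCid:
        cls = &compiler->double_class();
        break;
      // case kFloat32x4Cid:                    // hypothetical future arm
      //   cls = &compiler->float32x4_class();  // assumed accessor
      //   break;
      default:
        UNREACHABLE();
    }
    // cls now names the box class, e.g. __ TryAllocate(*cls, ...);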
 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_IA32.
 #if defined(TARGET_ARCH_IA32)

 #include "vm/intermediate_language.h"

 #include "vm/dart_entry.h"
(...skipping 1574 matching lines...)
         UNREACHABLE();
       }
     }
   }
   __ Bind(&ok);
 }


 class StoreInstanceFieldSlowPath : public SlowPathCode {
  public:
-  explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction)
-      : instruction_(instruction) { }
+  explicit StoreInstanceFieldSlowPath(const Class& cls,
+                                      StoreInstanceFieldInstr* instruction)
+      : instruction_(instruction), cls_(cls) { }

srdjan 2014/01/30 23:58:39: ditto and elsewhere
Cutch 2014/01/31 00:18:49: Done.

   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
     __ Comment("StoreInstanceFieldSlowPath");
     __ Bind(entry_label());
-    const Class& double_class = compiler->double_class();
     const Code& stub =
-        Code::Handle(StubCode::GetAllocationStubForClass(double_class));
-    const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+        Code::Handle(StubCode::GetAllocationStubForClass(cls_));
+    const ExternalLabel label(cls_.ToCString(), stub.EntryPoint());

     LocationSummary* locs = instruction_->locs();
     locs->live_registers()->Remove(locs->out());

     compiler->SaveLiveRegisters(locs);
     compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                            &label,
                            PcDescriptors::kOther,
                            locs);
     __ MoveRegister(locs->temp(0).reg(), EAX);
     compiler->RestoreLiveRegisters(locs);

     __ jmp(exit_label());
   }

  private:
   StoreInstanceFieldInstr* instruction_;
+  const Class& cls_;
 };

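With the class passed in, each call site states which box it allocates. Both sites in this patch still pass the double class, one computed from the cid switch and one directly (taken verbatim from the hunks below):

    new StoreInstanceFieldSlowPath(*cls, this);                      // cls derived from UnboxedFieldCid()
    new StoreInstanceFieldSlowPath(compiler->double_class(), this);  // guarded-cid path, double-only for now

Holding cls_ as a const Class& looks safe here, since the compiler's class handles outlive the slow-path objects it owns.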

 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 2;
   const intptr_t kNumTemps = 0;
   LocationSummary* summary =
       new LocationSummary(kNumInputs, kNumTemps,
           (field().guarded_cid() == kIllegalCid) || (is_initialization_)
           ? LocationSummary::kCallOnSlowPath
(...skipping 23 matching lines...)

 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Label skip_store;

   Register instance_reg = locs()->in(0).reg();

   if (IsUnboxedStore() && compiler->is_optimizing()) {
     XmmRegister value = locs()->in(1).fpu_reg();
     Register temp = locs()->temp(0).reg();
     Register temp2 = locs()->temp(1).reg();
+    const intptr_t cid = field().UnboxedFieldCid();

     if (is_initialization_) {
+      const Class* cls = NULL;
+      switch (cid) {
+        case kDoubleCid:
+          cls = &compiler->double_class();
+          break;
+        // TODO(johnmccutchan): Add kFloat32x4Cid here.
+        default:
+          UNREACHABLE();
+      }
       StoreInstanceFieldSlowPath* slow_path =
-          new StoreInstanceFieldSlowPath(this);
+          new StoreInstanceFieldSlowPath(*cls, this);
       compiler->AddSlowPathCode(slow_path);

-      __ TryAllocate(compiler->double_class(),
+      __ TryAllocate(*cls,
                      slow_path->entry_label(),
                      Assembler::kFarJump,
                      temp,
                      temp2);
       __ Bind(slow_path->exit_label());
       __ movl(temp2, temp);
       __ StoreIntoObject(instance_reg,
                          FieldAddress(instance_reg, field().Offset()),
                          temp2);
     } else {
       __ movl(temp, FieldAddress(instance_reg, field().Offset()));
     }
-    __ movsd(FieldAddress(temp, Double::value_offset()), value);
+    switch (cid) {
+      case kDoubleCid:
+        __ movsd(FieldAddress(temp, Double::value_offset()), value);
+        // TODO(johnmccutchan): Add kFloat32x4Cid here.
+        break;
+      default:
+        UNREACHABLE();
+    }
     return;
   }

   if (IsPotentialUnboxedStore()) {
     Register value_reg = locs()->in(1).reg();
     Register temp = locs()->temp(0).reg();
     Register temp2 = locs()->temp(1).reg();
     FpuRegister fpu_temp = locs()->temp(2).fpu_reg();

-    Label store_pointer, copy_payload;
+    Label store_pointer;
+    Label copy_double;
+    Label store_double;
+
     __ LoadObject(temp, Field::ZoneHandle(field().raw()));
-    __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
-            Immediate(kDoubleCid));
-    __ j(NOT_EQUAL, &store_pointer);
+
     __ cmpl(FieldAddress(temp, Field::is_nullable_offset()),
             Immediate(kNullCid));
     __ j(EQUAL, &store_pointer);
+
     __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset()));
     __ testl(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
     __ j(ZERO, &store_pointer);

+    __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
+            Immediate(kDoubleCid));
+    __ j(EQUAL, &store_double);
+
+    // Fall through.
+    __ jmp(&store_pointer);
+
+    __ Bind(&store_double);
+
     const Immediate& raw_null =
         Immediate(reinterpret_cast<intptr_t>(Object::null()));
     __ movl(temp, FieldAddress(instance_reg, field().Offset()));
     __ cmpl(temp, raw_null);
-    __ j(NOT_EQUAL, &copy_payload);
+    __ j(NOT_EQUAL, &copy_double);

     StoreInstanceFieldSlowPath* slow_path =
-        new StoreInstanceFieldSlowPath(this);
+        new StoreInstanceFieldSlowPath(compiler->double_class(), this);
     compiler->AddSlowPathCode(slow_path);

     if (!compiler->is_optimizing()) {
       locs()->live_registers()->Add(locs()->in(0));
       locs()->live_registers()->Add(locs()->in(1));
     }

     __ TryAllocate(compiler->double_class(),
                    slow_path->entry_label(),
                    Assembler::kFarJump,
                    temp,
                    temp2);
     __ Bind(slow_path->exit_label());
     __ movl(temp2, temp);
     __ StoreIntoObject(instance_reg,
                        FieldAddress(instance_reg, field().Offset()),
                        temp2);

-    __ Bind(&copy_payload);
+    __ Bind(&copy_double);
     __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
     __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
     __ jmp(&skip_store);
     __ Bind(&store_pointer);
   }

   if (ShouldEmitStoreBarrier()) {
     Register value_reg = locs()->in(1).reg();
     __ StoreIntoObject(instance_reg,
                        FieldAddress(instance_reg, field().Offset()),
(...skipping 188 matching lines...)
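The rewritten potential-unboxed-store dispatch checks, in order: field nullability, the unboxing-candidate bit, and finally the guarded cid, jumping to a per-cid store block and falling through to the pointer store for any other cid. A C-level sketch of the emitted control flow (illustration only, with hypothetical field names, not VM source):

    // The checks emitted above, in plain C terms.
    if (field->is_nullable_cid == kNullCid) goto store_pointer;  // nullable fields stay boxed
    if (!(field->kind_bits & (1 << kUnboxingCandidateBit)))
      goto store_pointer;                                        // not an unboxing candidate
    if (field->guarded_cid == kDoubleCid) goto store_double;     // unbox the double payload
    goto store_pointer;                                          // any other cid: store the pointer

Testing the guarded cid last means additional SIMD cids can be added as further compare-and-jump pairs without touching the common checks.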
   return locs;
 }


 void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Register instance_reg = locs()->in(0).reg();
   if (IsUnboxedLoad() && compiler->is_optimizing()) {
     XmmRegister result = locs()->out().fpu_reg();
     Register temp = locs()->temp(0).reg();
     __ movl(temp, FieldAddress(instance_reg, offset_in_bytes()));
-    __ movsd(result, FieldAddress(temp, Double::value_offset()));
+    const intptr_t cid = field()->UnboxedFieldCid();
+    switch (cid) {
+      case kDoubleCid:
+        __ movsd(result, FieldAddress(temp, Double::value_offset()));
+        break;
+      // TODO(johnmccutchan): Add Float32x4 path here.
+      default:
+        UNREACHABLE();
+    }
     return;
   }

   Label done;
   Register result = locs()->out().reg();
   if (IsPotentialUnboxedLoad()) {
     Register temp = locs()->temp(1).reg();
     XmmRegister value = locs()->temp(0).fpu_reg();

     Label load_pointer;
+    Label load_double;
     __ LoadObject(result, Field::ZoneHandle(field()->raw()));

     FieldAddress field_cid_operand(result, Field::guarded_cid_offset());
     FieldAddress field_nullability_operand(result, Field::is_nullable_offset());

-    __ cmpl(field_cid_operand, Immediate(kDoubleCid));
-    __ j(NOT_EQUAL, &load_pointer);
-
     __ cmpl(field_nullability_operand, Immediate(kNullCid));
     __ j(EQUAL, &load_pointer);

+    __ cmpl(field_cid_operand, Immediate(kDoubleCid));
+    __ j(EQUAL, &load_double);
+
+    // Fall through.
+    __ jmp(&load_pointer);
+
+    __ Bind(&load_double);
     BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
     compiler->AddSlowPathCode(slow_path);

     if (!compiler->is_optimizing()) {
       locs()->live_registers()->Add(locs()->in(0));
     }

     __ TryAllocate(compiler->double_class(),
                    slow_path->entry_label(),
                    Assembler::kFarJump,
                    result,
                    temp);
     __ Bind(slow_path->exit_label());
     __ movl(temp, FieldAddress(instance_reg, offset_in_bytes()));
     __ movsd(value, FieldAddress(temp, Double::value_offset()));
     __ movsd(FieldAddress(result, Double::value_offset()), value);
     __ jmp(&done);
+
+    // TODO(johnmccutchan): Add Float32x4 path here.
+
     __ Bind(&load_pointer);
   }
   __ movl(result, FieldAddress(instance_reg, offset_in_bytes()));
   __ Bind(&done);
 }
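The load path mirrors the store path: an optimized unboxed load switches on field()->UnboxedFieldCid(), and a potential unboxed load dispatches on the guarded cid to load_double. The TODO(johnmccutchan) markers reserve the Float32x4 arms; one plausible shape for the unboxed-load switch, assuming a Float32x4 box with a value_offset() accessor and a 16-byte movups move (an assumption, not part of this patch):

    // Hypothetical Float32x4 arm for the unboxed-load switch above.
    case kFloat32x4Cid:
      __ movups(result, FieldAddress(temp, Float32x4::value_offset()));
      break;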


 LocationSummary* InstantiateTypeInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
(...skipping 3245 matching lines...)
                          PcDescriptors::kOther,
                          locs());
   __ Drop(2);  // Discard type arguments and receiver.
 }

 }  // namespace dart

 #undef __

 #endif  // defined TARGET_ARCH_IA32
