Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(106)

Side by Side Diff: runtime/vm/intermediate_language_x64.cc

Issue 410333003: Shorter TryAllocate instruction sequence on ARM/ARM64/MIPS. (Closed) Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 1488 matching lines...) Expand 10 before | Expand all | Expand 10 after
1499 __ CompareImmediate( 1499 __ CompareImmediate(
1500 FieldAddress(value_reg, 1500 FieldAddress(value_reg,
1501 field().guarded_list_length_in_object_offset()), 1501 field().guarded_list_length_in_object_offset()),
1502 Immediate(Smi::RawValue(field().guarded_list_length())), 1502 Immediate(Smi::RawValue(field().guarded_list_length())),
1503 PP); 1503 PP);
1504 __ j(NOT_EQUAL, deopt); 1504 __ j(NOT_EQUAL, deopt);
1505 } 1505 }
1506 } 1506 }
1507 1507
1508 1508
// Shared slow path for boxing allocations (Double, Float32x4, Float64x2,
// Int32x4, ...): when the inline TryAllocate fast path fails, this path
// calls the class's allocation stub and leaves the new instance in `result`.
class BoxAllocationSlowPath : public SlowPathCode {
 public:
  // `instruction` owns the LocationSummary and supplies the debug name;
  // `cls` is the box class to allocate; `result` receives the instance.
  BoxAllocationSlowPath(Instruction* instruction,
                        const Class& cls,
                        Register result)
      : instruction_(instruction),
        cls_(cls),
        result_(result) { }

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    Isolate* isolate = compiler->isolate();
    StubCode* stub_code = isolate->stub_code();

    // Only build the (string-allocating) comment text when the assembler
    // is actually emitting comments.
    if (Assembler::EmittingComments()) {
      __ Comment("%s slow path allocation of %s",
                 instruction_->DebugName(),
                 String::Handle(cls_.PrettyName()).ToCString());
    }
    __ Bind(entry_label());

    const Code& stub =
        Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_));
    const ExternalLabel label(stub.EntryPoint());

    LocationSummary* locs = instruction_->locs();

    // `result_` is defined by this slow path, so it must not be
    // saved/restored around the stub call.
    locs->live_registers()->Remove(Location::RegisterLocation(result_));

    compiler->SaveLiveRegisters(locs);
    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                           &label,
                           RawPcDescriptors::kOther,
                           locs);
    // The allocation stub returns the new instance in RAX.
    __ MoveRegister(result_, RAX);
    compiler->RestoreLiveRegisters(locs);
    __ jmp(exit_label());
  }

  // Emits the fast path: inline TryAllocate of `cls` into `result`, with a
  // newly registered BoxAllocationSlowPath as the fallback. Falls through
  // (at the bound exit label) with the allocation completed either way.
  static void Allocate(FlowGraphCompiler* compiler,
                       Instruction* instruction,
                       const Class& cls,
                       Register result) {
    BoxAllocationSlowPath* slow_path =
        new BoxAllocationSlowPath(instruction, cls, result);
    compiler->AddSlowPathCode(slow_path);

    __ TryAllocate(cls,
                   slow_path->entry_label(),
                   Assembler::kFarJump,
                   result,
                   PP);
    __ Bind(slow_path->exit_label());
  }

 private:
  Instruction* instruction_;
  const Class& cls_;
  Register result_;
};
1542 1568
1543 1569
1544 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate, 1570 LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate,
1545 bool opt) const { 1571 bool opt) const {
1546 const intptr_t kNumInputs = 2; 1572 const intptr_t kNumInputs = 2;
1547 const intptr_t kNumTemps = 1573 const intptr_t kNumTemps =
1548 (IsUnboxedStore() && opt) ? 2 : 1574 (IsUnboxedStore() && opt) ? 2 :
1549 ((IsPotentialUnboxedStore()) ? 3 : 0); 1575 ((IsPotentialUnboxedStore()) ? 3 : 0);
1550 LocationSummary* summary = new(isolate) LocationSummary( 1576 LocationSummary* summary = new(isolate) LocationSummary(
(...skipping 18 matching lines...) Expand all
1569 : Location::FpuRegisterLocation(XMM1)); 1595 : Location::FpuRegisterLocation(XMM1));
1570 } else { 1596 } else {
1571 summary->set_in(1, ShouldEmitStoreBarrier() 1597 summary->set_in(1, ShouldEmitStoreBarrier()
1572 ? Location::WritableRegister() 1598 ? Location::WritableRegister()
1573 : Location::RegisterOrConstant(value())); 1599 : Location::RegisterOrConstant(value()));
1574 } 1600 }
1575 return summary; 1601 return summary;
1576 } 1602 }
1577 1603
1578 1604
// Loads the mutable box stored in the field at `instance_reg` + `offset`
// into `box_reg`; if the field is still null, allocates a fresh box of
// `cls` (inline fast path + slow path) and stores it into the field with
// a write barrier. On exit `box_reg` holds a non-null box.
static void EnsureMutableBox(FlowGraphCompiler* compiler,
                             StoreInstanceFieldInstr* instruction,
                             Register box_reg,
                             const Class& cls,
                             Register instance_reg,
                             intptr_t offset,
                             Register temp) {
  Label done;
  __ movq(box_reg, FieldAddress(instance_reg, offset));
  __ CompareObject(box_reg, Object::null_object(), PP);
  __ j(NOT_EQUAL, &done);  // Box already allocated: use it as-is.
  BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg);
  // Store through `temp` so `box_reg` survives the store — NOTE(review):
  // presumably StoreIntoObject may clobber its value register; confirm
  // against the x64 assembler's write-barrier implementation.
  __ movq(temp, box_reg);
  __ StoreIntoObject(instance_reg,
                     FieldAddress(instance_reg, offset),
                     temp);

  __ Bind(&done);
}
1624
1625
1579 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1626 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1580 Label skip_store; 1627 Label skip_store;
1581 1628
1582 Register instance_reg = locs()->in(0).reg(); 1629 Register instance_reg = locs()->in(0).reg();
1583 1630
1584 if (IsUnboxedStore() && compiler->is_optimizing()) { 1631 if (IsUnboxedStore() && compiler->is_optimizing()) {
1585 XmmRegister value = locs()->in(1).fpu_reg(); 1632 XmmRegister value = locs()->in(1).fpu_reg();
1586 Register temp = locs()->temp(0).reg(); 1633 Register temp = locs()->temp(0).reg();
1587 Register temp2 = locs()->temp(1).reg(); 1634 Register temp2 = locs()->temp(1).reg();
1588 const intptr_t cid = field().UnboxedFieldCid(); 1635 const intptr_t cid = field().UnboxedFieldCid();
1589 1636
1590 if (is_initialization_) { 1637 if (is_initialization_) {
1591 const Class* cls = NULL; 1638 const Class* cls = NULL;
1592 switch (cid) { 1639 switch (cid) {
1593 case kDoubleCid: 1640 case kDoubleCid:
1594 cls = &compiler->double_class(); 1641 cls = &compiler->double_class();
1595 break; 1642 break;
1596 case kFloat32x4Cid: 1643 case kFloat32x4Cid:
1597 cls = &compiler->float32x4_class(); 1644 cls = &compiler->float32x4_class();
1598 break; 1645 break;
1599 case kFloat64x2Cid: 1646 case kFloat64x2Cid:
1600 cls = &compiler->float64x2_class(); 1647 cls = &compiler->float64x2_class();
1601 break; 1648 break;
1602 default: 1649 default:
1603 UNREACHABLE(); 1650 UNREACHABLE();
1604 } 1651 }
1605 1652
1606 StoreInstanceFieldSlowPath* slow_path = 1653 BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp);
1607 new StoreInstanceFieldSlowPath(this, *cls);
1608 compiler->AddSlowPathCode(slow_path);
1609
1610 __ TryAllocate(*cls,
1611 slow_path->entry_label(),
1612 Assembler::kFarJump,
1613 temp,
1614 PP);
1615 __ Bind(slow_path->exit_label());
1616 __ movq(temp2, temp); 1654 __ movq(temp2, temp);
1617 __ StoreIntoObject(instance_reg, 1655 __ StoreIntoObject(instance_reg,
1618 FieldAddress(instance_reg, offset_in_bytes_), 1656 FieldAddress(instance_reg, offset_in_bytes_),
1619 temp2); 1657 temp2);
1620 } else { 1658 } else {
1621 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_)); 1659 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_));
1622 } 1660 }
1623 switch (cid) { 1661 switch (cid) {
1624 case kDoubleCid: 1662 case kDoubleCid:
1625 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); 1663 __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
1675 // Fall through. 1713 // Fall through.
1676 __ jmp(&store_pointer); 1714 __ jmp(&store_pointer);
1677 1715
1678 if (!compiler->is_optimizing()) { 1716 if (!compiler->is_optimizing()) {
1679 locs()->live_registers()->Add(locs()->in(0)); 1717 locs()->live_registers()->Add(locs()->in(0));
1680 locs()->live_registers()->Add(locs()->in(1)); 1718 locs()->live_registers()->Add(locs()->in(1));
1681 } 1719 }
1682 1720
1683 { 1721 {
1684 __ Bind(&store_double); 1722 __ Bind(&store_double);
1685 Label copy_double; 1723 EnsureMutableBox(compiler,
1686 StoreInstanceFieldSlowPath* slow_path = 1724 this,
1687 new StoreInstanceFieldSlowPath(this, compiler->double_class()); 1725 temp,
1688 compiler->AddSlowPathCode(slow_path); 1726 compiler->double_class(),
1689 1727 instance_reg,
1690 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_)); 1728 offset_in_bytes_,
1691 __ CompareObject(temp, Object::null_object(), PP); 1729 temp2);
1692 __ j(NOT_EQUAL, &copy_double);
1693
1694 __ TryAllocate(compiler->double_class(),
1695 slow_path->entry_label(),
1696 Assembler::kFarJump,
1697 temp,
1698 PP);
1699 __ Bind(slow_path->exit_label());
1700 __ movq(temp2, temp);
1701 __ StoreIntoObject(instance_reg,
1702 FieldAddress(instance_reg, offset_in_bytes_),
1703 temp2);
1704
1705 __ Bind(&copy_double);
1706 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); 1730 __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
1707 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); 1731 __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
1708 __ jmp(&skip_store); 1732 __ jmp(&skip_store);
1709 } 1733 }
1710 1734
1711 { 1735 {
1712 __ Bind(&store_float32x4); 1736 __ Bind(&store_float32x4);
1713 Label copy_float32x4; 1737 EnsureMutableBox(compiler,
1714 StoreInstanceFieldSlowPath* slow_path = 1738 this,
1715 new StoreInstanceFieldSlowPath(this, compiler->float32x4_class()); 1739 temp,
1716 compiler->AddSlowPathCode(slow_path); 1740 compiler->float32x4_class(),
1717 1741 instance_reg,
1718 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_)); 1742 offset_in_bytes_,
1719 __ CompareObject(temp, Object::null_object(), PP); 1743 temp2);
1720 __ j(NOT_EQUAL, &copy_float32x4);
1721
1722 __ TryAllocate(compiler->float32x4_class(),
1723 slow_path->entry_label(),
1724 Assembler::kFarJump,
1725 temp,
1726 PP);
1727 __ Bind(slow_path->exit_label());
1728 __ movq(temp2, temp);
1729 __ StoreIntoObject(instance_reg,
1730 FieldAddress(instance_reg, offset_in_bytes_),
1731 temp2);
1732
1733 __ Bind(&copy_float32x4);
1734 __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset())); 1744 __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset()));
1735 __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp); 1745 __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp);
1736 __ jmp(&skip_store); 1746 __ jmp(&skip_store);
1737 } 1747 }
1738 1748
1739 { 1749 {
1740 __ Bind(&store_float64x2); 1750 __ Bind(&store_float64x2);
1741 Label copy_float64x2; 1751 EnsureMutableBox(compiler,
1742 1752 this,
1743 StoreInstanceFieldSlowPath* slow_path = 1753 temp,
1744 new StoreInstanceFieldSlowPath(this, compiler->float64x2_class()); 1754 compiler->float64x2_class(),
1745 compiler->AddSlowPathCode(slow_path); 1755 instance_reg,
1746 1756 offset_in_bytes_,
1747 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_)); 1757 temp2);
1748 __ CompareObject(temp, Object::null_object(), PP);
1749 __ j(NOT_EQUAL, &copy_float64x2);
1750
1751 __ TryAllocate(compiler->float64x2_class(),
1752 slow_path->entry_label(),
1753 Assembler::kFarJump,
1754 temp,
1755 temp2);
1756 __ Bind(slow_path->exit_label());
1757 __ movq(temp2, temp);
1758 __ StoreIntoObject(instance_reg,
1759 FieldAddress(instance_reg, offset_in_bytes_),
1760 temp2);
1761
1762 __ Bind(&copy_float64x2);
1763 __ movups(fpu_temp, FieldAddress(value_reg, Float64x2::value_offset())); 1758 __ movups(fpu_temp, FieldAddress(value_reg, Float64x2::value_offset()));
1764 __ movups(FieldAddress(temp, Float64x2::value_offset()), fpu_temp); 1759 __ movups(FieldAddress(temp, Float64x2::value_offset()), fpu_temp);
1765 __ jmp(&skip_store); 1760 __ jmp(&skip_store);
1766 } 1761 }
1767 1762
1768 __ Bind(&store_pointer); 1763 __ Bind(&store_pointer);
1769 } 1764 }
1770 1765
1771 if (ShouldEmitStoreBarrier()) { 1766 if (ShouldEmitStoreBarrier()) {
1772 Register value_reg = locs()->in(1).reg(); 1767 Register value_reg = locs()->in(1).reg();
(...skipping 214 matching lines...) Expand 10 before | Expand all | Expand 10 after
1987 StubCode* stub_code = compiler->isolate()->stub_code(); 1982 StubCode* stub_code = compiler->isolate()->stub_code();
1988 compiler->GenerateCall(token_pos(), 1983 compiler->GenerateCall(token_pos(),
1989 &stub_code->AllocateArrayLabel(), 1984 &stub_code->AllocateArrayLabel(),
1990 RawPcDescriptors::kOther, 1985 RawPcDescriptors::kOther,
1991 locs()); 1986 locs());
1992 __ Bind(&done); 1987 __ Bind(&done);
1993 ASSERT(locs()->out(0).reg() == kResultReg); 1988 ASSERT(locs()->out(0).reg() == kResultReg);
1994 } 1989 }
1995 1990
1996 1991
// Slow path for boxing a double: calls the Double allocation stub when the
// inline TryAllocate fast path fails; leaves the box in locs()->out(0).
// NOTE(review): BoxDoubleSlowPath, BoxFloat32x4SlowPath,
// BoxFloat64x2SlowPath and BoxInt32x4SlowPath are identical except for the
// box class — consider folding them into one slow path parameterized by
// the Class to allocate.
class BoxDoubleSlowPath : public SlowPathCode {
 public:
  explicit BoxDoubleSlowPath(Instruction* instruction)
      : instruction_(instruction) { }

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("BoxDoubleSlowPath");
    __ Bind(entry_label());
    Isolate* isolate = compiler->isolate();
    StubCode* stub_code = isolate->stub_code();
    const Class& double_class = compiler->double_class();
    const Code& stub =
        Code::Handle(isolate,
                     stub_code->GetAllocationStubForClass(double_class));
    const ExternalLabel label(stub.EntryPoint());

    LocationSummary* locs = instruction_->locs();
    // The output is defined here, so it must not be in the save set.
    ASSERT(!locs->live_registers()->Contains(locs->out(0)));

    compiler->SaveLiveRegisters(locs);
    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                           &label,
                           RawPcDescriptors::kOther,
                           locs);
    // The allocation stub returns the new instance in RAX.
    __ MoveRegister(locs->out(0).reg(), RAX);
    compiler->RestoreLiveRegisters(locs);

    __ jmp(exit_label());
  }

 private:
  Instruction* instruction_;
};
2030
2031
// Slow path for boxing a Float32x4: calls the Float32x4 allocation stub
// when the inline TryAllocate fast path fails; leaves the box in
// locs()->out(0).
// NOTE(review): duplicated boilerplate — identical to BoxDoubleSlowPath
// except for the box class; consider a single parameterized slow path.
class BoxFloat32x4SlowPath : public SlowPathCode {
 public:
  explicit BoxFloat32x4SlowPath(Instruction* instruction)
      : instruction_(instruction) { }

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("BoxFloat32x4SlowPath");
    __ Bind(entry_label());
    Isolate* isolate = compiler->isolate();
    StubCode* stub_code = isolate->stub_code();
    const Class& float32x4_class = compiler->float32x4_class();
    const Code& stub =
        Code::Handle(isolate,
                     stub_code->GetAllocationStubForClass(float32x4_class));
    const ExternalLabel label(stub.EntryPoint());

    LocationSummary* locs = instruction_->locs();
    // The output is defined here, so it must not be in the save set.
    ASSERT(!locs->live_registers()->Contains(locs->out(0)));

    compiler->SaveLiveRegisters(locs);
    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                           &label,
                           RawPcDescriptors::kOther,
                           locs);
    // The allocation stub returns the new instance in RAX.
    __ MoveRegister(locs->out(0).reg(), RAX);
    compiler->RestoreLiveRegisters(locs);

    __ jmp(exit_label());
  }

 private:
  Instruction* instruction_;
};
2065
2066
// Slow path for boxing a Float64x2: calls the Float64x2 allocation stub
// when the inline TryAllocate fast path fails; leaves the box in
// locs()->out(0).
// NOTE(review): duplicated boilerplate — identical to BoxDoubleSlowPath
// except for the box class; consider a single parameterized slow path.
class BoxFloat64x2SlowPath : public SlowPathCode {
 public:
  explicit BoxFloat64x2SlowPath(Instruction* instruction)
      : instruction_(instruction) { }

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("BoxFloat64x2SlowPath");
    __ Bind(entry_label());
    Isolate* isolate = compiler->isolate();
    StubCode* stub_code = isolate->stub_code();
    const Class& float64x2_class = compiler->float64x2_class();
    const Code& stub =
        Code::Handle(isolate,
                     stub_code->GetAllocationStubForClass(float64x2_class));
    const ExternalLabel label(stub.EntryPoint());

    LocationSummary* locs = instruction_->locs();
    // The output is defined here, so it must not be in the save set.
    ASSERT(!locs->live_registers()->Contains(locs->out(0)));

    compiler->SaveLiveRegisters(locs);
    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                           &label,
                           RawPcDescriptors::kOther,
                           locs);
    // The allocation stub returns the new instance in RAX.
    __ MoveRegister(locs->out(0).reg(), RAX);
    compiler->RestoreLiveRegisters(locs);

    __ jmp(exit_label());
  }

 private:
  Instruction* instruction_;
};
2100
2101
2102 LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate, 1992 LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate,
2103 bool opt) const { 1993 bool opt) const {
2104 const intptr_t kNumInputs = 1; 1994 const intptr_t kNumInputs = 1;
2105 const intptr_t kNumTemps = 1995 const intptr_t kNumTemps =
2106 (IsUnboxedLoad() && opt) ? 1 : 1996 (IsUnboxedLoad() && opt) ? 1 :
2107 ((IsPotentialUnboxedLoad()) ? 2 : 0); 1997 ((IsPotentialUnboxedLoad()) ? 2 : 0);
2108 LocationSummary* locs = new(isolate) LocationSummary( 1998 LocationSummary* locs = new(isolate) LocationSummary(
2109 isolate, kNumInputs, kNumTemps, 1999 isolate, kNumInputs, kNumTemps,
2110 (opt && !IsPotentialUnboxedLoad()) 2000 (opt && !IsPotentialUnboxedLoad())
2111 ? LocationSummary::kNoCall 2001 ? LocationSummary::kNoCall
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after
2182 2072
2183 // Fall through. 2073 // Fall through.
2184 __ jmp(&load_pointer); 2074 __ jmp(&load_pointer);
2185 2075
2186 if (!compiler->is_optimizing()) { 2076 if (!compiler->is_optimizing()) {
2187 locs()->live_registers()->Add(locs()->in(0)); 2077 locs()->live_registers()->Add(locs()->in(0));
2188 } 2078 }
2189 2079
2190 { 2080 {
2191 __ Bind(&load_double); 2081 __ Bind(&load_double);
2192 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); 2082 BoxAllocationSlowPath::Allocate(
2193 compiler->AddSlowPathCode(slow_path); 2083 compiler, this, compiler->double_class(), result);
2194
2195 __ TryAllocate(compiler->double_class(),
2196 slow_path->entry_label(),
2197 Assembler::kFarJump,
2198 result,
2199 PP);
2200 __ Bind(slow_path->exit_label());
2201 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 2084 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2202 __ movsd(value, FieldAddress(temp, Double::value_offset())); 2085 __ movsd(value, FieldAddress(temp, Double::value_offset()));
2203 __ movsd(FieldAddress(result, Double::value_offset()), value); 2086 __ movsd(FieldAddress(result, Double::value_offset()), value);
2204 __ jmp(&done); 2087 __ jmp(&done);
2205 } 2088 }
2206 2089
2207 { 2090 {
2208 __ Bind(&load_float32x4); 2091 __ Bind(&load_float32x4);
2209 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); 2092 BoxAllocationSlowPath::Allocate(
2210 compiler->AddSlowPathCode(slow_path); 2093 compiler, this, compiler->float32x4_class(), result);
2211
2212 __ TryAllocate(compiler->float32x4_class(),
2213 slow_path->entry_label(),
2214 Assembler::kFarJump,
2215 result,
2216 PP);
2217 __ Bind(slow_path->exit_label());
2218 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 2094 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2219 __ movups(value, FieldAddress(temp, Float32x4::value_offset())); 2095 __ movups(value, FieldAddress(temp, Float32x4::value_offset()));
2220 __ movups(FieldAddress(result, Float32x4::value_offset()), value); 2096 __ movups(FieldAddress(result, Float32x4::value_offset()), value);
2221 __ jmp(&done); 2097 __ jmp(&done);
2222 } 2098 }
2223 2099
2224 { 2100 {
2225 __ Bind(&load_float64x2); 2101 __ Bind(&load_float64x2);
2226 BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this); 2102 BoxAllocationSlowPath::Allocate(
2227 compiler->AddSlowPathCode(slow_path); 2103 compiler, this, compiler->float64x2_class(), result);
2228
2229 __ TryAllocate(compiler->float64x2_class(),
2230 slow_path->entry_label(),
2231 Assembler::kFarJump,
2232 result,
2233 temp);
2234 __ Bind(slow_path->exit_label());
2235 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 2104 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2236 __ movups(value, FieldAddress(temp, Float64x2::value_offset())); 2105 __ movups(value, FieldAddress(temp, Float64x2::value_offset()));
2237 __ movups(FieldAddress(result, Float64x2::value_offset()), value); 2106 __ movups(FieldAddress(result, Float64x2::value_offset()), value);
2238 __ jmp(&done); 2107 __ jmp(&done);
2239 } 2108 }
2240 2109
2241 __ Bind(&load_pointer); 2110 __ Bind(&load_pointer);
2242 } 2111 }
2243 __ movq(result, FieldAddress(instance_reg, offset_in_bytes())); 2112 __ movq(result, FieldAddress(instance_reg, offset_in_bytes()));
2244 __ Bind(&done); 2113 __ Bind(&done);
(...skipping 945 matching lines...) Expand 10 before | Expand all | Expand 10 after
3190 isolate, kNumInputs, 3059 isolate, kNumInputs,
3191 kNumTemps, 3060 kNumTemps,
3192 LocationSummary::kCallOnSlowPath); 3061 LocationSummary::kCallOnSlowPath);
3193 summary->set_in(0, Location::RequiresFpuRegister()); 3062 summary->set_in(0, Location::RequiresFpuRegister());
3194 summary->set_out(0, Location::RequiresRegister()); 3063 summary->set_out(0, Location::RequiresRegister());
3195 return summary; 3064 return summary;
3196 } 3065 }
3197 3066
3198 3067
3199 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3068 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3200 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
3201 compiler->AddSlowPathCode(slow_path);
3202
3203 Register out_reg = locs()->out(0).reg(); 3069 Register out_reg = locs()->out(0).reg();
3204 XmmRegister value = locs()->in(0).fpu_reg(); 3070 XmmRegister value = locs()->in(0).fpu_reg();
3205 3071
3206 __ TryAllocate(compiler->double_class(), 3072 BoxAllocationSlowPath::Allocate(
3207 slow_path->entry_label(), 3073 compiler, this, compiler->double_class(), out_reg);
3208 Assembler::kFarJump,
3209 out_reg,
3210 PP);
3211 __ Bind(slow_path->exit_label());
3212 __ movsd(FieldAddress(out_reg, Double::value_offset()), value); 3074 __ movsd(FieldAddress(out_reg, Double::value_offset()), value);
3213 } 3075 }
3214 3076
3215 3077
3216 LocationSummary* UnboxDoubleInstr::MakeLocationSummary(Isolate* isolate, 3078 LocationSummary* UnboxDoubleInstr::MakeLocationSummary(Isolate* isolate,
3217 bool opt) const { 3079 bool opt) const {
3218 const intptr_t kNumInputs = 1; 3080 const intptr_t kNumInputs = 1;
3219 const intptr_t kNumTemps = 0; 3081 const intptr_t kNumTemps = 0;
3220 LocationSummary* summary = new(isolate) LocationSummary( 3082 LocationSummary* summary = new(isolate) LocationSummary(
3221 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 3083 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
3275 isolate, kNumInputs, 3137 isolate, kNumInputs,
3276 kNumTemps, 3138 kNumTemps,
3277 LocationSummary::kCallOnSlowPath); 3139 LocationSummary::kCallOnSlowPath);
3278 summary->set_in(0, Location::RequiresFpuRegister()); 3140 summary->set_in(0, Location::RequiresFpuRegister());
3279 summary->set_out(0, Location::RequiresRegister()); 3141 summary->set_out(0, Location::RequiresRegister());
3280 return summary; 3142 return summary;
3281 } 3143 }
3282 3144
3283 3145
3284 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { 3146 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
3285 BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this);
3286 compiler->AddSlowPathCode(slow_path);
3287
3288 Register out_reg = locs()->out(0).reg(); 3147 Register out_reg = locs()->out(0).reg();
3289 XmmRegister value = locs()->in(0).fpu_reg(); 3148 XmmRegister value = locs()->in(0).fpu_reg();
3290 3149
3291 __ TryAllocate(compiler->float32x4_class(), 3150 BoxAllocationSlowPath::Allocate(
3292 slow_path->entry_label(), 3151 compiler, this, compiler->float32x4_class(), out_reg);
3293 Assembler::kFarJump,
3294 out_reg,
3295 PP);
3296 __ Bind(slow_path->exit_label());
3297 __ movups(FieldAddress(out_reg, Float32x4::value_offset()), value); 3152 __ movups(FieldAddress(out_reg, Float32x4::value_offset()), value);
3298 } 3153 }
3299 3154
3300 3155
3301 LocationSummary* UnboxFloat32x4Instr::MakeLocationSummary(Isolate* isolate, 3156 LocationSummary* UnboxFloat32x4Instr::MakeLocationSummary(Isolate* isolate,
3302 bool opt) const { 3157 bool opt) const {
3303 const intptr_t kNumInputs = 1; 3158 const intptr_t kNumInputs = 1;
3304 return LocationSummary::Make(isolate, 3159 return LocationSummary::Make(isolate,
3305 kNumInputs, 3160 kNumInputs,
3306 Location::RequiresFpuRegister(), 3161 Location::RequiresFpuRegister(),
(...skipping 25 matching lines...) Expand all
3332 isolate, kNumInputs, 3187 isolate, kNumInputs,
3333 kNumTemps, 3188 kNumTemps,
3334 LocationSummary::kCallOnSlowPath); 3189 LocationSummary::kCallOnSlowPath);
3335 summary->set_in(0, Location::RequiresFpuRegister()); 3190 summary->set_in(0, Location::RequiresFpuRegister());
3336 summary->set_out(0, Location::RequiresRegister()); 3191 summary->set_out(0, Location::RequiresRegister());
3337 return summary; 3192 return summary;
3338 } 3193 }
3339 3194
3340 3195
3341 void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) { 3196 void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
3342 BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this);
3343 compiler->AddSlowPathCode(slow_path);
3344
3345 Register out_reg = locs()->out(0).reg(); 3197 Register out_reg = locs()->out(0).reg();
3346 XmmRegister value = locs()->in(0).fpu_reg(); 3198 XmmRegister value = locs()->in(0).fpu_reg();
3347 3199
3348 __ TryAllocate(compiler->float64x2_class(), 3200 BoxAllocationSlowPath::Allocate(
3349 slow_path->entry_label(), 3201 compiler, this, compiler->float64x2_class(), out_reg);
3350 Assembler::kFarJump,
3351 out_reg,
3352 kNoRegister);
3353 __ Bind(slow_path->exit_label());
3354 __ movups(FieldAddress(out_reg, Float64x2::value_offset()), value); 3202 __ movups(FieldAddress(out_reg, Float64x2::value_offset()), value);
3355 } 3203 }
3356 3204
3357 3205
3358 LocationSummary* UnboxFloat64x2Instr::MakeLocationSummary(Isolate* isolate, 3206 LocationSummary* UnboxFloat64x2Instr::MakeLocationSummary(Isolate* isolate,
3359 bool opt) const { 3207 bool opt) const {
3360 const intptr_t value_cid = value()->Type()->ToCid(); 3208 const intptr_t value_cid = value()->Type()->ToCid();
3361 const intptr_t kNumInputs = 1; 3209 const intptr_t kNumInputs = 1;
3362 const intptr_t kNumTemps = value_cid == kFloat64x2Cid ? 0 : 1; 3210 const intptr_t kNumTemps = value_cid == kFloat64x2Cid ? 0 : 1;
3363 LocationSummary* summary = new(isolate) LocationSummary( 3211 LocationSummary* summary = new(isolate) LocationSummary(
(...skipping 27 matching lines...) Expand all
3391 LocationSummary* summary = new(isolate) LocationSummary( 3239 LocationSummary* summary = new(isolate) LocationSummary(
3392 isolate, kNumInputs, 3240 isolate, kNumInputs,
3393 kNumTemps, 3241 kNumTemps,
3394 LocationSummary::kCallOnSlowPath); 3242 LocationSummary::kCallOnSlowPath);
3395 summary->set_in(0, Location::RequiresFpuRegister()); 3243 summary->set_in(0, Location::RequiresFpuRegister());
3396 summary->set_out(0, Location::RequiresRegister()); 3244 summary->set_out(0, Location::RequiresRegister());
3397 return summary; 3245 return summary;
3398 } 3246 }
3399 3247
3400 3248
// Slow path for boxing an Int32x4: calls the Int32x4 allocation stub when
// the inline TryAllocate fast path fails; leaves the box in locs()->out(0).
// NOTE(review): duplicated boilerplate — identical to BoxDoubleSlowPath
// except for the box class (and the narrower constructor parameter type);
// consider a single parameterized slow path.
class BoxInt32x4SlowPath : public SlowPathCode {
 public:
  explicit BoxInt32x4SlowPath(BoxInt32x4Instr* instruction)
      : instruction_(instruction) { }

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("BoxInt32x4SlowPath");
    __ Bind(entry_label());
    Isolate* isolate = compiler->isolate();
    StubCode* stub_code = isolate->stub_code();
    const Class& int32x4_class = compiler->int32x4_class();
    const Code& stub =
        Code::Handle(isolate,
                     stub_code->GetAllocationStubForClass(int32x4_class));
    const ExternalLabel label(stub.EntryPoint());

    LocationSummary* locs = instruction_->locs();
    // The output is defined here, so it must not be in the save set.
    ASSERT(!locs->live_registers()->Contains(locs->out(0)));

    compiler->SaveLiveRegisters(locs);
    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
                           &label,
                           RawPcDescriptors::kOther,
                           locs);
    // The allocation stub returns the new instance in RAX.
    __ MoveRegister(locs->out(0).reg(), RAX);
    compiler->RestoreLiveRegisters(locs);

    __ jmp(exit_label());
  }

 private:
  BoxInt32x4Instr* instruction_;
};
3434
3435
3436 void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { 3249 void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
3437 BoxInt32x4SlowPath* slow_path = new BoxInt32x4SlowPath(this);
3438 compiler->AddSlowPathCode(slow_path);
3439
3440 Register out_reg = locs()->out(0).reg(); 3250 Register out_reg = locs()->out(0).reg();
3441 XmmRegister value = locs()->in(0).fpu_reg(); 3251 XmmRegister value = locs()->in(0).fpu_reg();
3442 3252
3443 __ TryAllocate(compiler->int32x4_class(), 3253 BoxAllocationSlowPath::Allocate(
3444 slow_path->entry_label(), 3254 compiler, this, compiler->int32x4_class(), out_reg);
3445 Assembler::kFarJump,
3446 out_reg,
3447 PP);
3448 __ Bind(slow_path->exit_label());
3449 __ movups(FieldAddress(out_reg, Int32x4::value_offset()), value); 3255 __ movups(FieldAddress(out_reg, Int32x4::value_offset()), value);
3450 } 3256 }
3451 3257
3452 3258
3453 LocationSummary* UnboxInt32x4Instr::MakeLocationSummary(Isolate* isolate, 3259 LocationSummary* UnboxInt32x4Instr::MakeLocationSummary(Isolate* isolate,
3454 bool opt) const { 3260 bool opt) const {
3455 const intptr_t kNumInputs = 1; 3261 const intptr_t kNumInputs = 1;
3456 const intptr_t kNumTemps = 0; 3262 const intptr_t kNumTemps = 0;
3457 LocationSummary* summary = new(isolate) LocationSummary( 3263 LocationSummary* summary = new(isolate) LocationSummary(
3458 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall); 3264 isolate, kNumInputs, kNumTemps, LocationSummary::kNoCall);
(...skipping 2503 matching lines...) Expand 10 before | Expand all | Expand 10 after
5962 __ movq(R10, Immediate(kInvalidObjectPointer)); 5768 __ movq(R10, Immediate(kInvalidObjectPointer));
5963 __ movq(RBX, Immediate(kInvalidObjectPointer)); 5769 __ movq(RBX, Immediate(kInvalidObjectPointer));
5964 #endif 5770 #endif
5965 } 5771 }
5966 5772
5967 } // namespace dart 5773 } // namespace dart
5968 5774
5969 #undef __ 5775 #undef __
5970 5776
5971 #endif // defined TARGET_ARCH_X64 5777 #endif // defined TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698