Index: runtime/vm/intermediate_language_mips.cc |
diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc |
index 450c24bc6e595402959ea7c1ddae95ff89f0751b..c885dbc7a80e4ba27d97e0a559f7123d808382a8 100644 |
--- a/runtime/vm/intermediate_language_mips.cc |
+++ b/runtime/vm/intermediate_language_mips.cc |
@@ -1658,16 +1658,16 @@ void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
class StoreInstanceFieldSlowPath : public SlowPathCode { |
public: |
- StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, |
- const Class& cls) |
- : instruction_(instruction), cls_(cls) { } |
+ explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction) |
+ : instruction_(instruction) { } |
virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
__ Comment("StoreInstanceFieldSlowPath"); |
- __ Bind(entry_label()); |
+ __ Bind(double_entry_label()); |
+ const Class& cls = compiler->double_class(); |
const Code& stub = |
- Code::Handle(StubCode::GetAllocationStubForClass(cls_)); |
- const ExternalLabel label(cls_.ToCString(), stub.EntryPoint()); |
+ Code::Handle(StubCode::GetAllocationStubForClass(cls)); |
+ const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); |
LocationSummary* locs = instruction_->locs(); |
locs->live_registers()->Remove(locs->out()); |
@@ -1680,12 +1680,20 @@ class StoreInstanceFieldSlowPath : public SlowPathCode { |
__ mov(locs->temp(0).reg(), V0); |
compiler->RestoreLiveRegisters(locs); |
- __ b(exit_label()); |
+ __ b(double_exit_label()); |
+ } |
+ |
+ Label* double_entry_label() { |
+ // Use default SlowPathCode label for double. |
+ return entry_label(); |
+ } |
+ Label* double_exit_label() { |
+ // Use default SlowPathCode label for double. |
+ return exit_label(); |
} |
private: |
StoreInstanceFieldInstr* instruction_; |
- const Class& cls_; |
}; |
@@ -1730,24 +1738,28 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
Register temp = locs()->temp(0).reg(); |
Register temp2 = locs()->temp(1).reg(); |
const intptr_t cid = field().UnboxedFieldCid(); |
+ StoreInstanceFieldSlowPath* slow_path = |
+ new StoreInstanceFieldSlowPath(this); |
+ compiler->AddSlowPathCode(slow_path); |
if (is_initialization_) { |
const Class* cls = NULL; |
+ Label* entry_label = NULL; |
+ Label* exit_label = NULL; |
switch (cid) { |
case kDoubleCid: |
cls = &compiler->double_class(); |
+ entry_label = slow_path->double_entry_label(); |
+ exit_label = slow_path->double_exit_label(); |
break; |
default: |
UNREACHABLE(); |
} |
- StoreInstanceFieldSlowPath* slow_path = |
- new StoreInstanceFieldSlowPath(this, *cls); |
- compiler->AddSlowPathCode(slow_path); |
__ TryAllocate(*cls, |
- slow_path->entry_label(), |
+ entry_label, |
temp, |
temp2); |
- __ Bind(slow_path->exit_label()); |
+ __ Bind(exit_label); |
__ mov(temp2, temp); |
__ StoreIntoObject(instance_reg, |
FieldAddress(instance_reg, field().Offset()), |
@@ -1797,7 +1809,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
&copy_double); |
StoreInstanceFieldSlowPath* slow_path = |
- new StoreInstanceFieldSlowPath(this, compiler->double_class()); |
+ new StoreInstanceFieldSlowPath(this); |
compiler->AddSlowPathCode(slow_path); |
if (!compiler->is_optimizing()) { |
@@ -1806,10 +1818,10 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
} |
__ TryAllocate(compiler->double_class(), |
- slow_path->entry_label(), |
+ slow_path->double_entry_label(), |
temp, |
temp2); |
- __ Bind(slow_path->exit_label()); |
+ __ Bind(slow_path->double_exit_label()); |
__ mov(temp2, temp); |
__ StoreIntoObject(instance_reg, |
FieldAddress(instance_reg, field().Offset()), |
@@ -1967,13 +1979,13 @@ void AllocateObjectWithBoundsCheckInstr::EmitNativeCode( |
} |
-class BoxDoubleSlowPath : public SlowPathCode { |
+class LoadFieldSlowPath : public SlowPathCode { |
public: |
- explicit BoxDoubleSlowPath(Instruction* instruction) |
+ explicit LoadFieldSlowPath(Instruction* instruction) |
: instruction_(instruction) { } |
virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
- __ Comment("BoxDoubleSlowPath"); |
+ __ Comment("LoadFieldSlowPath"); |
__ Bind(entry_label()); |
const Class& double_class = compiler->double_class(); |
const Code& stub = |
@@ -1996,6 +2008,15 @@ class BoxDoubleSlowPath : public SlowPathCode { |
__ b(exit_label()); |
} |
+ Label* double_entry_label() { |
+ // Use default SlowPathCode label for double. |
+ return entry_label(); |
+ } |
+ Label* double_exit_label() { |
+ // Use default SlowPathCode label for double. |
+ return exit_label(); |
+ } |
+ |
private: |
Instruction* instruction_; |
}; |
@@ -2048,6 +2069,8 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
if (IsPotentialUnboxedLoad()) { |
Register temp = locs()->temp(1).reg(); |
DRegister value = locs()->temp(0).fpu_reg(); |
+ LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this); |
+ compiler->AddSlowPathCode(slow_path); |
Label load_pointer; |
Label load_double; |
@@ -2067,26 +2090,26 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
// Fall through. |
__ b(&load_pointer); |
- __ Bind(&load_double); |
+ { |
+ __ Bind(&load_double); |
- BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); |
- compiler->AddSlowPathCode(slow_path); |
+ if (!compiler->is_optimizing()) { |
+ locs()->live_registers()->Add(locs()->in(0)); |
+ } |
- if (!compiler->is_optimizing()) { |
- locs()->live_registers()->Add(locs()->in(0)); |
+ __ TryAllocate(compiler->double_class(), |
+ slow_path->double_entry_label(), |
+ result_reg, |
+ temp); |
+ __ Bind(slow_path->double_exit_label()); |
+ __ lw(temp, FieldAddress(instance_reg, offset_in_bytes())); |
+ __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag); |
+ __ StoreDToOffset(value, |
+ result_reg, |
+ Double::value_offset() - kHeapObjectTag); |
+ __ b(&done); |
} |
- __ TryAllocate(compiler->double_class(), |
- slow_path->entry_label(), |
- result_reg, |
- temp); |
- __ Bind(slow_path->exit_label()); |
- __ lw(temp, FieldAddress(instance_reg, offset_in_bytes())); |
- __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag); |
- __ StoreDToOffset(value, |
- result_reg, |
- Double::value_offset() - kHeapObjectTag); |
- __ b(&done); |
__ Bind(&load_pointer); |
} |
__ lw(result_reg, Address(instance_reg, offset_in_bytes() - kHeapObjectTag)); |
@@ -2935,6 +2958,40 @@ void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
} |
+class BoxDoubleSlowPath : public SlowPathCode { |
+ public: |
+ explicit BoxDoubleSlowPath(Instruction* instruction) |
+ : instruction_(instruction) { } |
+ |
+ virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
+ __ Comment("BoxDoubleSlowPath"); |
+ __ Bind(entry_label()); |
+ const Class& double_class = compiler->double_class(); |
+ const Code& stub = |
+ Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
+ const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); |
+ |
+ LocationSummary* locs = instruction_->locs(); |
+ locs->live_registers()->Remove(locs->out()); |
+ |
+ compiler->SaveLiveRegisters(locs); |
+ compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
+ &label, |
+ PcDescriptors::kOther, |
+ locs); |
+ if (locs->out().reg() != V0) { |
+ __ mov(locs->out().reg(), V0); |
+ } |
+ compiler->RestoreLiveRegisters(locs); |
+ |
+ __ b(exit_label()); |
+ } |
+ |
+ private: |
+ Instruction* instruction_; |
+}; |
+ |
+ |
LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { |
const intptr_t kNumInputs = 1; |
const intptr_t kNumTemps = 1; |