| Index: runtime/vm/intermediate_language_mips.cc
|
| diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc
|
| index 450c24bc6e595402959ea7c1ddae95ff89f0751b..007b44c11b4c1321f25937fd0df6fbcf13b690e9 100644
|
| --- a/runtime/vm/intermediate_language_mips.cc
|
| +++ b/runtime/vm/intermediate_language_mips.cc
|
| @@ -1740,9 +1740,11 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| default:
|
| UNREACHABLE();
|
| }
|
| +
|
| StoreInstanceFieldSlowPath* slow_path =
|
| new StoreInstanceFieldSlowPath(this, *cls);
|
| compiler->AddSlowPathCode(slow_path);
|
| +
|
| __ TryAllocate(*cls,
|
| slow_path->entry_label(),
|
| temp,
|
| @@ -1772,7 +1774,6 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| DRegister fpu_temp = locs()->temp(2).fpu_reg();
|
|
|
| Label store_pointer;
|
| - Label copy_double;
|
| Label store_double;
|
|
|
| __ LoadObject(temp, Field::ZoneHandle(field().raw()));
|
| @@ -1790,37 +1791,42 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| // Fall through.
|
| __ b(&store_pointer);
|
|
|
| - __ Bind(&store_double);
|
| -
|
| - __ lw(temp, FieldAddress(instance_reg, field().Offset()));
|
| - __ BranchNotEqual(temp, reinterpret_cast<int32_t>(Object::null()),
|
| &copy_double);
|
| -
|
| - StoreInstanceFieldSlowPath* slow_path =
|
| - new StoreInstanceFieldSlowPath(this, compiler->double_class());
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| if (!compiler->is_optimizing()) {
|
| locs()->live_registers()->Add(locs()->in(0));
|
| locs()->live_registers()->Add(locs()->in(1));
|
| }
|
|
|
| - __ TryAllocate(compiler->double_class(),
|
| - slow_path->entry_label(),
|
| - temp,
|
| - temp2);
|
| - __ Bind(slow_path->exit_label());
|
| - __ mov(temp2, temp);
|
| - __ StoreIntoObject(instance_reg,
|
| - FieldAddress(instance_reg, field().Offset()),
|
| - temp2);
|
| + {
|
| + __ Bind(&store_double);
|
| + Label copy_double;
|
| +
|
| + __ lw(temp, FieldAddress(instance_reg, field().Offset()));
|
| + __ BranchNotEqual(temp, reinterpret_cast<int32_t>(Object::null()),
|
| + &copy_double);
|
| +
|
| + StoreInstanceFieldSlowPath* slow_path =
|
| + new StoreInstanceFieldSlowPath(this, compiler->double_class());
|
| + compiler->AddSlowPathCode(slow_path);
|
| +
|
| + __ TryAllocate(compiler->double_class(),
|
| + slow_path->entry_label(),
|
| + temp,
|
| + temp2);
|
| + __ Bind(slow_path->exit_label());
|
| + __ mov(temp2, temp);
|
| + __ StoreIntoObject(instance_reg,
|
| + FieldAddress(instance_reg, field().Offset()),
|
| + temp2);
|
| +
|
| + __ Bind(&copy_double);
|
| + __ LoadDFromOffset(fpu_temp,
|
| + value_reg,
|
| + Double::value_offset() - kHeapObjectTag);
|
| + __ StoreDToOffset(fpu_temp, temp,
|
| + Double::value_offset() - kHeapObjectTag);
|
| + __ b(&skip_store);
|
| + }
|
|
|
| - __ Bind(&copy_double);
|
| - __ LoadDFromOffset(fpu_temp,
|
| - value_reg,
|
| - Double::value_offset() - kHeapObjectTag);
|
| - __ StoreDToOffset(fpu_temp, temp, Double::value_offset() - kHeapObjectTag);
|
| - __ b(&skip_store);
|
| __ Bind(&store_pointer);
|
| }
|
|
|
| @@ -2067,26 +2073,28 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| // Fall through.
|
| __ b(&load_pointer);
|
|
|
| - __ Bind(&load_double);
|
| -
|
| - BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| if (!compiler->is_optimizing()) {
|
| locs()->live_registers()->Add(locs()->in(0));
|
| }
|
|
|
| - __ TryAllocate(compiler->double_class(),
|
| - slow_path->entry_label(),
|
| - result_reg,
|
| - temp);
|
| - __ Bind(slow_path->exit_label());
|
| - __ lw(temp, FieldAddress(instance_reg, offset_in_bytes()));
|
| - __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag);
|
| - __ StoreDToOffset(value,
|
| - result_reg,
|
| - Double::value_offset() - kHeapObjectTag);
|
| - __ b(&done);
|
| + {
|
| + __ Bind(&load_double);
|
| + BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
|
| + compiler->AddSlowPathCode(slow_path);
|
| +
|
| + __ TryAllocate(compiler->double_class(),
|
| + slow_path->entry_label(),
|
| + result_reg,
|
| + temp);
|
| + __ Bind(slow_path->exit_label());
|
| + __ lw(temp, FieldAddress(instance_reg, offset_in_bytes()));
|
| + __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag);
|
| + __ StoreDToOffset(value,
|
| + result_reg,
|
| + Double::value_offset() - kHeapObjectTag);
|
| + __ b(&done);
|
| + }
|
| +
|
| __ Bind(&load_pointer);
|
| }
|
| __ lw(result_reg, Address(instance_reg, offset_in_bytes() - kHeapObjectTag));
|
|
|