Chromium Code Reviews

Unified Diff: runtime/vm/intermediate_language_mips.cc

Issue 150063004: Support reusable boxes for Float32x4 fields (Closed)
Base URL: https://dart.googlecode.com/svn/branches/bleeding_edge/dart
Patch Set: Created 6 years, 10 months ago
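Summary of the change below: StoreInstanceFieldSlowPath no longer carries a class and instead exposes per-type labels (double_entry_label / double_exit_label, aliases of the default labels for now, presumably so Float32x4 labels can be added later), a LoadFieldSlowPath is split out, and Field::IsUnboxedField() is introduced. The point of the issue title's "reusable boxes" is that a store into an unboxed field should overwrite the box the field already holds, allocating a fresh box only on the slow path. The following is a minimal, self-contained sketch of that reuse idea only; every name in it is hypothetical and unrelated to the actual VM classes.

    // Sketch (not from the patch): reuse an already-allocated box for an
    // unboxed double field; allocation happens only on the first store.
    #include <cstdio>
    #include <memory>

    struct DoubleBox {            // stand-in for a heap-allocated Double box
      double value;
    };

    struct Instance {
      std::unique_ptr<DoubleBox> field_box;  // backing box for the unboxed field

      void StoreUnboxedDouble(double v) {
        if (!field_box) {
          field_box = std::make_unique<DoubleBox>();  // slow path: allocate once
        }
        field_box->value = v;                         // fast path: reuse the box
      }
    };

    int main() {
      Instance obj;
      obj.StoreUnboxedDouble(1.5);   // first store allocates the box
      obj.StoreUnboxedDouble(2.5);   // later stores reuse it
      std::printf("%f\n", obj.field_box->value);
      return 0;
    }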
Index: runtime/vm/intermediate_language_mips.cc
diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc
index 450c24bc6e595402959ea7c1ddae95ff89f0751b..d7166389ae3b9256271f327b114750881c4e368c 100644
--- a/runtime/vm/intermediate_language_mips.cc
+++ b/runtime/vm/intermediate_language_mips.cc
@@ -1656,18 +1656,25 @@ void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
+bool Field::IsUnboxedField() const {
+ bool valid_class = (guarded_cid() == kDoubleCid);
+ return is_unboxing_candidate() && !is_final() && !is_nullable() &&
+ valid_class;
+}
+
+
class StoreInstanceFieldSlowPath : public SlowPathCode {
public:
- StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction,
- const Class& cls)
- : instruction_(instruction), cls_(cls) { }
+ explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction)
+ : instruction_(instruction) { }
virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
__ Comment("StoreInstanceFieldSlowPath");
- __ Bind(entry_label());
+ __ Bind(double_entry_label());
+ const Class& cls = compiler->double_class();
const Code& stub =
- Code::Handle(StubCode::GetAllocationStubForClass(cls_));
- const ExternalLabel label(cls_.ToCString(), stub.EntryPoint());
+ Code::Handle(StubCode::GetAllocationStubForClass(cls));
+ const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
LocationSummary* locs = instruction_->locs();
locs->live_registers()->Remove(locs->out());
@@ -1680,12 +1687,20 @@ class StoreInstanceFieldSlowPath : public SlowPathCode {
__ mov(locs->temp(0).reg(), V0);
compiler->RestoreLiveRegisters(locs);
- __ b(exit_label());
+ __ b(double_exit_label());
+ }
+
+ Label* double_entry_label() {
+ // Use default SlowPathCode label for double.
+ return entry_label();
+ }
+ Label* double_exit_label() {
+ // Use default SlowPathCode label for double.
+ return exit_label();
}
private:
StoreInstanceFieldInstr* instruction_;
- const Class& cls_;
};
@@ -1730,24 +1745,28 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
Register temp = locs()->temp(0).reg();
Register temp2 = locs()->temp(1).reg();
const intptr_t cid = field().UnboxedFieldCid();
+ StoreInstanceFieldSlowPath* slow_path =
+ new StoreInstanceFieldSlowPath(this);
+ compiler->AddSlowPathCode(slow_path);
if (is_initialization_) {
const Class* cls = NULL;
+ Label* entry_label = NULL;
+ Label* exit_label = NULL;
switch (cid) {
case kDoubleCid:
cls = &compiler->double_class();
+ entry_label = slow_path->double_entry_label();
+ exit_label = slow_path->double_exit_label();
break;
default:
UNREACHABLE();
}
- StoreInstanceFieldSlowPath* slow_path =
- new StoreInstanceFieldSlowPath(this, *cls);
- compiler->AddSlowPathCode(slow_path);
__ TryAllocate(*cls,
- slow_path->entry_label(),
+ entry_label,
temp,
temp2);
- __ Bind(slow_path->exit_label());
+ __ Bind(exit_label);
__ mov(temp2, temp);
__ StoreIntoObject(instance_reg,
FieldAddress(instance_reg, field().Offset()),
@@ -1797,7 +1816,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
&copy_double);
StoreInstanceFieldSlowPath* slow_path =
- new StoreInstanceFieldSlowPath(this, compiler->double_class());
+ new StoreInstanceFieldSlowPath(this);
compiler->AddSlowPathCode(slow_path);
if (!compiler->is_optimizing()) {
@@ -1806,10 +1825,10 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
__ TryAllocate(compiler->double_class(),
- slow_path->entry_label(),
+ slow_path->double_entry_label(),
temp,
temp2);
- __ Bind(slow_path->exit_label());
+ __ Bind(slow_path->double_exit_label());
__ mov(temp2, temp);
__ StoreIntoObject(instance_reg,
FieldAddress(instance_reg, field().Offset()),
@@ -1967,13 +1986,13 @@ void AllocateObjectWithBoundsCheckInstr::EmitNativeCode(
}
-class BoxDoubleSlowPath : public SlowPathCode {
+class LoadFieldSlowPath : public SlowPathCode {
public:
- explicit BoxDoubleSlowPath(Instruction* instruction)
+ explicit LoadFieldSlowPath(Instruction* instruction)
: instruction_(instruction) { }
virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
- __ Comment("BoxDoubleSlowPath");
+ __ Comment("LoadFieldSlowPath");
__ Bind(entry_label());
const Class& double_class = compiler->double_class();
const Code& stub =
@@ -1996,6 +2015,15 @@ class BoxDoubleSlowPath : public SlowPathCode {
__ b(exit_label());
}
+ Label* double_entry_label() {
+ // Use default SlowPathCode label for double.
+ return entry_label();
+ }
+ Label* double_exit_label() {
+ // Use default SlowPathCode label for double.
+ return exit_label();
+ }
+
private:
Instruction* instruction_;
};
@@ -2048,6 +2076,8 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (IsPotentialUnboxedLoad()) {
Register temp = locs()->temp(1).reg();
DRegister value = locs()->temp(0).fpu_reg();
+ LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this);
+ compiler->AddSlowPathCode(slow_path);
Label load_pointer;
Label load_double;
@@ -2067,26 +2097,26 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
// Fall through.
__ b(&load_pointer);
- __ Bind(&load_double);
+ {
+ __ Bind(&load_double);
- BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
- compiler->AddSlowPathCode(slow_path);
+ if (!compiler->is_optimizing()) {
+ locs()->live_registers()->Add(locs()->in(0));
+ }
- if (!compiler->is_optimizing()) {
- locs()->live_registers()->Add(locs()->in(0));
+ __ TryAllocate(compiler->double_class(),
+ slow_path->double_entry_label(),
+ result_reg,
+ temp);
+ __ Bind(slow_path->double_exit_label());
+ __ lw(temp, FieldAddress(instance_reg, offset_in_bytes()));
+ __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag);
+ __ StoreDToOffset(value,
+ result_reg,
+ Double::value_offset() - kHeapObjectTag);
+ __ b(&done);
}
- __ TryAllocate(compiler->double_class(),
- slow_path->entry_label(),
- result_reg,
- temp);
- __ Bind(slow_path->exit_label());
- __ lw(temp, FieldAddress(instance_reg, offset_in_bytes()));
- __ LoadDFromOffset(value, temp, Double::value_offset() - kHeapObjectTag);
- __ StoreDToOffset(value,
- result_reg,
- Double::value_offset() - kHeapObjectTag);
- __ b(&done);
__ Bind(&load_pointer);
}
__ lw(result_reg, Address(instance_reg, offset_in_bytes() - kHeapObjectTag));
@@ -2935,6 +2965,40 @@ void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
}
+class BoxDoubleSlowPath : public SlowPathCode {
+ public:
+ explicit BoxDoubleSlowPath(Instruction* instruction)
+ : instruction_(instruction) { }
+
+ virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+ __ Comment("BoxDoubleSlowPath");
+ __ Bind(entry_label());
+ const Class& double_class = compiler->double_class();
+ const Code& stub =
+ Code::Handle(StubCode::GetAllocationStubForClass(double_class));
+ const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+
+ LocationSummary* locs = instruction_->locs();
+ locs->live_registers()->Remove(locs->out());
+
+ compiler->SaveLiveRegisters(locs);
+ compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
+ &label,
+ PcDescriptors::kOther,
+ locs);
+ if (locs->out().reg() != V0) {
+ __ mov(locs->out().reg(), V0);
+ }
+ compiler->RestoreLiveRegisters(locs);
+
+ __ b(exit_label());
+ }
+
+ private:
+ Instruction* instruction_;
+};
+
+
LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const {
const intptr_t kNumInputs = 1;
const intptr_t kNumTemps = 1;
