Index: runtime/vm/intermediate_language_x64.cc
diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
index f47abad849b4a3c165f8aa6581e79a3564d59d75..ab508fe804896e16f224510619b2d8a2e00d11e2 100644
--- a/runtime/vm/intermediate_language_x64.cc
+++ b/runtime/vm/intermediate_language_x64.cc
@@ -1490,36 +1490,79 @@ void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 }
+bool Field::IsUnboxedField() const {
+  bool valid_class = (guarded_cid() == kDoubleCid) ||
+                     (guarded_cid() == kFloat32x4Cid);
+  return is_unboxing_candidate() && !is_final() && !is_nullable() &&
+         valid_class;
+}
+
+
 class StoreInstanceFieldSlowPath : public SlowPathCode {
  public:
-  StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction,
-                             const Class& cls)
-      : instruction_(instruction), cls_(cls) { }
+  explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction)
+      : instruction_(instruction) { }
   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
     __ Comment("StoreInstanceFieldSlowPath");
-    __ Bind(entry_label());
-    const Code& stub =
-        Code::Handle(StubCode::GetAllocationStubForClass(cls_));
-    const ExternalLabel label(cls_.ToCString(), stub.EntryPoint());
-
-    LocationSummary* locs = instruction_->locs();
-    locs->live_registers()->Remove(locs->out());
-
-    compiler->SaveLiveRegisters(locs);
-    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
-                           &label,
-                           PcDescriptors::kOther,
-                           locs);
-    __ MoveRegister(locs->temp(0).reg(), RAX);
-    compiler->RestoreLiveRegisters(locs);
+    {
+      __ Bind(double_entry_label());
+      const Class& cls = compiler->double_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(cls));
+      const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
+
+      LocationSummary* locs = instruction_->locs();
+      locs->live_registers()->Remove(locs->out());
+
+      compiler->SaveLiveRegisters(locs);
+      compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                             &label,
+                             PcDescriptors::kOther,
+                             locs);
+      __ MoveRegister(locs->temp(0).reg(), RAX);
+      compiler->RestoreLiveRegisters(locs);
+
+      __ jmp(double_exit_label());
+    }
+    {
+      __ Bind(float32x4_entry_label());
+      const Class& cls = compiler->float32x4_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(cls));
+      const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
+
+      LocationSummary* locs = instruction_->locs();
+      locs->live_registers()->Remove(locs->out());
+
+      compiler->SaveLiveRegisters(locs);
+      compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                             &label,
+                             PcDescriptors::kOther,
+                             locs);
+      __ MoveRegister(locs->temp(0).reg(), RAX);
+      compiler->RestoreLiveRegisters(locs);
+
+      __ jmp(float32x4_exit_label());
+    }
+  }
-    __ jmp(exit_label());
+  Label* double_entry_label() {
+    // Use default SlowPathCode label for double.
+    return entry_label();
   }
+  Label* double_exit_label() {
+    // Use default SlowPathCode label for double.
+    return exit_label();
+  }
+
+  Label* float32x4_entry_label() { return &float32x4_entry_label_; }
+  Label* float32x4_exit_label() { return &float32x4_exit_label_; }
  private:
+  Label float32x4_entry_label_;
+  Label float32x4_exit_label_;
   StoreInstanceFieldInstr* instruction_;
-  const Class& cls_;
 };
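
For orientation: the reworked slow path keeps a single out-of-line stub-call sequence per boxed class, each reached through its own entry/exit label pair (double reuses SlowPathCode's default labels, Float32x4 gets a dedicated pair). A rough standalone sketch of that control flow, using hypothetical TryAllocateInline/CallAllocationStub helpers rather than the VM's Assembler or SlowPathCode API:

#include <cstdio>
#include <optional>

// Hypothetical stand-ins for the generated fast path and the allocation stub.
std::optional<void*> TryAllocateInline(const char* cls) {
  (void)cls;
  return std::nullopt;  // Pretend the inline new-space allocation failed.
}

void* CallAllocationStub(const char* cls) {
  std::printf("slow path: calling allocation stub for %s\n", cls);
  return nullptr;
}

// One slow-path object serving two box classes, mirroring
// double_entry_label() vs. float32x4_entry_label(): the fast path jumps to
// the entry matching the class it failed to allocate, and the stub-call
// sequence jumps back to the matching exit label.
void* AllocateBox(const char* cls) {
  if (std::optional<void*> obj = TryAllocateInline(cls)) {  // __ TryAllocate(...)
    return *obj;
  }
  return CallAllocationStub(cls);  // __ Bind(<cls>_entry_label()); GenerateCall(stub)
}                                  // __ Bind(<cls>_exit_label())

int main() {
  AllocateBox("Double");
  AllocateBox("Float32x4");
  return 0;
}
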
@@ -1566,25 +1609,34 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     const intptr_t cid = field().UnboxedFieldCid();
     if (is_initialization_) {
+      StoreInstanceFieldSlowPath* slow_path =
+          new StoreInstanceFieldSlowPath(this);
+      compiler->AddSlowPathCode(slow_path);
+
       const Class* cls = NULL;
+      Label* entry_label = NULL;
+      Label* exit_label = NULL;
       switch (cid) {
         case kDoubleCid:
           cls = &compiler->double_class();
+          entry_label = slow_path->double_entry_label();
+          exit_label = slow_path->double_exit_label();
+          break;
+        case kFloat32x4Cid:
+          cls = &compiler->float32x4_class();
+          entry_label = slow_path->float32x4_entry_label();
+          exit_label = slow_path->float32x4_exit_label();
           break;
-        // TODO(johnmccutchan): Add kFloat32x4Cid here.
         default:
           UNREACHABLE();
       }
-      StoreInstanceFieldSlowPath* slow_path =
-          new StoreInstanceFieldSlowPath(this, *cls);
-      compiler->AddSlowPathCode(slow_path);
       __ TryAllocate(*cls,
-                     slow_path->entry_label(),
+                     entry_label,
                      Assembler::kFarJump,
                      temp,
                      PP);
-      __ Bind(slow_path->exit_label());
+      __ Bind(exit_label);
       __ movq(temp2, temp);
       __ StoreIntoObject(instance_reg,
                          FieldAddress(instance_reg, field().Offset()),
@@ -1594,9 +1646,13 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     }
     switch (cid) {
       case kDoubleCid:
-        __ movsd(FieldAddress(temp, Double::value_offset()), value);
-        // TODO(johnmccutchan): Add kFloat32x4Cid here.
-        break;
+        __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
+        __ movsd(FieldAddress(temp, Double::value_offset()), value);
+        break;
+      case kFloat32x4Cid:
+        __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
+        __ movups(FieldAddress(temp, Float32x4::value_offset()), value);
+        break;
       default:
         UNREACHABLE();
     }
@@ -1610,8 +1666,8 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     FpuRegister fpu_temp = locs()->temp(2).fpu_reg();
     Label store_pointer;
-    Label copy_double;
     Label store_double;
+    Label store_float32x4;
     __ LoadObject(temp, Field::ZoneHandle(field().raw()), PP);
@@ -1627,38 +1683,75 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
             Immediate(kDoubleCid));
     __ j(EQUAL, &store_double);
+    __ cmpq(FieldAddress(temp, Field::guarded_cid_offset()),
+            Immediate(kFloat32x4Cid));
+    __ j(EQUAL, &store_float32x4);
+
     // Fall through.
     __ jmp(&store_pointer);
-    __ Bind(&store_double);
-
-    __ movq(temp, FieldAddress(instance_reg, field().Offset()));
-    __ CompareObject(temp, Object::null_object(), PP);
-    __ j(NOT_EQUAL, &copy_double);
-
     StoreInstanceFieldSlowPath* slow_path =
-        new StoreInstanceFieldSlowPath(this, compiler->double_class());
+        new StoreInstanceFieldSlowPath(this);
     compiler->AddSlowPathCode(slow_path);
-    if (!compiler->is_optimizing()) {
-      locs()->live_registers()->Add(locs()->in(0));
-      locs()->live_registers()->Add(locs()->in(1));
+    {
+      __ Bind(&store_double);
+      Label copy_double;
+
+      __ movq(temp, FieldAddress(instance_reg, field().Offset()));
+      __ CompareObject(temp, Object::null_object(), PP);
+      __ j(NOT_EQUAL, &copy_double);
+
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+        locs()->live_registers()->Add(locs()->in(1));
+      }
+      __ TryAllocate(compiler->double_class(),
+                     slow_path->double_entry_label(),
+                     Assembler::kFarJump,
+                     temp,
+                     PP);
+      __ Bind(slow_path->double_exit_label());
+      __ movq(temp2, temp);
+      __ StoreIntoObject(instance_reg,
+                         FieldAddress(instance_reg, field().Offset()),
+                         temp2);
+
+      __ Bind(&copy_double);
+      __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
+      __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
+      __ jmp(&skip_store);
+    }
+
+    {
+      __ Bind(&store_float32x4);
+      Label copy_float32x4;
+
+      __ movq(temp, FieldAddress(instance_reg, field().Offset()));
+      __ CompareObject(temp, Object::null_object(), PP);
+      __ j(NOT_EQUAL, &copy_float32x4);
+
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+        locs()->live_registers()->Add(locs()->in(1));
+      }
+      __ TryAllocate(compiler->float32x4_class(),
+                     slow_path->float32x4_entry_label(),
+                     Assembler::kFarJump,
+                     temp,
+                     PP);
+      __ Bind(slow_path->float32x4_exit_label());
+      __ movq(temp2, temp);
+      __ StoreIntoObject(instance_reg,
+                         FieldAddress(instance_reg, field().Offset()),
+                         temp2);
+
+      __ Bind(&copy_float32x4);
+      __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset()));
+      __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp);
+      __ jmp(&skip_store);
     }
-    __ TryAllocate(compiler->double_class(),
-                   slow_path->entry_label(),
-                   Assembler::kFarJump,
-                   temp,
-                   PP);
-    __ Bind(slow_path->exit_label());
-    __ movq(temp2, temp);
-    __ StoreIntoObject(instance_reg,
-                       FieldAddress(instance_reg, field().Offset()),
-                       temp2);
-    __ Bind(&copy_double);
-    __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
-    __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
-    __ jmp(&skip_store);
     __ Bind(&store_pointer);
   }
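
The store_double and store_float32x4 blocks above follow the same shape: if the field already holds a box, overwrite the raw value inside it; only when the field is still null is a fresh box allocated (fast-path TryAllocate, otherwise the slow path's allocation stub) and stored back into the instance. A simplified model of that decision, with a hypothetical Instance/DoubleBox pair standing in for the VM's heap objects (boxes are GC-managed in the VM, so the sketch never frees them):

#include <cstdio>

// Hypothetical mutable box: the instance's field points at it, and unboxed
// stores overwrite value_ in place instead of allocating a new box each time.
struct DoubleBox {
  double value_;
};

struct Instance {
  DoubleBox* unboxed_field_ = nullptr;  // starts out null, like a fresh field
};

void StoreUnboxedDouble(Instance* instance, double value) {
  DoubleBox* box = instance->unboxed_field_;
  if (box == nullptr) {              // __ CompareObject(temp, null_object)
    box = new DoubleBox();           // __ TryAllocate / slow-path stub call
    instance->unboxed_field_ = box;  // __ StoreIntoObject(...)
  }
  box->value_ = value;               // __ Bind(&copy_double); __ movsd(...)
}

int main() {
  Instance obj;
  StoreUnboxedDouble(&obj, 1.5);  // allocates the box
  StoreUnboxedDouble(&obj, 2.5);  // reuses it
  std::printf("%f\n", obj.unboxed_field_->value_);
  return 0;
}
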
@@ -1799,34 +1892,70 @@ void AllocateObjectWithBoundsCheckInstr::EmitNativeCode(
 }
-class BoxDoubleSlowPath : public SlowPathCode {
+class LoadFieldSlowPath : public SlowPathCode {
  public:
-  explicit BoxDoubleSlowPath(Instruction* instruction)
+  explicit LoadFieldSlowPath(Instruction* instruction)
       : instruction_(instruction) { }
   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
-    __ Comment("BoxDoubleSlowPath");
-    __ Bind(entry_label());
-    const Class& double_class = compiler->double_class();
-    const Code& stub =
-        Code::Handle(StubCode::GetAllocationStubForClass(double_class));
-    const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
-
-    LocationSummary* locs = instruction_->locs();
-    locs->live_registers()->Remove(locs->out());
-
-    compiler->SaveLiveRegisters(locs);
-    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
-                           &label,
-                           PcDescriptors::kOther,
-                           locs);
-    __ MoveRegister(locs->out().reg(), RAX);
-    compiler->RestoreLiveRegisters(locs);
+    __ Comment("LoadFieldSlowPath");
+    {
+      __ Bind(double_entry_label());
+      const Class& double_class = compiler->double_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(double_class));
+      const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+
+      LocationSummary* locs = instruction_->locs();
+      locs->live_registers()->Remove(locs->out());
+
+      compiler->SaveLiveRegisters(locs);
+      compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                             &label,
+                             PcDescriptors::kOther,
+                             locs);
+      __ MoveRegister(locs->out().reg(), RAX);
+      compiler->RestoreLiveRegisters(locs);
+
+      __ jmp(double_exit_label());
+    }
+    {
+      __ Bind(float32x4_entry_label());
+      const Class& float32x4_class = compiler->float32x4_class();
+      const Code& stub =
+          Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class));
+      const ExternalLabel label(float32x4_class.ToCString(), stub.EntryPoint());
+
+      LocationSummary* locs = instruction_->locs();
+      locs->live_registers()->Remove(locs->out());
+
+      compiler->SaveLiveRegisters(locs);
+      compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                             &label,
+                             PcDescriptors::kOther,
+                             locs);
+      __ MoveRegister(locs->out().reg(), RAX);
+      compiler->RestoreLiveRegisters(locs);
+
+      __ jmp(float32x4_exit_label());
+    }
+  }
-    __ jmp(exit_label());
+  Label* double_entry_label() {
+    // Use default SlowPathCode label for double.
+    return entry_label();
   }
+  Label* double_exit_label() {
+    // Use default SlowPathCode label for double.
+    return exit_label();
+  }
+
+  Label* float32x4_entry_label() { return &float32x4_entry_label_; }
+  Label* float32x4_exit_label() { return &float32x4_exit_label_; }
  private:
+  Label float32x4_entry_label_;
+  Label float32x4_exit_label_;
   Instruction* instruction_;
 };
@@ -1864,9 +1993,13 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     intptr_t cid = field()->UnboxedFieldCid();
     switch (cid) {
       case kDoubleCid:
+        __ Comment("UnboxedDoubleLoadFieldInstr");
         __ movsd(result, FieldAddress(temp, Double::value_offset()));
         break;
-      // TODO(johnmccutchan): Add Float32x4 path here.
+      case kFloat32x4Cid:
+        __ Comment("UnboxedFloat32x4LoadFieldInstr");
+        __ movups(result, FieldAddress(temp, Float32x4::value_offset()));
+        break;
       default:
         UNREACHABLE();
     }
@@ -1878,9 +2011,12 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   if (IsPotentialUnboxedLoad()) {
     Register temp = locs()->temp(1).reg();
     XmmRegister value = locs()->temp(0).fpu_reg();
+    LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this);
+    compiler->AddSlowPathCode(slow_path);
     Label load_pointer;
     Label load_double;
+    Label load_float32x4;
     __ LoadObject(result, Field::ZoneHandle(field()->raw()), PP);
@@ -1892,30 +2028,49 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
             Immediate(kDoubleCid));
     __ j(EQUAL, &load_double);
+    __ cmpq(FieldAddress(result, Field::guarded_cid_offset()),
+            Immediate(kFloat32x4Cid));
+    __ j(EQUAL, &load_float32x4);
+
     // Fall through.
     __ jmp(&load_pointer);
-    __ Bind(&load_double);
+    {
+      __ Bind(&load_double);
-    BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
-    compiler->AddSlowPathCode(slow_path);
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+      }
-    if (!compiler->is_optimizing()) {
-      locs()->live_registers()->Add(locs()->in(0));
+      __ TryAllocate(compiler->double_class(),
+                     slow_path->double_entry_label(),
+                     Assembler::kFarJump,
+                     result,
+                     PP);
+      __ Bind(slow_path->double_exit_label());
+      __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
+      __ movsd(value, FieldAddress(temp, Double::value_offset()));
+      __ movsd(FieldAddress(result, Double::value_offset()), value);
+      __ jmp(&done);
     }
+    {
+      __ Bind(&load_float32x4);
-    __ TryAllocate(compiler->double_class(),
-                   slow_path->entry_label(),
-                   Assembler::kFarJump,
-                   result,
-                   PP);
-    __ Bind(slow_path->exit_label());
-    __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
-    __ movsd(value, FieldAddress(temp, Double::value_offset()));
-    __ movsd(FieldAddress(result, Double::value_offset()), value);
-    __ jmp(&done);
+      if (!compiler->is_optimizing()) {
+        locs()->live_registers()->Add(locs()->in(0));
+      }
-    // TODO(johnmccutchan): Add Float32x4 path here.
+      __ TryAllocate(compiler->float32x4_class(),
+                     slow_path->float32x4_entry_label(),
+                     Assembler::kFarJump,
+                     result,
+                     PP);
+      __ Bind(slow_path->float32x4_exit_label());
+      __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
+      __ movups(value, FieldAddress(temp, Float32x4::value_offset()));
+      __ movups(FieldAddress(result, Float32x4::value_offset()), value);
+      __ jmp(&done);
+    }
     __ Bind(&load_pointer);
   }
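
The load_double and load_float32x4 blocks are the mirror image of the store side: since later stores mutate the field's box in place, LoadFieldInstr allocates a fresh box for its result and copies the raw lanes out instead of returning the field's own box. A standalone sketch of that aliasing concern, again with hypothetical types rather than VM classes:

#include <cstdio>

// Hypothetical mutable box and instance, as in the store sketch above.
struct Float32x4Box {
  float value_[4];
};

struct Instance {
  Float32x4Box* unboxed_field_ = nullptr;
};

// A load must not hand out the field's own box: later stores mutate it in
// place, so the caller gets a freshly allocated copy of the raw lanes.
Float32x4Box* LoadUnboxedFloat32x4(const Instance* instance) {
  Float32x4Box* result = new Float32x4Box();  // TryAllocate / slow-path stub
  for (int i = 0; i < 4; i++) {               // __ movups copy out of the box
    result->value_[i] = instance->unboxed_field_->value_[i];
  }
  return result;
}

int main() {
  Instance obj;
  obj.unboxed_field_ = new Float32x4Box{{1.0f, 2.0f, 3.0f, 4.0f}};
  Float32x4Box* loaded = LoadUnboxedFloat32x4(&obj);
  obj.unboxed_field_->value_[0] = 9.0f;    // mutating the field's box...
  std::printf("%f\n", loaded->value_[0]);  // ...leaves the loaded copy at 1.0
  return 0;
}
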
@@ -2885,6 +3040,38 @@ void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 }
+class BoxDoubleSlowPath : public SlowPathCode {
+ public:
+  explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction)
+      : instruction_(instruction) { }
+
+  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    __ Comment("BoxDoubleSlowPath");
+    __ Bind(entry_label());
+    const Class& double_class = compiler->double_class();
+    const Code& stub =
+        Code::Handle(StubCode::GetAllocationStubForClass(double_class));
+    const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+
+    LocationSummary* locs = instruction_->locs();
+    locs->live_registers()->Remove(locs->out());
+
+    compiler->SaveLiveRegisters(locs);
+    compiler->GenerateCall(Scanner::kNoSourcePos,  // No token position.
+                           &label,
+                           PcDescriptors::kOther,
+                           locs);
+    __ MoveRegister(locs->out().reg(), RAX);
+    compiler->RestoreLiveRegisters(locs);
+
+    __ jmp(exit_label());
+  }
+
+ private:
+  BoxDoubleInstr* instruction_;
+};
+
+
 LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const {
   const intptr_t kNumInputs = 1;
   const intptr_t kNumTemps = 0;
@@ -2956,19 +3143,6 @@ void UnboxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 }
-LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const {
-  const intptr_t kNumInputs = 1;
-  const intptr_t kNumTemps = 0;
-  LocationSummary* summary =
-      new LocationSummary(kNumInputs,
-                          kNumTemps,
-                          LocationSummary::kCallOnSlowPath);
-  summary->set_in(0, Location::RequiresFpuRegister());
-  summary->set_out(Location::RequiresRegister());
-  return summary;
-}
-
-
 class BoxFloat32x4SlowPath : public SlowPathCode {
  public:
   explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction)
@@ -3001,6 +3175,19 @@ class BoxFloat32x4SlowPath : public SlowPathCode {
 };
+LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const {
+  const intptr_t kNumInputs = 1;
+  const intptr_t kNumTemps = 0;
+  LocationSummary* summary =
+      new LocationSummary(kNumInputs,
+                          kNumTemps,
+                          LocationSummary::kCallOnSlowPath);
+  summary->set_in(0, Location::RequiresFpuRegister());
+  summary->set_out(Location::RequiresRegister());
+  return summary;
+}
+
+
 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
   BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this);
   compiler->AddSlowPathCode(slow_path);
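
Tying the pieces together: every unboxed path above is only reachable for fields that Field::IsUnboxedField(), added at the top of this patch, approves — unboxing candidates that are neither final nor nullable and whose guarded class id is exactly kDoubleCid or kFloat32x4Cid. A standalone restatement of that predicate, with a hypothetical FieldInfo struct and class-id enum standing in for the VM's Field and class ids:

#include <cstdio>

// Hypothetical class ids and field summary; the VM keeps this state on Field.
enum ClassId { kSmiCid, kDoubleCid, kFloat32x4Cid, kDynamicCid };

struct FieldInfo {
  ClassId guarded_cid;
  bool is_unboxing_candidate;
  bool is_final;
  bool is_nullable;
};

// Mirrors Field::IsUnboxedField(): only non-final, non-nullable fields whose
// observed type is exactly Double or Float32x4 get the unboxed representation.
bool IsUnboxedField(const FieldInfo& f) {
  bool valid_class =
      (f.guarded_cid == kDoubleCid) || (f.guarded_cid == kFloat32x4Cid);
  return f.is_unboxing_candidate && !f.is_final && !f.is_nullable && valid_class;
}

int main() {
  FieldInfo simd_field = {kFloat32x4Cid, true, false, false};
  FieldInfo nullable_double = {kDoubleCid, true, false, true};
  std::printf("%d %d\n", IsUnboxedField(simd_field),       // 1
                         IsUnboxedField(nullable_double));  // 0
  return 0;
}
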