| Index: runtime/vm/intermediate_language_ia32.cc |
| diff --git a/runtime/vm/intermediate_language_ia32.cc b/runtime/vm/intermediate_language_ia32.cc |
| index c1b3e0435bfc0b34ac238c4fff8c7fa10052858e..323fbdb46135337579620880d43a47607f5437c4 100644 |
| --- a/runtime/vm/intermediate_language_ia32.cc |
| +++ b/runtime/vm/intermediate_language_ia32.cc |
| @@ -15,6 +15,7 @@ |
| #include "vm/stack_frame.h" |
| #include "vm/stub_code.h" |
| #include "vm/symbols.h" |
| +#include "vm/il_printer.h" |
|
srdjan (2014/02/05 22:20:09): Please use alphabetic order.
Cutch (2014/02/05 23:16:02): Removed.
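
(For reference, a minimal sketch of the alphabetized include block the reviewer is asking for, assuming the new header were kept; per the reply above, the include was removed in the final revision instead:)

    #include "vm/il_printer.h"   // sorts before stack_frame.h
    #include "vm/stack_frame.h"
    #include "vm/stub_code.h"
    #include "vm/symbols.h"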
|
| #define __ compiler->assembler()-> |
| @@ -1590,36 +1591,101 @@ void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } |
| +static void PrintLocationSummary(const char* prefix, LocationSummary* locs) { |
| + return; |
| + char str[4000]; |
| + BufferFormatter f(str, sizeof(str)); |
| + locs->PrintTo(&f); |
| + printf("%s: [%" Pd "] %s\n", prefix, locs->stack_bitmap()->Length(), str); |
| +} |
| + |
| + |
| +static void PrintStack(const char* prefix, FlowGraphCompiler* compiler) { |
| + return; |
| + printf("%s: StackSize() -> %" Pd "\n", prefix, compiler->StackSize()); |
| +} |
| + |
| + |
| class StoreInstanceFieldSlowPath : public SlowPathCode { |
| public: |
| - StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction, |
| - const Class& cls) |
| - : instruction_(instruction), cls_(cls) { } |
| + explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction) |
| + : instruction_(instruction) { } |
| virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| __ Comment("StoreInstanceFieldSlowPath"); |
| - __ Bind(entry_label()); |
| - const Code& stub = |
| - Code::Handle(StubCode::GetAllocationStubForClass(cls_)); |
| - const ExternalLabel label(cls_.ToCString(), stub.EntryPoint()); |
| - |
| - LocationSummary* locs = instruction_->locs(); |
| - locs->live_registers()->Remove(locs->out()); |
| - |
| - compiler->SaveLiveRegisters(locs); |
| - compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| - &label, |
| - PcDescriptors::kOther, |
| - locs); |
| - __ MoveRegister(locs->temp(0).reg(), EAX); |
| - compiler->RestoreLiveRegisters(locs); |
| + { |
| + __ Bind(double_entry_label()); |
| + const Class& cls = compiler->double_class(); |
| + const Code& stub = |
| + Code::Handle(StubCode::GetAllocationStubForClass(cls)); |
| + const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); |
| + |
| + LocationSummary* locs = instruction_->locs(); |
| + PrintLocationSummary("A", locs); |
| + PrintStack("A", compiler); |
| + locs->live_registers()->Remove(locs->out()); |
| + PrintLocationSummary("B", locs); |
| + PrintStack("B", compiler); |
| + compiler->SaveLiveRegisters(locs); |
| + PrintLocationSummary("C", locs); |
| + PrintStack("C", compiler); |
| + compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| + &label, |
| + PcDescriptors::kOther, |
| + locs); |
| + PrintLocationSummary("D", locs); |
| + PrintStack("D", compiler); |
| + __ MoveRegister(locs->temp(0).reg(), EAX); |
| + compiler->RestoreLiveRegisters(locs); |
| + PrintLocationSummary("E", locs); |
| + PrintStack("E", compiler); |
| + __ jmp(double_exit_label()); |
| + } |
| + { |
| + __ Bind(float32x4_entry_label()); |
| + const Class& cls = compiler->float32x4_class(); |
| + const Code& stub = |
| + Code::Handle(StubCode::GetAllocationStubForClass(cls)); |
| + const ExternalLabel label(cls.ToCString(), stub.EntryPoint()); |
| + LocationSummary* locs = instruction_->locs(); |
| + PrintLocationSummary("F", locs); |
| + PrintStack("F", compiler); |
| + locs->live_registers()->Remove(locs->out()); |
| + PrintLocationSummary("G", locs); |
| + PrintStack("G", compiler); |
| + compiler->SaveLiveRegisters(locs); |
| + PrintLocationSummary("H", locs); |
| + PrintStack("H", compiler); |
| + compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| + &label, |
| + PcDescriptors::kOther, |
| + locs); |
| + PrintLocationSummary("I", locs); |
| + PrintStack("I", compiler); |
| + __ MoveRegister(locs->temp(0).reg(), EAX); |
| + compiler->RestoreLiveRegisters(locs); |
| + PrintLocationSummary("J", locs); |
| + PrintStack("J", compiler); |
| + __ jmp(float32x4_exit_label()); |
| + } |
| + } |
| - __ jmp(exit_label()); |
| + Label* double_entry_label() { |
| + // Use default SlowPathCode label for double. |
| + return entry_label(); |
| + } |
| + Label* double_exit_label() { |
| + // Use default SlowPathCode label for double. |
| + return exit_label(); |
| } |
| + Label* float32x4_entry_label() { return &float32x4_entry_label_; } |
| + Label* float32x4_exit_label() { return &float32x4_exit_label_; } |
| + |
| private: |
| + Label float32x4_entry_label_; |
| + Label float32x4_exit_label_; |
| StoreInstanceFieldInstr* instruction_; |
| - const Class& cls_; |
| }; |
| @@ -1666,25 +1732,34 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| const intptr_t cid = field().UnboxedFieldCid(); |
| if (is_initialization_) { |
| + StoreInstanceFieldSlowPath* slow_path = |
| + new StoreInstanceFieldSlowPath(this); |
| + compiler->AddSlowPathCode(slow_path); |
| + |
| const Class* cls = NULL; |
| + Label* entry_label = NULL; |
| + Label* exit_label = NULL; |
| switch (cid) { |
| case kDoubleCid: |
| cls = &compiler->double_class(); |
| + entry_label = slow_path->double_entry_label(); |
| + exit_label = slow_path->double_exit_label(); |
| + break; |
| + case kFloat32x4Cid: |
| + cls = &compiler->float32x4_class(); |
| + entry_label = slow_path->float32x4_entry_label(); |
| + exit_label = slow_path->float32x4_exit_label(); |
| break; |
| - // TODO(johnmccutchan): Add kFloat32x4Cid here. |
| default: |
| UNREACHABLE(); |
| } |
| - StoreInstanceFieldSlowPath* slow_path = |
| - new StoreInstanceFieldSlowPath(this, *cls); |
| - compiler->AddSlowPathCode(slow_path); |
| __ TryAllocate(*cls, |
| - slow_path->entry_label(), |
| + entry_label, |
| Assembler::kFarJump, |
| temp, |
| temp2); |
| - __ Bind(slow_path->exit_label()); |
| + __ Bind(exit_label); |
| __ movl(temp2, temp); |
| __ StoreIntoObject(instance_reg, |
| FieldAddress(instance_reg, field().Offset()), |
| @@ -1694,8 +1769,12 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } |
| switch (cid) { |
| case kDoubleCid: |
| - __ movsd(FieldAddress(temp, Double::value_offset()), value); |
| - // TODO(johnmccutchan): Add kFloat32x4Cid here. |
| + __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); |
| + __ movsd(FieldAddress(temp, Double::value_offset()), value); |
| + break; |
| + case kFloat32x4Cid: |
| + __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr"); |
| + __ movups(FieldAddress(temp, Float32x4::value_offset()), value); |
| break; |
| default: |
| UNREACHABLE(); |
| @@ -1710,8 +1789,8 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| FpuRegister fpu_temp = locs()->temp(2).fpu_reg(); |
| Label store_pointer; |
| - Label copy_double; |
| Label store_double; |
| + Label store_float32x4; |
| __ LoadObject(temp, Field::ZoneHandle(field().raw())); |
| @@ -1727,41 +1806,80 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| Immediate(kDoubleCid)); |
| __ j(EQUAL, &store_double); |
| + __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()), |
| + Immediate(kFloat32x4Cid)); |
| + __ j(EQUAL, &store_float32x4); |
| + |
| // Fall through. |
| __ jmp(&store_pointer); |
| - __ Bind(&store_double); |
| - |
| - const Immediate& raw_null = |
| - Immediate(reinterpret_cast<intptr_t>(Object::null())); |
| - __ movl(temp, FieldAddress(instance_reg, field().Offset())); |
| - __ cmpl(temp, raw_null); |
| - __ j(NOT_EQUAL, ©_double); |
| - |
| StoreInstanceFieldSlowPath* slow_path = |
| - new StoreInstanceFieldSlowPath(this, compiler->double_class()); |
| + new StoreInstanceFieldSlowPath(this); |
| compiler->AddSlowPathCode(slow_path); |
| - if (!compiler->is_optimizing()) { |
| - locs()->live_registers()->Add(locs()->in(0)); |
| - locs()->live_registers()->Add(locs()->in(1)); |
| + { |
| + __ Bind(&store_double); |
| + Label copy_double; |
| + |
| + const Immediate& raw_null = |
| + Immediate(reinterpret_cast<intptr_t>(Object::null())); |
| + __ movl(temp, FieldAddress(instance_reg, field().Offset())); |
| + __ cmpl(temp, raw_null); |
| + __ j(NOT_EQUAL, ©_double); |
| + |
| + if (!compiler->is_optimizing()) { |
| + locs()->live_registers()->Add(locs()->in(0)); |
| + locs()->live_registers()->Add(locs()->in(1)); |
| + } |
| + |
| + __ TryAllocate(compiler->double_class(), |
| + slow_path->double_entry_label(), |
| + Assembler::kFarJump, |
| + temp, |
| + temp2); |
| + __ Bind(slow_path->double_exit_label()); |
| + __ movl(temp2, temp); |
| + __ StoreIntoObject(instance_reg, |
| + FieldAddress(instance_reg, field().Offset()), |
| + temp2); |
| + |
| + __ Bind(©_double); |
| + __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); |
| + __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); |
| + __ jmp(&skip_store); |
| } |
| - __ TryAllocate(compiler->double_class(), |
| - slow_path->entry_label(), |
| - Assembler::kFarJump, |
| - temp, |
| - temp2); |
| - __ Bind(slow_path->exit_label()); |
| - __ movl(temp2, temp); |
| - __ StoreIntoObject(instance_reg, |
| - FieldAddress(instance_reg, field().Offset()), |
| - temp2); |
| + { |
| + __ Bind(&store_float32x4); |
| + Label copy_float32x4; |
| + |
| + const Immediate& raw_null = |
| + Immediate(reinterpret_cast<intptr_t>(Object::null())); |
| + __ movl(temp, FieldAddress(instance_reg, field().Offset())); |
| + __ cmpl(temp, raw_null); |
| + __ j(NOT_EQUAL, ©_float32x4); |
| + |
| + if (!compiler->is_optimizing()) { |
| + locs()->live_registers()->Add(locs()->in(0)); |
| + locs()->live_registers()->Add(locs()->in(1)); |
| + } |
| + |
| + __ TryAllocate(compiler->float32x4_class(), |
| + slow_path->float32x4_entry_label(), |
| + Assembler::kFarJump, |
| + temp, |
| + temp2); |
| + __ Bind(slow_path->float32x4_exit_label()); |
| + __ movl(temp2, temp); |
| + __ StoreIntoObject(instance_reg, |
| + FieldAddress(instance_reg, field().Offset()), |
| + temp2); |
| - __ Bind(©_double); |
| - __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset())); |
| - __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp); |
| - __ jmp(&skip_store); |
| + __ Bind(©_float32x4); |
| + __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset())); |
| + __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp); |
| + __ jmp(&skip_store); |
| + } |
| __ Bind(&store_pointer); |
| } |
| @@ -1906,34 +2024,70 @@ void AllocateObjectWithBoundsCheckInstr::EmitNativeCode( |
| } |
| -class BoxDoubleSlowPath : public SlowPathCode { |
| +class LoadFieldSlowPath : public SlowPathCode { |
| public: |
| - explicit BoxDoubleSlowPath(Instruction* instruction) |
| + explicit LoadFieldSlowPath(Instruction* instruction) |
| : instruction_(instruction) { } |
| virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| - __ Comment("BoxDoubleSlowPath"); |
| - __ Bind(entry_label()); |
| - const Class& double_class = compiler->double_class(); |
| - const Code& stub = |
| - Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
| - const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); |
| - |
| - LocationSummary* locs = instruction_->locs(); |
| - locs->live_registers()->Remove(locs->out()); |
| - |
| - compiler->SaveLiveRegisters(locs); |
| - compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| - &label, |
| - PcDescriptors::kOther, |
| - locs); |
| - __ MoveRegister(locs->out().reg(), EAX); |
| - compiler->RestoreLiveRegisters(locs); |
| + __ Comment("LoadFieldSlowPath"); |
| + { |
| + __ Bind(double_entry_label()); |
| + const Class& double_class = compiler->double_class(); |
| + const Code& stub = |
| + Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
| + const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); |
| + |
| + LocationSummary* locs = instruction_->locs(); |
| + locs->live_registers()->Remove(locs->out()); |
| + |
| + compiler->SaveLiveRegisters(locs); |
| + compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| + &label, |
| + PcDescriptors::kOther, |
| + locs); |
| + __ MoveRegister(locs->out().reg(), EAX); |
| + compiler->RestoreLiveRegisters(locs); |
| + |
| + __ jmp(double_exit_label()); |
| + } |
| + { |
| + __ Bind(float32x4_entry_label()); |
| + const Class& float32x4_class = compiler->float32x4_class(); |
| + const Code& stub = |
| + Code::Handle(StubCode::GetAllocationStubForClass(float32x4_class)); |
| + const ExternalLabel label(float32x4_class.ToCString(), stub.EntryPoint()); |
| + |
| + LocationSummary* locs = instruction_->locs(); |
| + locs->live_registers()->Remove(locs->out()); |
| + |
| + compiler->SaveLiveRegisters(locs); |
| + compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| + &label, |
| + PcDescriptors::kOther, |
| + locs); |
| + __ MoveRegister(locs->out().reg(), EAX); |
| + compiler->RestoreLiveRegisters(locs); |
| + |
| + __ jmp(float32x4_exit_label()); |
| + } |
| + } |
| - __ jmp(exit_label()); |
| + Label* double_entry_label() { |
| + // Use default SlowPathCode label for double. |
| + return entry_label(); |
| + } |
| + Label* double_exit_label() { |
| + // Use default SlowPathCode label for double. |
| + return exit_label(); |
| } |
| + Label* float32x4_entry_label() { return &float32x4_entry_label_; } |
| + Label* float32x4_exit_label() { return &float32x4_exit_label_; } |
| + |
| private: |
| + Label float32x4_entry_label_; |
| + Label float32x4_exit_label_; |
| Instruction* instruction_; |
| }; |
| @@ -1971,9 +2125,13 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| const intptr_t cid = field()->UnboxedFieldCid(); |
| switch (cid) { |
| case kDoubleCid: |
| + __ Comment("UnboxedDoubleLoadFieldInstr"); |
| __ movsd(result, FieldAddress(temp, Double::value_offset())); |
| break; |
| - // TODO(johnmccutchan): Add Float32x4 path here. |
| + case kFloat32x4Cid: |
| + __ Comment("UnboxedFloat32x4LoadFieldInstr"); |
| + __ movups(result, FieldAddress(temp, Float32x4::value_offset())); |
| + break; |
| default: |
| UNREACHABLE(); |
| } |
| @@ -1985,9 +2143,13 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| if (IsPotentialUnboxedLoad()) { |
| Register temp = locs()->temp(1).reg(); |
| XmmRegister value = locs()->temp(0).fpu_reg(); |
| + LoadFieldSlowPath* slow_path = new LoadFieldSlowPath(this); |
| + compiler->AddSlowPathCode(slow_path); |
| Label load_pointer; |
| Label load_double; |
| + Label load_float32x4; |
| + |
| __ LoadObject(result, Field::ZoneHandle(field()->raw())); |
| FieldAddress field_cid_operand(result, Field::guarded_cid_offset()); |
| @@ -1999,29 +2161,49 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| __ cmpl(field_cid_operand, Immediate(kDoubleCid)); |
| __ j(EQUAL, &load_double); |
| + __ cmpl(field_cid_operand, Immediate(kFloat32x4Cid)); |
| + __ j(EQUAL, &load_float32x4); |
| + |
| // Fall through. |
| __ jmp(&load_pointer); |
| - __ Bind(&load_double); |
| - BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); |
| - compiler->AddSlowPathCode(slow_path); |
| + { |
| + __ Bind(&load_double); |
| + |
| + if (!compiler->is_optimizing()) { |
| + locs()->live_registers()->Add(locs()->in(0)); |
| + } |
| - if (!compiler->is_optimizing()) { |
| - locs()->live_registers()->Add(locs()->in(0)); |
| + __ TryAllocate(compiler->double_class(), |
| + slow_path->double_entry_label(), |
| + Assembler::kFarJump, |
| + result, |
| + temp); |
| + __ Bind(slow_path->double_exit_label()); |
| + __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| + __ movsd(value, FieldAddress(temp, Double::value_offset())); |
| + __ movsd(FieldAddress(result, Double::value_offset()), value); |
| + __ jmp(&done); |
| } |
| - __ TryAllocate(compiler->double_class(), |
| - slow_path->entry_label(), |
| - Assembler::kFarJump, |
| - result, |
| - temp); |
| - __ Bind(slow_path->exit_label()); |
| - __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| - __ movsd(value, FieldAddress(temp, Double::value_offset())); |
| - __ movsd(FieldAddress(result, Double::value_offset()), value); |
| - __ jmp(&done); |
| + { |
| + __ Bind(&load_float32x4); |
| - // TODO(johnmccutchan): Add Float32x4 path here. |
| + if (!compiler->is_optimizing()) { |
| + locs()->live_registers()->Add(locs()->in(0)); |
| + } |
| + |
| + __ TryAllocate(compiler->float32x4_class(), |
| + slow_path->float32x4_entry_label(), |
| + Assembler::kFarJump, |
| + result, |
| + temp); |
| + __ Bind(slow_path->float32x4_exit_label()); |
| + __ movl(temp, FieldAddress(instance_reg, offset_in_bytes())); |
| + __ movups(value, FieldAddress(temp, Float32x4::value_offset())); |
| + __ movups(FieldAddress(result, Float32x4::value_offset()), value); |
| + __ jmp(&done); |
| + } |
| __ Bind(&load_pointer); |
| } |
| @@ -2877,6 +3059,38 @@ void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } |
| +class BoxDoubleSlowPath : public SlowPathCode { |
| + public: |
| + explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction) |
| + : instruction_(instruction) { } |
| + |
| + virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| + __ Comment("BoxDoubleSlowPath"); |
| + __ Bind(entry_label()); |
| + const Class& double_class = compiler->double_class(); |
| + const Code& stub = |
| + Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
| + const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); |
| + |
| + LocationSummary* locs = instruction_->locs(); |
| + locs->live_registers()->Remove(locs->out()); |
| + |
| + compiler->SaveLiveRegisters(locs); |
| + compiler->GenerateCall(Scanner::kNoSourcePos, // No token position. |
| + &label, |
| + PcDescriptors::kOther, |
| + locs); |
| + __ MoveRegister(locs->out().reg(), EAX); |
| + compiler->RestoreLiveRegisters(locs); |
| + |
| + __ jmp(exit_label()); |
| + } |
| + |
| + private: |
| + BoxDoubleInstr* instruction_; |
| +}; |
| + |
| + |
| LocationSummary* BoxDoubleInstr::MakeLocationSummary(bool opt) const { |
| const intptr_t kNumInputs = 1; |
| const intptr_t kNumTemps = 0; |
| @@ -2953,19 +3167,6 @@ void UnboxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } |
| -LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const { |
| - const intptr_t kNumInputs = 1; |
| - const intptr_t kNumTemps = 0; |
| - LocationSummary* summary = |
| - new LocationSummary(kNumInputs, |
| - kNumTemps, |
| - LocationSummary::kCallOnSlowPath); |
| - summary->set_in(0, Location::RequiresFpuRegister()); |
| - summary->set_out(Location::RequiresRegister()); |
| - return summary; |
| -} |
| - |
| - |
| class BoxFloat32x4SlowPath : public SlowPathCode { |
| public: |
| explicit BoxFloat32x4SlowPath(BoxFloat32x4Instr* instruction) |
| @@ -2998,6 +3199,19 @@ class BoxFloat32x4SlowPath : public SlowPathCode { |
| }; |
| +LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(bool opt) const { |
| + const intptr_t kNumInputs = 1; |
| + const intptr_t kNumTemps = 0; |
| + LocationSummary* summary = |
| + new LocationSummary(kNumInputs, |
| + kNumTemps, |
| + LocationSummary::kCallOnSlowPath); |
| + summary->set_in(0, Location::RequiresFpuRegister()); |
| + summary->set_out(Location::RequiresRegister()); |
| + return summary; |
| +} |
| + |
| + |
| void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this); |
| compiler->AddSlowPathCode(slow_path); |