| Index: runtime/vm/intermediate_language_x64.cc
|
| diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
|
| index 19bf8645c7320025fc26cc2953179a77f51d81d2..e6d391023521b78226f82e8e08a0298226ef4749 100644
|
| --- a/runtime/vm/intermediate_language_x64.cc
|
| +++ b/runtime/vm/intermediate_language_x64.cc
|
| @@ -1506,38 +1506,64 @@ void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -class StoreInstanceFieldSlowPath : public SlowPathCode {
|
| +class BoxAllocationSlowPath : public SlowPathCode {
|
| public:
|
| - StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction,
|
| - const Class& cls)
|
| - : instruction_(instruction), cls_(cls) { }
|
| + BoxAllocationSlowPath(Instruction* instruction,
|
| + const Class& cls,
|
| + Register result)
|
| + : instruction_(instruction),
|
| + cls_(cls),
|
| + result_(result) { }
|
|
|
| virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - __ Comment("StoreInstanceFieldSlowPath");
|
| - __ Bind(entry_label());
|
| Isolate* isolate = compiler->isolate();
|
| StubCode* stub_code = isolate->stub_code();
|
| - const Code& stub = Code::Handle(isolate,
|
| - stub_code->GetAllocationStubForClass(cls_));
|
| +
|
| + if (Assembler::EmittingComments()) {
|
| + __ Comment("%s slow path allocation of %s",
|
| + instruction_->DebugName(),
|
| + String::Handle(cls_.PrettyName()).ToCString());
|
| + }
|
| + __ Bind(entry_label());
|
| +
|
| + const Code& stub =
|
| + Code::Handle(isolate, stub_code->GetAllocationStubForClass(cls_));
|
| const ExternalLabel label(stub.EntryPoint());
|
|
|
| LocationSummary* locs = instruction_->locs();
|
| - locs->live_registers()->Remove(locs->temp(0));
|
| +
|
| + locs->live_registers()->Remove(Location::RegisterLocation(result_));
|
|
|
| compiler->SaveLiveRegisters(locs);
|
| compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
|
| &label,
|
| RawPcDescriptors::kOther,
|
| locs);
|
| - __ MoveRegister(locs->temp(0).reg(), RAX);
|
| + __ MoveRegister(result_, RAX);
|
| compiler->RestoreLiveRegisters(locs);
|
| -
|
| __ jmp(exit_label());
|
| }
|
|
|
| + static void Allocate(FlowGraphCompiler* compiler,
|
| + Instruction* instruction,
|
| + const Class& cls,
|
| + Register result) {
|
| + BoxAllocationSlowPath* slow_path =
|
| + new BoxAllocationSlowPath(instruction, cls, result);
|
| + compiler->AddSlowPathCode(slow_path);
|
| +
|
| + __ TryAllocate(cls,
|
| + slow_path->entry_label(),
|
| + Assembler::kFarJump,
|
| + result,
|
| + PP);
|
| + __ Bind(slow_path->exit_label());
|
| + }
|
| +
|
| private:
|
| - StoreInstanceFieldInstr* instruction_;
|
| + Instruction* instruction_;
|
| const Class& cls_;
|
| + Register result_;
|
| };
|
|
|
|
|
| @@ -1576,6 +1602,27 @@ LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Isolate* isolate,
|
| }
|
|
|
|
|
| +static void EnsureMutableBox(FlowGraphCompiler* compiler,
|
| + StoreInstanceFieldInstr* instruction,
|
| + Register box_reg,
|
| + const Class& cls,
|
| + Register instance_reg,
|
| + intptr_t offset,
|
| + Register temp) {
|
| + Label done;
|
| + __ movq(box_reg, FieldAddress(instance_reg, offset));
|
| + __ CompareObject(box_reg, Object::null_object(), PP);
|
| + __ j(NOT_EQUAL, &done);
|
| + BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg);
|
| + __ movq(temp, box_reg);
|
| + __ StoreIntoObject(instance_reg,
|
| + FieldAddress(instance_reg, offset),
|
| + temp);
|
| +
|
| + __ Bind(&done);
|
| +}
|
| +
|
| +
|
| void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| Label skip_store;
|
|
|
| @@ -1603,16 +1650,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| UNREACHABLE();
|
| }
|
|
|
| - StoreInstanceFieldSlowPath* slow_path =
|
| - new StoreInstanceFieldSlowPath(this, *cls);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ TryAllocate(*cls,
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - temp,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp);
|
| __ movq(temp2, temp);
|
| __ StoreIntoObject(instance_reg,
|
| FieldAddress(instance_reg, offset_in_bytes_),
|
| @@ -1682,27 +1720,13 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
|
|
| {
|
| __ Bind(&store_double);
|
| - Label copy_double;
|
| - StoreInstanceFieldSlowPath* slow_path =
|
| - new StoreInstanceFieldSlowPath(this, compiler->double_class());
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_));
|
| - __ CompareObject(temp, Object::null_object(), PP);
|
| - __ j(NOT_EQUAL, &copy_double);
|
| -
|
| - __ TryAllocate(compiler->double_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - temp,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| - __ movq(temp2, temp);
|
| - __ StoreIntoObject(instance_reg,
|
| - FieldAddress(instance_reg, offset_in_bytes_),
|
| - temp2);
|
| -
|
| - __ Bind(&copy_double);
|
| + EnsureMutableBox(compiler,
|
| + this,
|
| + temp,
|
| + compiler->double_class(),
|
| + instance_reg,
|
| + offset_in_bytes_,
|
| + temp2);
|
| __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
|
| __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
|
| __ jmp(&skip_store);
|
| @@ -1710,27 +1734,13 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
|
|
| {
|
| __ Bind(&store_float32x4);
|
| - Label copy_float32x4;
|
| - StoreInstanceFieldSlowPath* slow_path =
|
| - new StoreInstanceFieldSlowPath(this, compiler->float32x4_class());
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_));
|
| - __ CompareObject(temp, Object::null_object(), PP);
|
| - __ j(NOT_EQUAL, &copy_float32x4);
|
| -
|
| - __ TryAllocate(compiler->float32x4_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - temp,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| - __ movq(temp2, temp);
|
| - __ StoreIntoObject(instance_reg,
|
| - FieldAddress(instance_reg, offset_in_bytes_),
|
| - temp2);
|
| -
|
| - __ Bind(&copy_float32x4);
|
| + EnsureMutableBox(compiler,
|
| + this,
|
| + temp,
|
| + compiler->float32x4_class(),
|
| + instance_reg,
|
| + offset_in_bytes_,
|
| + temp2);
|
| __ movups(fpu_temp, FieldAddress(value_reg, Float32x4::value_offset()));
|
| __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp);
|
| __ jmp(&skip_store);
|
| @@ -1738,28 +1748,13 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
|
|
| {
|
| __ Bind(&store_float64x2);
|
| - Label copy_float64x2;
|
| -
|
| - StoreInstanceFieldSlowPath* slow_path =
|
| - new StoreInstanceFieldSlowPath(this, compiler->float64x2_class());
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_));
|
| - __ CompareObject(temp, Object::null_object(), PP);
|
| - __ j(NOT_EQUAL, &copy_float64x2);
|
| -
|
| - __ TryAllocate(compiler->float64x2_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - temp,
|
| - temp2);
|
| - __ Bind(slow_path->exit_label());
|
| - __ movq(temp2, temp);
|
| - __ StoreIntoObject(instance_reg,
|
| - FieldAddress(instance_reg, offset_in_bytes_),
|
| - temp2);
|
| -
|
| - __ Bind(&copy_float64x2);
|
| + EnsureMutableBox(compiler,
|
| + this,
|
| + temp,
|
| + compiler->float64x2_class(),
|
| + instance_reg,
|
| + offset_in_bytes_,
|
| + temp2);
|
| __ movups(fpu_temp, FieldAddress(value_reg, Float64x2::value_offset()));
|
| __ movups(FieldAddress(temp, Float64x2::value_offset()), fpu_temp);
|
| __ jmp(&skip_store);
|
| @@ -1994,111 +1989,6 @@ void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| }
|
|
|
|
|
| -class BoxDoubleSlowPath : public SlowPathCode {
|
| - public:
|
| - explicit BoxDoubleSlowPath(Instruction* instruction)
|
| - : instruction_(instruction) { }
|
| -
|
| - virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - __ Comment("BoxDoubleSlowPath");
|
| - __ Bind(entry_label());
|
| - Isolate* isolate = compiler->isolate();
|
| - StubCode* stub_code = isolate->stub_code();
|
| - const Class& double_class = compiler->double_class();
|
| - const Code& stub =
|
| - Code::Handle(isolate,
|
| - stub_code->GetAllocationStubForClass(double_class));
|
| - const ExternalLabel label(stub.EntryPoint());
|
| -
|
| - LocationSummary* locs = instruction_->locs();
|
| - ASSERT(!locs->live_registers()->Contains(locs->out(0)));
|
| -
|
| - compiler->SaveLiveRegisters(locs);
|
| - compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
|
| - &label,
|
| - RawPcDescriptors::kOther,
|
| - locs);
|
| - __ MoveRegister(locs->out(0).reg(), RAX);
|
| - compiler->RestoreLiveRegisters(locs);
|
| -
|
| - __ jmp(exit_label());
|
| - }
|
| -
|
| - private:
|
| - Instruction* instruction_;
|
| -};
|
| -
|
| -
|
| -class BoxFloat32x4SlowPath : public SlowPathCode {
|
| - public:
|
| - explicit BoxFloat32x4SlowPath(Instruction* instruction)
|
| - : instruction_(instruction) { }
|
| -
|
| - virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - __ Comment("BoxFloat32x4SlowPath");
|
| - __ Bind(entry_label());
|
| - Isolate* isolate = compiler->isolate();
|
| - StubCode* stub_code = isolate->stub_code();
|
| - const Class& float32x4_class = compiler->float32x4_class();
|
| - const Code& stub =
|
| - Code::Handle(isolate,
|
| - stub_code->GetAllocationStubForClass(float32x4_class));
|
| - const ExternalLabel label(stub.EntryPoint());
|
| -
|
| - LocationSummary* locs = instruction_->locs();
|
| - ASSERT(!locs->live_registers()->Contains(locs->out(0)));
|
| -
|
| - compiler->SaveLiveRegisters(locs);
|
| - compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
|
| - &label,
|
| - RawPcDescriptors::kOther,
|
| - locs);
|
| - __ MoveRegister(locs->out(0).reg(), RAX);
|
| - compiler->RestoreLiveRegisters(locs);
|
| -
|
| - __ jmp(exit_label());
|
| - }
|
| -
|
| - private:
|
| - Instruction* instruction_;
|
| -};
|
| -
|
| -
|
| -class BoxFloat64x2SlowPath : public SlowPathCode {
|
| - public:
|
| - explicit BoxFloat64x2SlowPath(Instruction* instruction)
|
| - : instruction_(instruction) { }
|
| -
|
| - virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - __ Comment("BoxFloat64x2SlowPath");
|
| - __ Bind(entry_label());
|
| - Isolate* isolate = compiler->isolate();
|
| - StubCode* stub_code = isolate->stub_code();
|
| - const Class& float64x2_class = compiler->float64x2_class();
|
| - const Code& stub =
|
| - Code::Handle(isolate,
|
| - stub_code->GetAllocationStubForClass(float64x2_class));
|
| - const ExternalLabel label(stub.EntryPoint());
|
| -
|
| - LocationSummary* locs = instruction_->locs();
|
| - ASSERT(!locs->live_registers()->Contains(locs->out(0)));
|
| -
|
| - compiler->SaveLiveRegisters(locs);
|
| - compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
|
| - &label,
|
| - RawPcDescriptors::kOther,
|
| - locs);
|
| - __ MoveRegister(locs->out(0).reg(), RAX);
|
| - compiler->RestoreLiveRegisters(locs);
|
| -
|
| - __ jmp(exit_label());
|
| - }
|
| -
|
| - private:
|
| - Instruction* instruction_;
|
| -};
|
| -
|
| -
|
| LocationSummary* LoadFieldInstr::MakeLocationSummary(Isolate* isolate,
|
| bool opt) const {
|
| const intptr_t kNumInputs = 1;
|
| @@ -2189,15 +2079,8 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
|
|
| {
|
| __ Bind(&load_double);
|
| - BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ TryAllocate(compiler->double_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - result,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->double_class(), result);
|
| __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
|
| __ movsd(value, FieldAddress(temp, Double::value_offset()));
|
| __ movsd(FieldAddress(result, Double::value_offset()), value);
|
| @@ -2206,15 +2089,8 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
|
|
| {
|
| __ Bind(&load_float32x4);
|
| - BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ TryAllocate(compiler->float32x4_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - result,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->float32x4_class(), result);
|
| __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
|
| __ movups(value, FieldAddress(temp, Float32x4::value_offset()));
|
| __ movups(FieldAddress(result, Float32x4::value_offset()), value);
|
| @@ -2223,15 +2099,8 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
|
|
| {
|
| __ Bind(&load_float64x2);
|
| - BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| - __ TryAllocate(compiler->float64x2_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - result,
|
| - temp);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->float64x2_class(), result);
|
| __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
|
| __ movups(value, FieldAddress(temp, Float64x2::value_offset()));
|
| __ movups(FieldAddress(result, Float64x2::value_offset()), value);
|
| @@ -3197,18 +3066,11 @@ LocationSummary* BoxDoubleInstr::MakeLocationSummary(Isolate* isolate,
|
|
|
|
|
| void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| Register out_reg = locs()->out(0).reg();
|
| XmmRegister value = locs()->in(0).fpu_reg();
|
|
|
| - __ TryAllocate(compiler->double_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - out_reg,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->double_class(), out_reg);
|
| __ movsd(FieldAddress(out_reg, Double::value_offset()), value);
|
| }
|
|
|
| @@ -3282,18 +3144,11 @@ LocationSummary* BoxFloat32x4Instr::MakeLocationSummary(Isolate* isolate,
|
|
|
|
|
| void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - BoxFloat32x4SlowPath* slow_path = new BoxFloat32x4SlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| Register out_reg = locs()->out(0).reg();
|
| XmmRegister value = locs()->in(0).fpu_reg();
|
|
|
| - __ TryAllocate(compiler->float32x4_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - out_reg,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->float32x4_class(), out_reg);
|
| __ movups(FieldAddress(out_reg, Float32x4::value_offset()), value);
|
| }
|
|
|
| @@ -3339,18 +3194,11 @@ LocationSummary* BoxFloat64x2Instr::MakeLocationSummary(Isolate* isolate,
|
|
|
|
|
| void BoxFloat64x2Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| Register out_reg = locs()->out(0).reg();
|
| XmmRegister value = locs()->in(0).fpu_reg();
|
|
|
| - __ TryAllocate(compiler->float64x2_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - out_reg,
|
| - kNoRegister);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->float64x2_class(), out_reg);
|
| __ movups(FieldAddress(out_reg, Float64x2::value_offset()), value);
|
| }
|
|
|
| @@ -3398,54 +3246,12 @@ LocationSummary* BoxInt32x4Instr::MakeLocationSummary(Isolate* isolate,
|
| }
|
|
|
|
|
| -class BoxInt32x4SlowPath : public SlowPathCode {
|
| - public:
|
| - explicit BoxInt32x4SlowPath(BoxInt32x4Instr* instruction)
|
| - : instruction_(instruction) { }
|
| -
|
| - virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - __ Comment("BoxInt32x4SlowPath");
|
| - __ Bind(entry_label());
|
| - Isolate* isolate = compiler->isolate();
|
| - StubCode* stub_code = isolate->stub_code();
|
| - const Class& int32x4_class = compiler->int32x4_class();
|
| - const Code& stub =
|
| - Code::Handle(isolate,
|
| - stub_code->GetAllocationStubForClass(int32x4_class));
|
| - const ExternalLabel label(stub.EntryPoint());
|
| -
|
| - LocationSummary* locs = instruction_->locs();
|
| - ASSERT(!locs->live_registers()->Contains(locs->out(0)));
|
| -
|
| - compiler->SaveLiveRegisters(locs);
|
| - compiler->GenerateCall(Scanner::kNoSourcePos, // No token position.
|
| - &label,
|
| - RawPcDescriptors::kOther,
|
| - locs);
|
| - __ MoveRegister(locs->out(0).reg(), RAX);
|
| - compiler->RestoreLiveRegisters(locs);
|
| -
|
| - __ jmp(exit_label());
|
| - }
|
| -
|
| - private:
|
| - BoxInt32x4Instr* instruction_;
|
| -};
|
| -
|
| -
|
| void BoxInt32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
|
| - BoxInt32x4SlowPath* slow_path = new BoxInt32x4SlowPath(this);
|
| - compiler->AddSlowPathCode(slow_path);
|
| -
|
| Register out_reg = locs()->out(0).reg();
|
| XmmRegister value = locs()->in(0).fpu_reg();
|
|
|
| - __ TryAllocate(compiler->int32x4_class(),
|
| - slow_path->entry_label(),
|
| - Assembler::kFarJump,
|
| - out_reg,
|
| - PP);
|
| - __ Bind(slow_path->exit_label());
|
| + BoxAllocationSlowPath::Allocate(
|
| + compiler, this, compiler->int32x4_class(), out_reg);
|
| __ movups(FieldAddress(out_reg, Int32x4::value_offset()), value);
|
| }
|
|
|
|
|