Index: runtime/vm/intermediate_language_ia32.cc
diff --git a/runtime/vm/intermediate_language_ia32.cc b/runtime/vm/intermediate_language_ia32.cc
index 3b86cc1321a212f17568b68cd18bb42abbce950c..c1b3e0435bfc0b34ac238c4fff8c7fa10052858e 100644
--- a/runtime/vm/intermediate_language_ia32.cc
+++ b/runtime/vm/intermediate_language_ia32.cc
@@ -1592,16 +1592,16 @@ void GuardFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 
 class StoreInstanceFieldSlowPath : public SlowPathCode {
  public:
-  explicit StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction)
-      : instruction_(instruction) { }
+  StoreInstanceFieldSlowPath(StoreInstanceFieldInstr* instruction,
+                             const Class& cls)
+      : instruction_(instruction), cls_(cls) { }
 
   virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
     __ Comment("StoreInstanceFieldSlowPath");
     __ Bind(entry_label());
-    const Class& double_class = compiler->double_class();
     const Code& stub =
-        Code::Handle(StubCode::GetAllocationStubForClass(double_class));
-    const ExternalLabel label(double_class.ToCString(), stub.EntryPoint());
+        Code::Handle(StubCode::GetAllocationStubForClass(cls_));
+    const ExternalLabel label(cls_.ToCString(), stub.EntryPoint());
 
     LocationSummary* locs = instruction_->locs();
     locs->live_registers()->Remove(locs->out());
@@ -1619,6 +1619,7 @@ class StoreInstanceFieldSlowPath : public SlowPathCode {
 
  private:
   StoreInstanceFieldInstr* instruction_;
+  const Class& cls_;
 };
 
 
@@ -1662,13 +1663,23 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     XmmRegister value = locs()->in(1).fpu_reg();
     Register temp = locs()->temp(0).reg();
     Register temp2 = locs()->temp(1).reg();
+    const intptr_t cid = field().UnboxedFieldCid();
 
     if (is_initialization_) {
+      const Class* cls = NULL;
+      switch (cid) {
+        case kDoubleCid:
+          cls = &compiler->double_class();
+          break;
+        // TODO(johnmccutchan): Add kFloat32x4Cid here.
+        default:
+          UNREACHABLE();
+      }
       StoreInstanceFieldSlowPath* slow_path =
-          new StoreInstanceFieldSlowPath(this);
+          new StoreInstanceFieldSlowPath(this, *cls);
       compiler->AddSlowPathCode(slow_path);
 
-      __ TryAllocate(compiler->double_class(),
+      __ TryAllocate(*cls,
                      slow_path->entry_label(),
                      Assembler::kFarJump,
                      temp,
@@ -1681,7 +1692,14 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     } else {
       __ movl(temp, FieldAddress(instance_reg, field().Offset()));
     }
-    __ movsd(FieldAddress(temp, Double::value_offset()), value);
+    switch (cid) {
+      case kDoubleCid:
+        __ movsd(FieldAddress(temp, Double::value_offset()), value);
+        // TODO(johnmccutchan): Add kFloat32x4Cid here.
+        break;
+      default:
+        UNREACHABLE();
+    }
     return;
   }
 
@@ -1691,26 +1709,37 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     Register temp2 = locs()->temp(1).reg();
     FpuRegister fpu_temp = locs()->temp(2).fpu_reg();
 
-    Label store_pointer, copy_payload;
+    Label store_pointer;
+    Label copy_double;
+    Label store_double;
+
     __ LoadObject(temp, Field::ZoneHandle(field().raw()));
-    __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
-            Immediate(kDoubleCid));
-    __ j(NOT_EQUAL, &store_pointer);
+
     __ cmpl(FieldAddress(temp, Field::is_nullable_offset()),
             Immediate(kNullCid));
     __ j(EQUAL, &store_pointer);
+
     __ movzxb(temp2, FieldAddress(temp, Field::kind_bits_offset()));
     __ testl(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
     __ j(ZERO, &store_pointer);
 
+    __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
+            Immediate(kDoubleCid));
+    __ j(EQUAL, &store_double);
+
+    // Fall through.
+    __ jmp(&store_pointer);
+
+    __ Bind(&store_double);
+
     const Immediate& raw_null =
         Immediate(reinterpret_cast<intptr_t>(Object::null()));
     __ movl(temp, FieldAddress(instance_reg, field().Offset()));
     __ cmpl(temp, raw_null);
-    __ j(NOT_EQUAL, &copy_payload);
+    __ j(NOT_EQUAL, &copy_double);
 
     StoreInstanceFieldSlowPath* slow_path =
-        new StoreInstanceFieldSlowPath(this);
+        new StoreInstanceFieldSlowPath(this, compiler->double_class());
     compiler->AddSlowPathCode(slow_path);
 
     if (!compiler->is_optimizing()) {
@@ -1729,7 +1758,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                        FieldAddress(instance_reg, field().Offset()),
                        temp2);
 
-    __ Bind(&copy_payload);
+    __ Bind(&copy_double);
     __ movsd(fpu_temp, FieldAddress(value_reg, Double::value_offset()));
     __ movsd(FieldAddress(temp, Double::value_offset()), fpu_temp);
     __ jmp(&skip_store);
@@ -1939,7 +1968,15 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     XmmRegister result = locs()->out().fpu_reg();
     Register temp = locs()->temp(0).reg();
     __ movl(temp, FieldAddress(instance_reg, offset_in_bytes()));
-    __ movsd(result, FieldAddress(temp, Double::value_offset()));
+    const intptr_t cid = field()->UnboxedFieldCid();
+    switch (cid) {
+      case kDoubleCid:
+        __ movsd(result, FieldAddress(temp, Double::value_offset()));
+        break;
+      // TODO(johnmccutchan): Add Float32x4 path here.
+      default:
+        UNREACHABLE();
+    }
     return;
   }
 
@@ -1950,17 +1987,22 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     XmmRegister value = locs()->temp(0).fpu_reg();
 
     Label load_pointer;
+    Label load_double;
    __ LoadObject(result, Field::ZoneHandle(field()->raw()));
 
    FieldAddress field_cid_operand(result, Field::guarded_cid_offset());
    FieldAddress field_nullability_operand(result, Field::is_nullable_offset());
 
-    __ cmpl(field_cid_operand, Immediate(kDoubleCid));
-    __ j(NOT_EQUAL, &load_pointer);
-
    __ cmpl(field_nullability_operand, Immediate(kNullCid));
    __ j(EQUAL, &load_pointer);
 
+    __ cmpl(field_cid_operand, Immediate(kDoubleCid));
+    __ j(EQUAL, &load_double);
+
+    // Fall through.
+    __ jmp(&load_pointer);
+
+    __ Bind(&load_double);
    BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this);
    compiler->AddSlowPathCode(slow_path);
 
@@ -1978,6 +2020,9 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     __ movsd(value, FieldAddress(temp, Double::value_offset()));
     __ movsd(FieldAddress(result, Double::value_offset()), value);
     __ jmp(&done);
+
+    // TODO(johnmccutchan): Add Float32x4 path here.
+
     __ Bind(&load_pointer);
   }
   __ movl(result, FieldAddress(instance_reg, offset_in_bytes()));
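
The TODO(johnmccutchan) comments above mark where a kFloat32x4Cid arm would join each switch. A minimal sketch of what those arms might look like, assuming the VM exposes Float32x4::value_offset(), a float32x4_class() accessor on FlowGraphCompiler, and a movups macro on the ia32 assembler; these names are assumptions for illustration, not API added by this patch.

    // Hypothetical allocation arm for the is_initialization_ switch:
    // pick the box class handed to StoreInstanceFieldSlowPath/TryAllocate.
    case kFloat32x4Cid:
      cls = &compiler->float32x4_class();
      break;

    // Hypothetical store arm: copy all 16 bytes of the unboxed value.
    case kFloat32x4Cid:
      __ movups(FieldAddress(temp, Float32x4::value_offset()), value);
      break;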