Index: runtime/vm/intermediate_language_ia32.cc
diff --git a/runtime/vm/intermediate_language_ia32.cc b/runtime/vm/intermediate_language_ia32.cc
index c587f1f2142176ba7604552f5fad483813040993..1649a8c81c356d4ea0b765c4f8e413041772db45 100644
--- a/runtime/vm/intermediate_language_ia32.cc
+++ b/runtime/vm/intermediate_language_ia32.cc
@@ -1677,6 +1677,9 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
       case kFloat32x4Cid:
         cls = &compiler->float32x4_class();
         break;
+      case kFloat64x2Cid:
+        cls = &compiler->float64x2_class();
+        break;
       default:
         UNREACHABLE();
     }
@@ -1707,6 +1710,10 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
         __ Comment("UnboxedFloat32x4StoreInstanceFieldInstr");
         __ movups(FieldAddress(temp, Float32x4::value_offset()), value);
         break;
+      case kFloat64x2Cid:
+        __ Comment("UnboxedFloat64x2StoreInstanceFieldInstr");
+        __ movups(FieldAddress(temp, Float64x2::value_offset()), value);
+        break;
       default:
         UNREACHABLE();
     }
@@ -1722,6 +1729,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     Label store_pointer;
     Label store_double;
     Label store_float32x4;
+    Label store_float64x2;

     __ LoadObject(temp, Field::ZoneHandle(field().raw()));

@@ -1741,6 +1749,10 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
             Immediate(kFloat32x4Cid));
     __ j(EQUAL, &store_float32x4);

+    __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
+            Immediate(kFloat64x2Cid));
+    __ j(EQUAL, &store_float64x2);
+
     // Fall through.
     __ jmp(&store_pointer);

@@ -1811,6 +1823,38 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
       __ movups(FieldAddress(temp, Float32x4::value_offset()), fpu_temp);
       __ jmp(&skip_store);
     }
+
+    {
+      __ Bind(&store_float64x2);
+      Label copy_float64x2;
+
+      StoreInstanceFieldSlowPath* slow_path =
+          new StoreInstanceFieldSlowPath(this, compiler->float64x2_class());
+      compiler->AddSlowPathCode(slow_path);
+
+      const Immediate& raw_null =
+          Immediate(reinterpret_cast<intptr_t>(Object::null()));
+      __ movl(temp, FieldAddress(instance_reg, offset_in_bytes_));
+      __ cmpl(temp, raw_null);
+      __ j(NOT_EQUAL, &copy_float64x2);
+
+      __ TryAllocate(compiler->float64x2_class(),
+                     slow_path->entry_label(),
+                     Assembler::kFarJump,
+                     temp,
+                     temp2);
+      __ Bind(slow_path->exit_label());
+      __ movl(temp2, temp);
+      __ StoreIntoObject(instance_reg,
+                         FieldAddress(instance_reg, offset_in_bytes_),
+                         temp2);
+
+      __ Bind(&copy_float64x2);
+      __ movups(fpu_temp, FieldAddress(value_reg, Float64x2::value_offset()));
+      __ movups(FieldAddress(temp, Float64x2::value_offset()), fpu_temp);
+      __ jmp(&skip_store);
+    }
+
     __ Bind(&store_pointer);
   }

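The added store_float64x2 block mirrors the existing Float32x4 path: if the field slot already holds a Float64x2 box, its 16-byte payload is overwritten in place; otherwise TryAllocate creates a fresh box (StoreInstanceFieldSlowPath covers allocation failure) and StoreIntoObject installs it with a write barrier before the copy. A minimal C++ sketch of that reuse-or-allocate-then-copy logic, using hypothetical Instance and Float64x2Box stand-ins rather than the VM's real object layout:

#include <cstring>

// Hypothetical stand-ins for the VM's raw objects; the real layout differs.
struct Float64x2Box { double lanes[2]; };        // 16-byte unboxed payload
struct Instance { Float64x2Box* field_box; };    // the guarded field's slot

// Fast path emitted under store_float64x2: reuse the box already stored in
// the field slot, or allocate one if the slot is still null.
void StoreUnboxedFloat64x2(Instance* instance, const double value[2]) {
  Float64x2Box* box = instance->field_box;       // movl temp, [instance + offset]
  if (box == nullptr) {                          // cmpl temp, raw_null
    box = new Float64x2Box();                    // TryAllocate; slow path on failure
    instance->field_box = box;                   // StoreIntoObject (write barrier)
  }
  std::memcpy(box->lanes, value, sizeof(box->lanes));  // movups copy of 16 bytes
}

Reusing the existing box keeps repeated stores to the same field allocation-free after the first one.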
@@ -2072,6 +2116,10 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
         __ Comment("UnboxedFloat32x4LoadFieldInstr");
         __ movups(result, FieldAddress(temp, Float32x4::value_offset()));
         break;
+      case kFloat64x2Cid:
+        __ Comment("UnboxedFloat64x2LoadFieldInstr");
+        __ movups(result, FieldAddress(temp, Float64x2::value_offset()));
+        break;
       default:
         UNREACHABLE();
     }
@@ -2088,6 +2136,7 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     Label load_pointer;
     Label load_double;
     Label load_float32x4;
+    Label load_float64x2;

     __ LoadObject(result, Field::ZoneHandle(field()->raw()));

@@ -2103,6 +2152,9 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     __ cmpl(field_cid_operand, Immediate(kFloat32x4Cid));
     __ j(EQUAL, &load_float32x4);

+    __ cmpl(field_cid_operand, Immediate(kFloat64x2Cid));
+    __ j(EQUAL, &load_float64x2);
+
     // Fall through.
     __ jmp(&load_pointer);

@@ -2145,6 +2197,24 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
       __ jmp(&done);
     }

+    {
+      __ Bind(&load_float64x2);
+
+      BoxFloat64x2SlowPath* slow_path = new BoxFloat64x2SlowPath(this);
+      compiler->AddSlowPathCode(slow_path);
+
+      __ TryAllocate(compiler->float64x2_class(),
+                     slow_path->entry_label(),
+                     Assembler::kFarJump,
+                     result,
+                     temp);
+      __ Bind(slow_path->exit_label());
+      __ movl(temp, FieldAddress(instance_reg, offset_in_bytes()));
+      __ movups(value, FieldAddress(temp, Float64x2::value_offset()));
+      __ movups(FieldAddress(result, Float64x2::value_offset()), value);
+      __ jmp(&done);
+    }
+
     __ Bind(&load_pointer);
   }
   __ movl(result, FieldAddress(instance_reg, offset_in_bytes()));
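The load side is the mirror image: when the guarded cid is kFloat64x2Cid, LoadFieldInstr allocates a fresh Float64x2 box for the result (BoxFloat64x2SlowPath covers allocation failure) and copies the payload out of the box held in the field slot, so the field's internal box never escapes to user code. Continuing the same hypothetical model as above:

// Continuation of the sketch above: boxing on load so the field's internal
// box is never handed out directly.
Float64x2Box* LoadUnboxedFloat64x2(const Instance* instance) {
  Float64x2Box* result = new Float64x2Box();       // TryAllocate; slow path on failure
  Float64x2Box* field_box = instance->field_box;   // movl temp, [instance + offset]
  std::memcpy(result->lanes, field_box->lanes,     // movups via an XMM temp
              sizeof(result->lanes));
  return result;
}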
@@ -3450,7 +3520,7 @@ void Simd32x4ShuffleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

   switch (op_kind()) {
     case MethodRecognizer::kFloat32x4ShuffleX:
-      __ shufps(value, value, Immediate(0x00));
+      // Shuffle not necessary.
       __ cvtss2sd(value, value);
       break;
     case MethodRecognizer::kFloat32x4ShuffleY:
@@ -3881,7 +3951,7 @@ void Simd64x2ShuffleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

   switch (op_kind()) {
     case MethodRecognizer::kFloat64x2GetX:
-      __ shufpd(value, value, Immediate(0x00));
+      // nop.
       break;
     case MethodRecognizer::kFloat64x2GetY:
       __ shufpd(value, value, Immediate(0x33));
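The last two hunks drop redundant shuffles rather than change behaviour: ShuffleX and GetX read lane 0, which is already in the low position that cvtss2sd (or a direct read of the low double) consumes, while GetY still needs shufpd with immediate 0x33, whose low two bits select lane 1 from both operands and so move y into the low lane. A small standalone SSE2 check of those lane semantics (intrinsics, not VM assembler):

#include <emmintrin.h>  // SSE2 intrinsics: __m128, __m128d, shufpd, cvtss2sd
#include <cassert>

int main() {
  // _mm_set_pd takes (high, low): x = 1.0 sits in lane 0, y = 2.0 in lane 1.
  __m128d v = _mm_set_pd(2.0, 1.0);

  // Float64x2.GetX: lane 0 is already the low double, so no shuffle is needed.
  assert(_mm_cvtsd_f64(v) == 1.0);

  // Float64x2.GetY: shufpd only consumes the low two immediate bits (0x33 and
  // 0x3 encode the same selection), picking lane 1 from both operands.
  __m128d y_low = _mm_shuffle_pd(v, v, 0x3);
  assert(_mm_cvtsd_f64(y_low) == 2.0);

  // Float32x4.ShuffleX: cvtss2sd converts the low float lane directly, so the
  // old shufps(value, value, 0x00) broadcast did not affect the result.
  __m128 f = _mm_set_ps(4.0f, 3.0f, 2.0f, 1.0f);   // x = 1.0f in the low lane
  __m128d xd = _mm_cvtss_sd(_mm_setzero_pd(), f);
  assert(_mm_cvtsd_f64(xd) == 1.0);
  return 0;
}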
|