Index: runtime/vm/intermediate_language_x64.cc
diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
index a43db626df7386a9aff9c823f83cac42de9747db..930ace149f1a5932f7b5b079eedd06f45c075df2 100644
--- a/runtime/vm/intermediate_language_x64.cc
+++ b/runtime/vm/intermediate_language_x64.cc
@@ -1498,7 +1498,7 @@ LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone,
void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
- ASSERT(sizeof(classid_t) == kInt16Size);
+ ASSERT(sizeof(classid_t) == kInt32Size);
const intptr_t value_cid = value()->Type()->ToCid();
const intptr_t field_cid = field().guarded_cid();
const intptr_t nullability = field().is_nullable() ? kNullCid : kIllegalCid;
@@ -1550,13 +1550,13 @@ void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (value_cid == kDynamicCid) {
LoadValueCid(compiler, value_cid_reg, value_reg);
- __ cmpw(value_cid_reg, field_cid_operand);
+ __ cmpl(value_cid_reg, field_cid_operand);
__ j(EQUAL, &ok);
- __ cmpw(value_cid_reg, field_nullability_operand);
+ __ cmpl(value_cid_reg, field_nullability_operand);
} else if (value_cid == kNullCid) {
- __ cmpw(field_nullability_operand, Immediate(value_cid));
+ __ cmpl(field_nullability_operand, Immediate(value_cid));
} else {
- __ cmpw(field_cid_operand, Immediate(value_cid));
+ __ cmpl(field_cid_operand, Immediate(value_cid));
}
__ j(EQUAL, &ok);
@@ -1567,16 +1567,16 @@ void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
if (!field().needs_length_check()) {
// Uninitialized field can be handled inline. Check if the
// field is still unitialized.
- __ cmpw(field_cid_operand, Immediate(kIllegalCid));
+ __ cmpl(field_cid_operand, Immediate(kIllegalCid));
__ j(NOT_EQUAL, fail);
if (value_cid == kDynamicCid) {
- __ movw(field_cid_operand, value_cid_reg);
- __ movw(field_nullability_operand, value_cid_reg);
+ __ movl(field_cid_operand, value_cid_reg);
+ __ movl(field_nullability_operand, value_cid_reg);
} else {
ASSERT(field_reg != kNoRegister);
- __ movw(field_cid_operand, Immediate(value_cid));
- __ movw(field_nullability_operand, Immediate(value_cid));
+ __ movl(field_cid_operand, Immediate(value_cid));
+ __ movl(field_nullability_operand, Immediate(value_cid));
}
if (deopt == NULL) {
@@ -1589,7 +1589,7 @@ void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
ASSERT(!compiler->is_optimizing());
__ Bind(fail);
- __ cmpw(FieldAddress(field_reg, Field::guarded_cid_offset()),
+ __ cmpl(FieldAddress(field_reg, Field::guarded_cid_offset()),
Immediate(kDynamicCid));
__ j(EQUAL, &ok);
@@ -1832,7 +1832,7 @@ static void EnsureMutableBox(FlowGraphCompiler* compiler,
void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
- ASSERT(sizeof(classid_t) == kInt16Size);
+ ASSERT(sizeof(classid_t) == kInt32Size);
Label skip_store;
Register instance_reg = locs()->in(0).reg();
@@ -1904,7 +1904,7 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ LoadObject(temp, Field::ZoneHandle(Z, field().Original()));
- __ cmpw(FieldAddress(temp, Field::is_nullable_offset()),
+ __ cmpl(FieldAddress(temp, Field::is_nullable_offset()),
Immediate(kNullCid));
__ j(EQUAL, &store_pointer);
@@ -1912,15 +1912,15 @@ void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ testq(temp2, Immediate(1 << Field::kUnboxingCandidateBit));
__ j(ZERO, &store_pointer);
- __ cmpw(FieldAddress(temp, Field::guarded_cid_offset()),
+ __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
Immediate(kDoubleCid));
__ j(EQUAL, &store_double);
- __ cmpw(FieldAddress(temp, Field::guarded_cid_offset()),
+ __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
Immediate(kFloat32x4Cid));
__ j(EQUAL, &store_float32x4);
- __ cmpw(FieldAddress(temp, Field::guarded_cid_offset()),
+ __ cmpl(FieldAddress(temp, Field::guarded_cid_offset()),
Immediate(kFloat64x2Cid));
__ j(EQUAL, &store_float64x2);
@@ -2190,7 +2190,7 @@ LocationSummary* LoadFieldInstr::MakeLocationSummary(Zone* zone,
void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
- ASSERT(sizeof(classid_t) == kInt16Size);
+ ASSERT(sizeof(classid_t) == kInt32Size);
Register instance_reg = locs()->in(0).reg();
if (IsUnboxedLoad() && compiler->is_optimizing()) {
XmmRegister result = locs()->out(0).fpu_reg();
@@ -2229,19 +2229,20 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
__ LoadObject(result, Field::ZoneHandle(field()->Original()));
- FieldAddress field_cid_operand(result, Field::guarded_cid_offset());
- FieldAddress field_nullability_operand(result, Field::is_nullable_offset());
-
- __ cmpw(field_nullability_operand, Immediate(kNullCid));
+ __ cmpl(FieldAddress(result, Field::is_nullable_offset()),
+ Immediate(kNullCid));
__ j(EQUAL, &load_pointer);
- __ cmpw(field_cid_operand, Immediate(kDoubleCid));
+ __ cmpl(FieldAddress(result, Field::guarded_cid_offset()),
+ Immediate(kDoubleCid));
__ j(EQUAL, &load_double);
- __ cmpw(field_cid_operand, Immediate(kFloat32x4Cid));
+ __ cmpl(FieldAddress(result, Field::guarded_cid_offset()),
+ Immediate(kFloat32x4Cid));
__ j(EQUAL, &load_float32x4);
- __ cmpw(field_cid_operand, Immediate(kFloat64x2Cid));
+ __ cmpl(FieldAddress(result, Field::guarded_cid_offset()),
+ Immediate(kFloat64x2Cid));
__ j(EQUAL, &load_float64x2);
// Fall through.
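
Not part of the patch above: a minimal standalone C++ sketch of why the 16-bit cmpw/movw accesses to the class-id fields have to become 32-bit cmpl/movl once sizeof(classid_t) is kInt32Size. A 16-bit access only sees the low half of the field, so any class id that does not fit in 16 bits would be silently truncated. The struct, field names, and example value below are illustrative only (they are not the VM's actual Field layout), and the low-half behaviour assumes a little-endian target such as x64.

// Illustrative sketch only -- not VM code.  FieldGuardState is a hypothetical
// stand-in for the guarded_cid / is_nullable fields of Field.
#include <cstdint>
#include <cstring>
#include <iostream>

typedef int32_t classid_t;  // sizeof(classid_t) == kInt32Size after this change

struct FieldGuardState {
  classid_t guarded_cid;
  classid_t is_nullable;
};

int main() {
  FieldGuardState state = {70000, 0};  // a class id that needs more than 16 bits

  // What a 16-bit access (cmpw/movw) would observe: only the low half,
  // so 70000 is truncated to 70000 % 65536 == 4464 (little-endian x64).
  uint16_t cid16;
  std::memcpy(&cid16, &state.guarded_cid, sizeof(cid16));

  // What a 32-bit access (cmpl/movl) observes: the full class id.
  uint32_t cid32;
  std::memcpy(&cid32, &state.guarded_cid, sizeof(cid32));

  std::cout << "16-bit view: " << cid16 << "  32-bit view: " << cid32 << "\n";
  return 0;
}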