Index: runtime/vm/assembler_x64.cc
diff --git a/runtime/vm/assembler_x64.cc b/runtime/vm/assembler_x64.cc
index da4021a02e37d1b3bb1ba953afc69fe733d2f6ea..24ffd1c5cab5e13a60eeda61ea605e760715a8bb 100644
--- a/runtime/vm/assembler_x64.cc
+++ b/runtime/vm/assembler_x64.cc
@@ -3624,12 +3624,12 @@ void Assembler::EmitGenericShift(bool wide,
 void Assembler::LoadClassId(Register result, Register object) {
-  ASSERT(RawObject::kClassIdTagPos == kBitsPerInt32);
-  ASSERT(RawObject::kClassIdTagSize == kBitsPerInt32);
-  ASSERT(sizeof(classid_t) == sizeof(uint32_t));
+  ASSERT(RawObject::kClassIdTagPos == 16);
+  ASSERT(RawObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(classid_t) == sizeof(uint16_t));
   const intptr_t class_id_offset =
       Object::tags_offset() + RawObject::kClassIdTagPos / kBitsPerByte;
-  movl(result, FieldAddress(object, class_id_offset));
+  movzxw(result, FieldAddress(object, class_id_offset));
 }
@@ -3659,9 +3659,9 @@ void Assembler::SmiUntagOrCheckClass(Register object,
                                      intptr_t class_id,
                                      Label* is_smi) {
   ASSERT(kSmiTagShift == 1);
-  ASSERT(RawObject::kClassIdTagPos == kBitsPerInt32);
-  ASSERT(RawObject::kClassIdTagSize == kBitsPerInt32);
-  ASSERT(sizeof(classid_t) == sizeof(uint32_t));
+  ASSERT(RawObject::kClassIdTagPos == 16);
+  ASSERT(RawObject::kClassIdTagSize == 16);
+  ASSERT(sizeof(classid_t) == sizeof(uint16_t));
   const intptr_t class_id_offset =
       Object::tags_offset() + RawObject::kClassIdTagPos / kBitsPerByte;
@@ -3670,7 +3670,7 @@ void Assembler::SmiUntagOrCheckClass(Register object,
   j(NOT_CARRY, is_smi, kNearJump);
   // Load cid: can't use LoadClassId, object is untagged. Use TIMES_2 scale
   // factor in the addressing mode to compensate for this.
-  movl(TMP, Address(object, TIMES_2, class_id_offset));
+  movzxw(TMP, Address(object, TIMES_2, class_id_offset));
   cmpl(TMP, Immediate(class_id));
 }