Index: runtime/vm/intrinsifier_arm.cc
diff --git a/runtime/vm/intrinsifier_arm.cc b/runtime/vm/intrinsifier_arm.cc
index 365be150f9ea9dd9d3039c17db3caaf229ac0dd8..1898445573c045f74de06ce15b4ddc9d9f58bea0 100644
--- a/runtime/vm/intrinsifier_arm.cc
+++ b/runtime/vm/intrinsifier_arm.cc
@@ -176,9 +176,8 @@ void Intrinsifier::GrowableArray_add(Assembler* assembler) {
   __ CompareImmediate(R2, max_len); \
   __ b(&fall_through, GT); \
   __ mov(R2, Operand(R2, LSL, scale_shift)); \
-  const intptr_t fixed_size_plus_alignment_padding = \
-      sizeof(Raw##type_name) + kObjectAlignment - 1; \
-  __ AddImmediate(R2, fixed_size_plus_alignment_padding); \
+  const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1; \
+  __ AddImmediate(R2, fixed_size); \
   __ bic(R2, R2, Operand(kObjectAlignment - 1)); \
   Heap::Space space = Heap::kNew; \
   __ ldr(R3, Address(THR, Thread::heap_offset())); \
@@ -1998,9 +1997,8 @@ static void TryAllocateOnebyteString(Assembler* assembler,
   __ mov(R8, Operand(length_reg));  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
-  const intptr_t fixed_size_plus_alignment_padding =
-      sizeof(RawString) + kObjectAlignment - 1;
-  __ AddImmediate(length_reg, fixed_size_plus_alignment_padding);
+  const intptr_t fixed_size = sizeof(RawString) + kObjectAlignment - 1;
+  __ AddImmediate(length_reg, fixed_size);
   __ bic(length_reg, length_reg, Operand(kObjectAlignment - 1));
   const intptr_t cid = kOneByteStringCid;
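
Apart from the shorter constant name, both hunks leave the emitted code unchanged: the generated ARM sequence still rounds the allocation size up to the object alignment by adding the fixed header size plus (kObjectAlignment - 1) with AddImmediate and then clearing the low bits with bic. The standalone C++ sketch below mirrors that arithmetic only; the constant values, the AllocationSize helper, and the printed example are illustrative assumptions, not the VM's actual numbers or API.

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins; the real values come from the VM's kObjectAlignment
// and sizeof(RawString) / sizeof(Raw##type_name).
constexpr std::intptr_t kObjectAlignment = 8;
constexpr std::intptr_t kFixedHeaderSize = 12;

// Mirrors the add-then-bic sequence emitted in the hunks above.
std::intptr_t AllocationSize(std::intptr_t payload_bytes) {
  // __ AddImmediate(length_reg, fixed_size);  -- add header plus (alignment - 1)
  std::intptr_t size = payload_bytes + kFixedHeaderSize + kObjectAlignment - 1;
  // __ bic(length_reg, length_reg, Operand(kObjectAlignment - 1));
  // clearing the low bits rounds the total up to the next alignment boundary
  return size & ~(kObjectAlignment - 1);
}

int main() {
  // 5 payload bytes + 12-byte header = 17, rounded up to 24 with these constants.
  std::printf("%ld\n", static_cast<long>(AllocationSize(5)));
  return 0;
}

The add-and-mask idiom is used here because it rounds up without a branch and works for any length already sitting in a register, which keeps the intrinsic's fast path short.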