Index: runtime/vm/assembler_arm64.cc
diff --git a/runtime/vm/assembler_arm64.cc b/runtime/vm/assembler_arm64.cc
index 9c3aa71da779fabc85e8099ec265552d60a73ab1..68184bc74b825bbd6d26373eda08559d0de881c9 100644
--- a/runtime/vm/assembler_arm64.cc
+++ b/runtime/vm/assembler_arm64.cc
@@ -1324,33 +1324,29 @@ void Assembler::TryAllocate(const Class& cls,
   // If this allocation is traced, program will jump to failure path
   // (i.e. the allocation stub) which will allocate the object and trace the
   // allocation call site.
-  MaybeTraceAllocation(cls.id(), temp_reg, failure);
+  MaybeTraceAllocation(cls.id(), temp_reg, failure,
+                       /* inline_isolate = */ false);
   const intptr_t instance_size = cls.instance_size();
-  Heap* heap = Isolate::Current()->heap();
-  Heap::Space space = heap->SpaceForAllocation(cls.id());
-  const uword top_address = heap->TopAddress(space);
-  LoadImmediate(temp_reg, top_address);
-  ldr(instance_reg, Address(temp_reg));
+  Heap::Space space = Heap::SpaceForAllocation(cls.id());
+  ldr(temp_reg, Address(THR, Thread::heap_offset()));
+  ldr(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
   // TODO(koda): Protect against unsigned overflow here.
   AddImmediateSetFlags(instance_reg, instance_reg, instance_size);

   // instance_reg: potential next object start.
-  const uword end_address = heap->EndAddress(space);
-  ASSERT(top_address < end_address);
-  // Could use ldm to load (top, end), but no benefit seen experimentally.
-  ldr(TMP, Address(temp_reg, end_address - top_address));
+  ldr(TMP, Address(temp_reg, Heap::EndOffset(space)));
   CompareRegisters(TMP, instance_reg);
   // fail if heap end unsigned less than or equal to instance_reg.
   b(failure, LS);

   // Successfully allocated the object, now update top to point to
   // next object start and store the class in the class field of object.
-  str(instance_reg, Address(temp_reg));
+  str(instance_reg, Address(temp_reg, Heap::TopOffset(space)));

   ASSERT(instance_size >= kHeapObjectTag);
   AddImmediate(
       instance_reg, instance_reg, -instance_size + kHeapObjectTag);
-  UpdateAllocationStats(cls.id(), space);
+  UpdateAllocationStats(cls.id(), space, /* inline_isolate = */ false);

   uword tags = 0;
   tags = RawObject::SizeTag::update(instance_size, tags);
@@ -1375,29 +1371,28 @@ void Assembler::TryAllocateArray(intptr_t cid,
   // If this allocation is traced, program will jump to failure path
   // (i.e. the allocation stub) which will allocate the object and trace the
   // allocation call site.
-  MaybeTraceAllocation(cid, temp1, failure);
-  Isolate* isolate = Isolate::Current();
-  Heap* heap = isolate->heap();
-  Heap::Space space = heap->SpaceForAllocation(cid);
-  LoadImmediate(temp1, heap->TopAddress(space));
-  ldr(instance, Address(temp1, 0));  // Potential new object start.
+  MaybeTraceAllocation(cid, temp1, failure, /* inline_isolate = */ false);
+  Heap::Space space = Heap::SpaceForAllocation(cid);
+  ldr(temp1, Address(THR, Thread::heap_offset()));
+  // Potential new object start.
+  ldr(instance, Address(temp1, Heap::TopOffset(space)));
   AddImmediateSetFlags(end_address, instance, instance_size);
   b(failure, CS);  // Fail on unsigned overflow.

   // Check if the allocation fits into the remaining space.
   // instance: potential new object start.
   // end_address: potential next object start.
-  LoadImmediate(temp2, heap->EndAddress(space));
-  ldr(temp2, Address(temp2, 0));
+  ldr(temp2, Address(temp1, Heap::EndOffset(space)));
   cmp(end_address, Operand(temp2));
   b(failure, CS);

   // Successfully allocated the object(s), now update top to point to
   // next object start and initialize the object.
-  str(end_address, Address(temp1, 0));
+  str(end_address, Address(temp1, Heap::TopOffset(space)));
   add(instance, instance, Operand(kHeapObjectTag));
   LoadImmediate(temp2, instance_size);
-  UpdateAllocationStatsWithSize(cid, temp2, space);
+  UpdateAllocationStatsWithSize(cid, temp2, space,
+                                /* inline_isolate = */ false);

   // Initialize the tags.
   // instance: new object start as a tagged pointer.
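Note (editorial, not part of the patch): the change replaces isolate-specific absolute addresses with thread-relative loads. Before, the generated code embedded the current isolate's heap top/end addresses as immediates, so the code was only valid for that isolate; after, it loads the Heap pointer from the running Thread (through the reserved THR register and Thread::heap_offset()) and reaches the bump-allocation top/end words via the compile-time-constant offsets Heap::TopOffset(space) and Heap::EndOffset(space). The sketch below is a minimal C++ illustration of the fast path the emitted ARM64 code follows; the Thread/Heap layouts, field names, and the helper itself are hypothetical and do not match the VM's real declarations.

// Hypothetical sketch only: models the inline bump-allocation fast path
// that the emitted code implements after this change. Thread and Heap
// layouts here are invented for illustration.
#include <cstddef>
#include <cstdint>

struct Heap {
  uintptr_t top;  // next free address in the allocation space
  uintptr_t end;  // end of the allocation space
};

struct Thread {
  Heap* heap;  // reachable from the reserved THR register in generated code
};

static const uintptr_t kHeapObjectTag = 1;  // tagged-pointer bit, as in the VM

// Returns a tagged pointer on success, 0 on failure (the allocation-stub path).
uintptr_t TryAllocate(Thread* thread, size_t instance_size) {
  Heap* heap = thread->heap;               // ldr temp_reg, [THR, #heap_offset]
  uintptr_t start = heap->top;             // ldr instance_reg, [temp_reg, #TopOffset]
  uintptr_t next = start + instance_size;  // AddImmediateSetFlags
  if (heap->end <= next) return 0;         // ldr TMP, [temp_reg, #EndOffset]; cmp; b.ls failure
  heap->top = next;                        // str instance_reg, [temp_reg, #TopOffset]
  return start + kHeapObjectTag;           // AddImmediate(-instance_size + kHeapObjectTag)
}

TryAllocateArray in the second hunk follows the same pattern, but additionally branches to the failure path on unsigned overflow of the size addition (b(failure, CS)) and keeps the untagged end_address for the initialization loop that follows.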