Index: runtime/vm/assembler_arm64.cc
diff --git a/runtime/vm/assembler_arm64.cc b/runtime/vm/assembler_arm64.cc
index 7b002148648b4e3e22396b13030158e6d7ced3c7..9452315cd5e201bee661c71b37732bde1de4d378 100644
--- a/runtime/vm/assembler_arm64.cc
+++ b/runtime/vm/assembler_arm64.cc
@@ -1322,6 +1322,31 @@ void Assembler::UpdateAllocationStatsWithSize(intptr_t cid,
 }
 
 
+void Assembler::MaybeTraceAllocation(intptr_t cid,
+                                     Register temp_reg,
+                                     Register pp,
+                                     Label* trace) {
+  ASSERT(cid > 0);
+  Isolate* isolate = Isolate::Current();
+  ClassTable* class_table = isolate->class_table();
+  const uword class_offset = cid * sizeof(ClassHeapStats);  // NOLINT
+  if (cid < kNumPredefinedCids) {
+    const uword class_heap_stats_table_address =
+        class_table->PredefinedClassHeapStatsTableAddress();
+    LoadImmediate(temp_reg, class_heap_stats_table_address + class_offset, pp);
+  } else {
+    LoadImmediate(temp_reg, class_table->ClassStatsTableAddress(), pp);
+    ldr(temp_reg, Address(temp_reg, 0));
+    AddImmediate(temp_reg, temp_reg, class_offset, pp);
+  }
+  const uword state_offset = ClassHeapStats::state_offset();
+  const Address& state_address = Address(temp_reg, state_offset);
+  ldr(temp_reg, state_address);
+  tsti(temp_reg, Immediate(ClassHeapStats::TraceAllocationMask()));
+  b(trace, NE);
+}
+
+
 void Assembler::TryAllocate(const Class& cls,
                             Label* failure,
                             Register instance_reg,
@@ -1329,6 +1354,10 @@ void Assembler::TryAllocate(const Class& cls,
                             Register pp) {
   ASSERT(failure != NULL);
   if (FLAG_inline_alloc) {
+    // If this allocation is traced, the program will jump to the failure
+    // path (i.e. the allocation stub), which will allocate the object and
+    // trace the allocation call site.
+    MaybeTraceAllocation(cls.id(), temp_reg, pp, failure);
     const intptr_t instance_size = cls.instance_size();
     Heap* heap = Isolate::Current()->heap();
     Heap::Space space = heap->SpaceForAllocation(cls.id());
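
Editor's note: below is a minimal C++ sketch of the runtime computation that
the MaybeTraceAllocation hunk above emits, to make its two addressing cases
easier to review. ClassHeapStats, kNumPredefinedCids, and the two
table-address getters are the names used in the hunk; the struct layout, the
table symbols, and both helper functions here are illustrative assumptions,
not VM API.

#include <cstdint>

// Stand-in layout: the real ClassHeapStats has more fields; only the state
// word and its trace bit matter for this sketch.
struct ClassHeapStats {
  std::uintptr_t state;
  static std::uintptr_t TraceAllocationMask() { return 1; }
};

constexpr std::intptr_t kNumPredefinedCids = 512;  // assumed value
extern std::uintptr_t predefined_table_base;       // assumed symbol
extern ClassHeapStats** stats_table_pointer;       // assumed symbol

// Mirrors the two cases in the hunk: a predefined cid indexes a table whose
// address can be baked into the generated code; a dynamic cid first loads
// the current table pointer (the table can move when it grows), then
// indexes into it.
std::uintptr_t ClassHeapStatsAddress(std::intptr_t cid) {
  const std::uintptr_t class_offset = cid * sizeof(ClassHeapStats);
  if (cid < kNumPredefinedCids) {
    return predefined_table_base + class_offset;
  }
  return reinterpret_cast<std::uintptr_t>(*stats_table_pointer) +
         class_offset;
}

// The emitted tail: load the state word and report whether the trace bit is
// set; the assembler version branches to `trace` instead of returning.
bool ShouldTraceAllocation(std::intptr_t cid) {
  const ClassHeapStats* stats =
      reinterpret_cast<const ClassHeapStats*>(ClassHeapStatsAddress(cid));
  return (stats->state & ClassHeapStats::TraceAllocationMask()) != 0;
}

The split on kNumPredefinedCids matters because the predefined slice of the
stats table sits at a fixed address, while the dynamically grown slice can be
reallocated as classes load, hence the extra load through
ClassStatsTableAddress() in the else branch.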
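Editor's note: for context, a hedged sketch of the TryAllocate fast path
after this change. Only the placement of the trace check and the semantics of
the failure label come from the diff; the bump-pointer fields and
AllocateViaStub below are stand-ins for the surrounding code that this
excerpt truncates.

#include <cstdint>

extern std::uintptr_t heap_top;  // assumed: next free address
extern std::uintptr_t heap_end;  // assumed: end of allocation region
bool ShouldTraceAllocation(std::intptr_t cid);  // from the sketch above
extern std::uintptr_t AllocateViaStub(std::intptr_t cid,
                                      std::intptr_t size);  // slow path

// Fast path mirroring the emitted code: a traced class id takes the same
// exit as an exhausted heap, namely the allocation stub behind `failure`,
// which both allocates the object and records the call site.
std::uintptr_t TryAllocate(std::intptr_t cid, std::intptr_t instance_size) {
  if (ShouldTraceAllocation(cid)) {
    return AllocateViaStub(cid, instance_size);  // traced: skip inline path
  }
  if (heap_top + instance_size > heap_end) {
    return AllocateViaStub(cid, instance_size);  // out of space
  }
  const std::uintptr_t instance = heap_top;
  heap_top += instance_size;  // bump the allocation cursor
  return instance;            // header/tagging of the result omitted
}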