Index: runtime/vm/assembler_arm.cc
diff --git a/runtime/vm/assembler_arm.cc b/runtime/vm/assembler_arm.cc
index 663baafbc531288b9612b0a80bf0912e21c5d411..18697a80ed9d4725f6b54bbbafc060db012ee24c 100644
--- a/runtime/vm/assembler_arm.cc
+++ b/runtime/vm/assembler_arm.cc
@@ -2641,9 +2641,96 @@ void Assembler::LeaveStubFrame() {
 }
 
 
+void Assembler::UpdateAllocationStats(intptr_t cid,
+                                      Register temp_reg,
+                                      Heap::Space space) {
+  ASSERT(temp_reg != kNoRegister);
+  ASSERT(temp_reg != TMP);
+  ASSERT(cid > 0);
+  Isolate* isolate = Isolate::Current();
+  ClassTable* class_table = isolate->class_table();
+  if (cid < kNumPredefinedCids) {
+    const uword class_heap_stats_table_address =
+        class_table->PredefinedClassHeapStatsTableAddress();
+    const uword class_offset = cid * sizeof(ClassHeapStats);  // NOLINT
+    const uword count_field_offset = (space == Heap::kNew) ?
+        ClassHeapStats::allocated_since_gc_new_space_offset() :
+        ClassHeapStats::allocated_since_gc_old_space_offset();
+    LoadImmediate(temp_reg, class_heap_stats_table_address + class_offset);
+    const Address& count_address = Address(temp_reg, count_field_offset);
+    ldr(TMP, count_address);
+    AddImmediate(TMP, 1);
+    str(TMP, count_address);
+  } else {
+    ASSERT(temp_reg != kNoRegister);
+    const uword class_offset = cid * sizeof(ClassHeapStats);  // NOLINT
+    const uword count_field_offset = (space == Heap::kNew) ?
+        ClassHeapStats::allocated_since_gc_new_space_offset() :
+        ClassHeapStats::allocated_since_gc_old_space_offset();
+    LoadImmediate(temp_reg, class_table->ClassStatsTableAddress());
+    ldr(temp_reg, Address(temp_reg, 0));
+    AddImmediate(temp_reg, class_offset);
+    ldr(TMP, Address(temp_reg, count_field_offset));
+    AddImmediate(TMP, 1);
+    str(TMP, Address(temp_reg, count_field_offset));
+  }
+}
+
+
+void Assembler::UpdateAllocationStatsWithSize(intptr_t cid,
+                                              Register size_reg,
+                                              Register temp_reg,
+                                              Heap::Space space) {
+  ASSERT(temp_reg != kNoRegister);
+  ASSERT(temp_reg != TMP);
+  ASSERT(cid > 0);
+  Isolate* isolate = Isolate::Current();
+  ClassTable* class_table = isolate->class_table();
+  if (cid < kNumPredefinedCids) {
+    const uword class_heap_stats_table_address =
+        class_table->PredefinedClassHeapStatsTableAddress();
+    const uword class_offset = cid * sizeof(ClassHeapStats);  // NOLINT
+    const uword count_field_offset = (space == Heap::kNew) ?
+        ClassHeapStats::allocated_since_gc_new_space_offset() :
+        ClassHeapStats::allocated_since_gc_old_space_offset();
+    const uword size_field_offset = (space == Heap::kNew) ?
+        ClassHeapStats::allocated_size_since_gc_new_space_offset() :
+        ClassHeapStats::allocated_size_since_gc_old_space_offset();
+    LoadImmediate(temp_reg, class_heap_stats_table_address + class_offset);
+    const Address& count_address = Address(temp_reg, count_field_offset);
+    const Address& size_address = Address(temp_reg, size_field_offset);
+    ldr(TMP, count_address);
+    AddImmediate(TMP, 1);
+    str(TMP, count_address);
+    ldr(TMP, size_address);
+    add(TMP, TMP, ShifterOperand(size_reg));
+    str(TMP, size_address);
+  } else {
+    ASSERT(temp_reg != kNoRegister);
+    const uword class_offset = cid * sizeof(ClassHeapStats);  // NOLINT
+    const uword count_field_offset = (space == Heap::kNew) ?
+        ClassHeapStats::allocated_since_gc_new_space_offset() :
+        ClassHeapStats::allocated_since_gc_old_space_offset();
+    const uword size_field_offset = (space == Heap::kNew) ?
+        ClassHeapStats::allocated_size_since_gc_new_space_offset() :
+        ClassHeapStats::allocated_size_since_gc_old_space_offset();
+    LoadImmediate(temp_reg, class_table->ClassStatsTableAddress());
+    ldr(temp_reg, Address(temp_reg, 0));
+    AddImmediate(temp_reg, class_offset);
+    ldr(TMP, Address(temp_reg, count_field_offset));
+    AddImmediate(TMP, 1);
+    str(TMP, Address(temp_reg, count_field_offset));
+    ldr(TMP, Address(temp_reg, size_field_offset));
+    add(TMP, TMP, ShifterOperand(size_reg));
+    str(TMP, Address(temp_reg, size_field_offset));
+  }
+}
+
+
 void Assembler::TryAllocate(const Class& cls,
                             Label* failure,
-                            Register instance_reg) {
+                            Register instance_reg,
+                            Register temp_reg) {
   ASSERT(failure != NULL);
   if (FLAG_inline_alloc) {
     Heap* heap = Isolate::Current()->heap();
@@ -2666,6 +2753,7 @@ void Assembler::TryAllocate(const Class& cls,
 
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
+    UpdateAllocationStats(cls.id(), temp_reg);
 
     uword tags = 0;
     tags = RawObject::SizeTag::update(instance_size, tags);
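
As a rough sketch of what the two new routines compute (C++ at the source level, not part of the patch; FakeClassHeapStats, its fields, and UpdateStats below are simplified stand-ins rather than real VM declarations): the per-class record lives at table_base + cid * sizeof(ClassHeapStats), and the emitted code bumps the count (and, in the WithSize variant, the size) field for the chosen space. For predefined cids the table base is known at code-generation time and folded into a single LoadImmediate; for other cids the code first loads the current table pointer via ClassTable::ClassStatsTableAddress(), presumably because that table can be reallocated as classes are registered.

#include <cstdint>

// Simplified stand-in for ClassHeapStats; field names are illustrative only.
struct FakeClassHeapStats {
  uintptr_t allocated_since_gc_new_space;
  uintptr_t allocated_since_gc_old_space;
  uintptr_t allocated_size_since_gc_new_space;
  uintptr_t allocated_size_since_gc_old_space;
};

enum Space { kNew, kOld };

// Source-level equivalent of UpdateAllocationStatsWithSize: index the stats
// table by class id, then increment the count and size fields for the chosen
// space. The emitted ARM sequence does the same with LoadImmediate/ldr/add/str,
// using TMP as the scratch register for each read-modify-write.
void UpdateStats(FakeClassHeapStats* table_base,
                 intptr_t cid,
                 Space space,
                 uintptr_t instance_size) {
  FakeClassHeapStats* entry = table_base + cid;  // base + cid * sizeof(record)
  if (space == kNew) {
    entry->allocated_since_gc_new_space += 1;
    entry->allocated_size_since_gc_new_space += instance_size;
  } else {
    entry->allocated_since_gc_old_space += 1;
    entry->allocated_size_since_gc_old_space += instance_size;
  }
}

The TryAllocate change simply threads a caller-supplied scratch register into this bookkeeping: temp_reg must differ from TMP (asserted above), and existing callers of TryAllocate will need to pass that extra register.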