| Index: runtime/vm/assembler_mips.cc
|
| diff --git a/runtime/vm/assembler_mips.cc b/runtime/vm/assembler_mips.cc
|
| index 61469da96d4ddf2c76fb27c44e5584423761851a..c753a3a5d80196d0eca029f46097e5939555e5e7 100644
|
| --- a/runtime/vm/assembler_mips.cc
|
| +++ b/runtime/vm/assembler_mips.cc
|
| @@ -681,9 +681,98 @@ void Assembler::LeaveStubFrameAndReturn(Register ra) {
|
| }
|
|
|
|
|
| +void Assembler::BumpAllocationCount(Heap::Space space,
|
| + intptr_t cid,
|
| + Register temp_reg) {
|
| + ASSERT(temp_reg != kNoRegister);
|
| + ASSERT(temp_reg != TMP);
|
| + ASSERT(cid > 0);
|
| + Isolate* isolate = Isolate::Current();
|
| + ClassTable* class_table = isolate->class_table();
|
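| + // Stats for predefined classes live in a table at a fixed address,
|
| + // so the entry's address folds into a single immediate.
|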
| + if (cid < kNumPredefinedCids) {
|
| + const uword class_heap_stats_table_address =
|
| + class_table->PredefinedClassHeapStatsTableAddress();
|
| + const uword class_offset = cid * sizeof(ClassHeapStats); // NOLINT
|
| + const uword count_field_offset = (space == Heap::kNew) ?
|
| + ClassHeapStats::new_count_since_gc_new_space_offset() :
|
| + ClassHeapStats::new_count_since_gc_old_space_offset();
|
| + LoadImmediate(temp_reg, class_heap_stats_table_address + class_offset);
|
| + const Address& count_address = Address(temp_reg, count_field_offset);
|
| + lw(TMP, count_address);
|
| + AddImmediate(TMP, 1);
|
| + sw(TMP, count_address);
|
| + } else {
|
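| + // The user class stats table can grow; load its current base first.
|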
| + const uword class_offset = cid * sizeof(ClassHeapStats); // NOLINT
|
| + const uword count_field_offset = (space == Heap::kNew) ?
|
| + ClassHeapStats::new_count_since_gc_new_space_offset() :
|
| + ClassHeapStats::new_count_since_gc_old_space_offset();
|
| + LoadImmediate(temp_reg, class_table->ClassStatsTableAddress());
|
| + lw(temp_reg, Address(temp_reg, 0));
|
| + AddImmediate(temp_reg, class_offset);
|
| + lw(TMP, Address(temp_reg, count_field_offset));
|
| + AddImmediate(TMP, 1);
|
| + sw(TMP, Address(temp_reg, count_field_offset));
|
| + }
|
| +}
|
| +
|
| +
|
| +void Assembler::BumpAllocationCount(Heap::Space space,
|
| + intptr_t cid,
|
| + Register size_reg,
|
| + Register temp_reg) {
|
| + ASSERT(temp_reg != kNoRegister);
|
| + ASSERT(cid > 0);
|
| + ASSERT(temp_reg != TMP);
|
| + Isolate* isolate = Isolate::Current();
|
| + ClassTable* class_table = isolate->class_table();
|
| + if (cid < kNumPredefinedCids) {
|
| + const uword class_heap_stats_table_address =
|
| + class_table->PredefinedClassHeapStatsTableAddress();
|
| + const uword class_offset = cid * sizeof(ClassHeapStats); // NOLINT
|
| + const uword count_field_offset = (space == Heap::kNew) ?
|
| + ClassHeapStats::new_count_since_gc_new_space_offset() :
|
| + ClassHeapStats::new_count_since_gc_old_space_offset();
|
| + const uword size_field_offset = (space == Heap::kNew) ?
|
| + ClassHeapStats::new_size_since_gc_new_space_offset() :
|
| + ClassHeapStats::new_size_since_gc_old_space_offset();
|
| + LoadImmediate(temp_reg, class_heap_stats_table_address + class_offset);
|
| + const Address& count_address = Address(temp_reg, count_field_offset);
|
| + const Address& size_address = Address(temp_reg, size_field_offset);
|
| + lw(TMP, count_address);
|
| + AddImmediate(TMP, 1);
|
| + sw(TMP, count_address);
|
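| + // Also accumulate the allocated size (size_reg) in bytes.
|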
| + lw(TMP, size_address);
|
| + addu(TMP, TMP, size_reg);
|
| + sw(TMP, size_address);
|
| + } else {
|
| + const uword class_offset = cid * sizeof(ClassHeapStats); // NOLINT
|
| + const uword count_field_offset = (space == Heap::kNew) ?
|
| + ClassHeapStats::new_count_since_gc_new_space_offset() :
|
| + ClassHeapStats::new_count_since_gc_old_space_offset();
|
| + const uword size_field_offset = (space == Heap::kNew) ?
|
| + ClassHeapStats::new_size_since_gc_new_space_offset() :
|
| + ClassHeapStats::new_size_since_gc_old_space_offset();
|
| + LoadImmediate(temp_reg, class_table->ClassStatsTableAddress());
|
| + lw(temp_reg, Address(temp_reg, 0));
|
| + AddImmediate(temp_reg, class_offset);
|
| + lw(TMP, Address(temp_reg, count_field_offset));
|
| + AddImmediate(TMP, 1);
|
| + sw(TMP, Address(temp_reg, count_field_offset));
|
| + lw(TMP, Address(temp_reg, size_field_offset));
|
| + addu(TMP, TMP, size_reg);
|
| + sw(TMP, Address(temp_reg, size_field_offset));
|
| + }
|
| +}
|
| +
|
| +
|
| void Assembler::TryAllocate(const Class& cls,
|
| Label* failure,
|
| - Register instance_reg) {
|
| + Register instance_reg,
|
| + Register temp_reg) {
|
| ASSERT(failure != NULL);
|
| if (FLAG_inline_alloc) {
|
| Heap* heap = Isolate::Current()->heap();
|
| @@ -705,7 +794,8 @@ void Assembler::TryAllocate(const Class& cls,
|
|
|
| ASSERT(instance_size >= kHeapObjectTag);
|
| AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
|
| -
|
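| + // Allocation succeeded; count it against cls in new space.
|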
| + BumpAllocationCount(Heap::kNew, cls.id(), temp_reg);
|
| uword tags = 0;
|
| tags = RawObject::SizeTag::update(instance_size, tags);
|
| ASSERT(cls.id() != kIllegalCid);
|
|
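| Note: each BumpAllocationCount variant emits a plain read-modify-write of
|
| one ClassHeapStats slot. Below is a minimal C++ sketch of the runtime
|
| effect; the field names are hypothetical stand-ins for the offset
|
| accessors used above (see ClassHeapStats in runtime/vm/class_table.h for
|
| the real layout):
|
|   // Sketch only: hypothetical mirror of the counters the stub touches.
|
|   struct ClassHeapStatsSketch {
|
|     intptr_t new_count_since_gc_new_space;  // +1 per allocation
|
|     intptr_t new_size_since_gc_new_space;   // +object size in bytes
|
|   };
|
|   void BumpNewSpaceStats(ClassHeapStatsSketch* table, intptr_t cid,
|
|                          intptr_t size) {
|
|     // &table[cid] is table + cid * sizeof(entry), as in the stub code.
|
|     ClassHeapStatsSketch* entry = &table[cid];
|
|     entry->new_count_since_gc_new_space += 1;    // lw / AddImmediate / sw
|
|     entry->new_size_since_gc_new_space += size;  // lw / addu / sw
|
|   }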
|