Index: runtime/vm/stub_code_arm.cc
===================================================================
--- runtime/vm/stub_code_arm.cc (revision 40518)
+++ runtime/vm/stub_code_arm.cc (working copy)
@@ -637,8 +637,9 @@
 
   Isolate* isolate = Isolate::Current();
   Heap* heap = isolate->heap();
-
-  __ LoadImmediate(R6, heap->TopAddress());
+  const intptr_t cid = kArrayCid;
+  Heap::Space space = heap->SpaceForAllocation(cid);
+  __ LoadImmediate(R6, heap->TopAddress(space));
   __ ldr(R0, Address(R6, 0));  // Potential new object start.
   __ adds(R7, R0, Operand(R8));  // Potential next object start.
   __ b(&slow_case, VS);
@@ -647,7 +648,7 @@
   // R0: potential new object start.
   // R7: potential next object start.
   // R8: allocation size.
-  __ LoadImmediate(R3, heap->EndAddress());
+  __ LoadImmediate(R3, heap->EndAddress(space));
   __ ldr(R3, Address(R3, 0));
   __ cmp(R7, Operand(R3));
   __ b(&slow_case, CS);
@@ -656,7 +657,7 @@
   // next object start and initialize the object.
   __ str(R7, Address(R6, 0));
   __ add(R0, R0, Operand(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(kArrayCid, R8, R4);
+  __ UpdateAllocationStatsWithSize(cid, R8, R4, space);
 
   // Initialize the tags.
   // R0: new object start as a tagged pointer.
@@ -664,7 +665,6 @@
   // R8: allocation size.
   {
     const intptr_t shift = RawObject::kSizeTagPos - kObjectAlignmentLog2;
-    const Class& cls = Class::Handle(isolate->object_store()->array_class());
 
     __ CompareImmediate(R8, RawObject::SizeTag::kMaxSizeTag);
     __ mov(R8, Operand(R8, LSL, shift), LS);
@@ -672,7 +672,7 @@
 
     // Get the class index and insert it into the tags.
     // R8: size and bit tags.
-    __ LoadImmediate(TMP, RawObject::ClassIdTag::encode(cls.id()));
+    __ LoadImmediate(TMP, RawObject::ClassIdTag::encode(cid));
    __ orr(R8, R8, Operand(TMP));
    __ str(R8, FieldAddress(R0, Array::tags_offset()));  // Store tags.
   }
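A note for readers skimming the patch: the stub code above is a bump-pointer fast path, and the functional change is only that the top/end words now come from the space chosen by heap->SpaceForAllocation(kArrayCid) (presumably new or old space) instead of implicitly from new space, with that same space passed on to the allocation-stats update. A minimal C++ sketch of what the emitted ARM code does, using stand-in names (SpaceWords, TryAllocateSketch, kHeapObjectTagSketch) rather than actual VM API:

// Sketch only: the stub's fast path restated in portable C++.
// R6/R0/R7 in the array-stub hunks correspond to &space->top, object, next.
#include <cstdint>

struct SpaceWords {
  uintptr_t top;   // word that heap->TopAddress(space) points at
  uintptr_t end;   // word that heap->EndAddress(space) points at
};

const uintptr_t kHeapObjectTagSketch = 1;  // stand-in for kHeapObjectTag

// Returns a tagged pointer, or 0 to mean "jump to slow_case".
uintptr_t TryAllocateSketch(SpaceWords* space, uintptr_t size) {
  uintptr_t object = space->top;     // potential new object start
  uintptr_t next = object + size;    // potential next object start
  if (next < object) return 0;       // overflow: the adds/VS branch in the array stub
  if (next >= space->end) return 0;  // does not fit: the cmp/CS branch
  space->top = next;                 // commit: store the new top
  return object + kHeapObjectTagSketch;  // the stubs return a tagged pointer in R0
}

A zero return here stands in for the branch to slow_case, where the stub falls back to the runtime allocator.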
@@ -870,7 +870,6 @@
 // R0: new allocated RawContext object.
 void StubCode::GenerateAllocateContextStub(Assembler* assembler) {
   if (FLAG_inline_alloc) {
-    const Class& context_class = Class::ZoneHandle(Object::context_class());
     Label slow_case;
     Heap* heap = Isolate::Current()->heap();
     // First compute the rounded instance size.
@@ -884,7 +883,9 @@
     // Now allocate the object.
     // R1: number of context variables.
     // R2: object size.
-    __ LoadImmediate(R5, heap->TopAddress());
+    const intptr_t cid = kContextCid;
+    Heap::Space space = heap->SpaceForAllocation(cid);
+    __ LoadImmediate(R5, heap->TopAddress(space));
     __ ldr(R0, Address(R5, 0));
     __ add(R3, R2, Operand(R0));
     // Check if the allocation fits into the remaining space.
@@ -892,7 +893,7 @@
     // R1: number of context variables.
     // R2: object size.
     // R3: potential next object start.
-    __ LoadImmediate(IP, heap->EndAddress());
+    __ LoadImmediate(IP, heap->EndAddress(space));
     __ ldr(IP, Address(IP, 0));
     __ cmp(R3, Operand(IP));
     if (FLAG_use_slow_path) {
@@ -909,7 +910,7 @@
     // R3: next object start.
     __ str(R3, Address(R5, 0));
     __ add(R0, R0, Operand(kHeapObjectTag));
-    __ UpdateAllocationStatsWithSize(context_class.id(), R2, R5);
+    __ UpdateAllocationStatsWithSize(cid, R2, R5, space);
 
     // Calculate the size tag.
     // R0: new object.
@@ -923,7 +924,7 @@
 
     // Get the class index and insert it into the tags.
     // R2: size and bit tags.
-    __ LoadImmediate(IP, RawObject::ClassIdTag::encode(context_class.id()));
+    __ LoadImmediate(IP, RawObject::ClassIdTag::encode(cid));
     __ orr(R2, R2, Operand(IP));
     __ str(R2, FieldAddress(R0, Context::tags_offset()));
 
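Both allocation stubs then assemble the object's tags word from the size and the class id, which is why a raw cid is enough here and the Class handle lookups (array_class(), Object::context_class()) could be dropped. A rough, self-contained rendering of that tag assembly; the *Sketch constants stand in for RawObject::kSizeTagPos, kObjectAlignmentLog2, RawObject::SizeTag::kMaxSizeTag and RawObject::ClassIdTag, whose real values are defined elsewhere in the VM and are not part of this patch:

// Sketch only: what the CompareImmediate / mov(LSL) / orr sequence computes.
#include <cstdint>

const intptr_t kObjectAlignmentLog2Sketch = 3;   // assumed 8-byte alignment
const intptr_t kSizeTagPosSketch = 8;            // assumed size-tag position
const intptr_t kClassIdTagPosSketch = 16;        // assumed class-id position
const intptr_t kMaxSizeTagSketch = 255 << kObjectAlignmentLog2Sketch;

uintptr_t MakeTagsSketch(intptr_t cid, intptr_t size) {
  const intptr_t shift = kSizeTagPosSketch - kObjectAlignmentLog2Sketch;
  uintptr_t tags = 0;
  if (size <= kMaxSizeTagSketch) {
    // Shifting the byte size left by (kSizeTagPos - kObjectAlignmentLog2)
    // stores size-in-allocation-units in the size-tag field (the LS-conditional
    // mov in the stubs); larger objects leave the size tag as 0.
    tags = static_cast<uintptr_t>(size) << shift;
  }
  // Insert the class id, as RawObject::ClassIdTag::encode(cid) does in the stubs.
  tags |= static_cast<uintptr_t>(cid) << kClassIdTagPosSketch;
  return tags;
}

Since the class id used in each stub is a compile-time constant (kArrayCid, kContextCid), the dropped Class::Handle/Class::ZoneHandle lookups added nothing the cid does not already provide.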
@@ -1072,13 +1073,14 @@
     // next object start and initialize the allocated object.
     // R1: instantiated type arguments (if is_cls_parameterized).
     Heap* heap = Isolate::Current()->heap();
-    __ LoadImmediate(R5, heap->TopAddress());
+    Heap::Space space = heap->SpaceForAllocation(cls.id());
+    __ LoadImmediate(R5, heap->TopAddress(space));
     __ ldr(R2, Address(R5, 0));
     __ AddImmediate(R3, R2, instance_size);
     // Check if the allocation fits into the remaining space.
     // R2: potential new object start.
     // R3: potential next object start.
-    __ LoadImmediate(IP, heap->EndAddress());
+    __ LoadImmediate(IP, heap->EndAddress(space));
     __ ldr(IP, Address(IP, 0));
     __ cmp(R3, Operand(IP));
     if (FLAG_use_slow_path) {
@@ -1087,7 +1089,7 @@
       __ b(&slow_case, CS);  // Unsigned higher or equal.
     }
     __ str(R3, Address(R5, 0));
-    __ UpdateAllocationStats(cls.id(), R5);
+    __ UpdateAllocationStats(cls.id(), R5, space);
 
     // R2: new object start.
     // R3: next object start.
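The UpdateAllocationStats / UpdateAllocationStatsWithSize helpers themselves are not shown in this patch; the new space argument presumably lets the per-class allocation counters attribute each allocation to new space or old space. Purely as an illustration of that idea (the type and fields below are invented for the sketch, not the VM's actual bookkeeping):

// Sketch only: one plausible shape for space-aware per-class counters.
#include <cstdint>

enum class SpaceSketch { kNew, kOld };  // stands in for Heap::Space

struct AllocStatsSketch {
  intptr_t new_count = 0, new_size = 0;
  intptr_t old_count = 0, old_size = 0;

  void Update(SpaceSketch space, intptr_t size) {
    if (space == SpaceSketch::kNew) {
      new_count++;
      new_size += size;
    } else {
      old_count++;
      old_size += size;
    }
  }
};

Whatever the real helpers look like, the generated code only needs to tell them which space it bumped, which is all the extra argument carries.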