| Index: src/ppc/macro-assembler-ppc.cc
|
| diff --git a/src/ppc/macro-assembler-ppc.cc b/src/ppc/macro-assembler-ppc.cc
|
| index 0530f4baab67e2be709c655df370d36651b11b2f..3a10e452cda811b1ef46f49ec62f6958491c2a6b 100644
|
| --- a/src/ppc/macro-assembler-ppc.cc
|
| +++ b/src/ppc/macro-assembler-ppc.cc
|
| @@ -1790,6 +1790,7 @@ void MacroAssembler::Allocate(int object_size, Register result,
|
| Register scratch1, Register scratch2,
|
| Label* gc_required, AllocationFlags flags) {
|
| DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
|
| + DCHECK((flags & ALLOCATION_FOLDED) == 0);
|
| if (!FLAG_inline_new) {
|
| if (emit_debug_code()) {
|
| // Trash the registers to simulate an allocation failure.
|
| @@ -1875,7 +1876,11 @@ void MacroAssembler::Allocate(int object_size, Register result,
|
| blt(gc_required);
|
| add(result_end, result, result_end);
|
| }
|
| - StoreP(result_end, MemOperand(top_address));
|
| +
|
| + if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
|
| + // The top pointer is not updated for allocation folding dominators.
|
| + StoreP(result_end, MemOperand(top_address));
|
| + }
|
|
|
| // Tag object.
|
| addi(result, result, Operand(kHeapObjectTag));
|
| @@ -1885,6 +1890,7 @@ void MacroAssembler::Allocate(int object_size, Register result,
|
| void MacroAssembler::Allocate(Register object_size, Register result,
|
| Register result_end, Register scratch,
|
| Label* gc_required, AllocationFlags flags) {
|
| + DCHECK((flags & ALLOCATION_FOLDED) == 0);
|
| if (!FLAG_inline_new) {
|
| if (emit_debug_code()) {
|
| // Trash the registers to simulate an allocation failure.
|
| @@ -1974,6 +1980,110 @@ void MacroAssembler::Allocate(Register object_size, Register result,
|
| andi(r0, result_end, Operand(kObjectAlignmentMask));
|
| Check(eq, kUnalignedAllocationInNewSpace, cr0);
|
| }
|
| + if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
|
| + // The top pointer is not updated for allocation folding dominators.
|
| + StoreP(result_end, MemOperand(top_address));
|
| + }
|
| +
|
| + // Tag object.
|
| + addi(result, result, Operand(kHeapObjectTag));
|
| +}
|
| +
|
| +void MacroAssembler::FastAllocate(Register object_size, Register result,
|
| + Register result_end, Register scratch,
|
| + AllocationFlags flags) {
|
| + // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
|
| + // is not specified. Other registers must not overlap.
|
| + DCHECK(!AreAliased(object_size, result, scratch, ip));
|
| + DCHECK(!AreAliased(result_end, result, scratch, ip));
|
| + DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));
|
| +
|
| + ExternalReference allocation_top =
|
| + AllocationUtils::GetAllocationTopReference(isolate(), flags);
|
| +
|
| + Register top_address = scratch;
|
| + mov(top_address, Operand(allocation_top));
|
| + LoadP(result, MemOperand(top_address));
|
| +
|
| + if ((flags & DOUBLE_ALIGNMENT) != 0) {
|
| + // Align the next allocation. Storing the filler map without checking top is
|
| + // safe in new-space because the limit of the heap is aligned there.
|
| +#if V8_TARGET_ARCH_PPC64
|
| + STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
|
| +#else
|
| + DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
|
| + andi(result_end, result, Operand(kDoubleAlignmentMask));
|
| + Label aligned;
|
| + beq(&aligned);
|
| + mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
|
| + stw(result_end, MemOperand(result));
|
| + addi(result, result, Operand(kDoubleSize / 2));
|
| + bind(&aligned);
|
| +#endif
|
| + }
|
| +
|
| + // Calculate new top using result. Object size may be in words so a shift is
|
| + // required to get the number of bytes.
|
| + if ((flags & SIZE_IN_WORDS) != 0) {
|
| + ShiftLeftImm(result_end, object_size, Operand(kPointerSizeLog2));
|
| + add(result_end, result, result_end);
|
| + } else {
|
| + add(result_end, result, object_size);
|
| + }
|
| +
|
| + // Update allocation top. result_end holds the new top; result still
|
| + if (emit_debug_code()) {
|
| + andi(r0, result_end, Operand(kObjectAlignmentMask));
|
| + Check(eq, kUnalignedAllocationInNewSpace, cr0);
|
| + }
|
| + StoreP(result_end, MemOperand(top_address));
|
| +
|
| + // Tag object.
|
| + addi(result, result, Operand(kHeapObjectTag));
|
| +}
|
| +
|
| +void MacroAssembler::FastAllocate(int object_size, Register result,
|
| + Register scratch1, Register scratch2,
|
| + AllocationFlags flags) {
|
| + DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
|
| + DCHECK(!AreAliased(result, scratch1, scratch2, ip));
|
| +
|
| + // Make object size into bytes.
|
| + if ((flags & SIZE_IN_WORDS) != 0) {
|
| + object_size *= kPointerSize;
|
| + }
|
| + DCHECK_EQ(0, object_size & kObjectAlignmentMask);
|
| +
|
| + ExternalReference allocation_top =
|
| + AllocationUtils::GetAllocationTopReference(isolate(), flags);
|
| +
|
| + // Set up allocation top address register.
|
| + Register top_address = scratch1;
|
| + Register result_end = scratch2;
|
| + mov(top_address, Operand(allocation_top));
|
| + LoadP(result, MemOperand(top_address));
|
| +
|
| + if ((flags & DOUBLE_ALIGNMENT) != 0) {
|
| + // Align the next allocation. Storing the filler map without checking top is
|
| + // safe in new-space because the limit of the heap is aligned there.
|
| +#if V8_TARGET_ARCH_PPC64
|
| + STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
|
| +#else
|
| + DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
|
| + andi(result_end, result, Operand(kDoubleAlignmentMask));
|
| + Label aligned;
|
| + beq(&aligned);
|
| + mov(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
|
| + stw(result_end, MemOperand(result));
|
| + addi(result, result, Operand(kDoubleSize / 2));
|
| + bind(&aligned);
|
| +#endif
|
| + }
|
| +
|
| + // Calculate new top using result.
|
| + Add(result_end, result, object_size, r0);
|
| +
|
| + // Unconditionally update the top pointer (no folding-dominator check here).
|
| StoreP(result_end, MemOperand(top_address));
|
|
|
| // Tag object.
|
|
|