| Index: src/crankshaft/ppc/lithium-codegen-ppc.cc
|
| diff --git a/src/crankshaft/ppc/lithium-codegen-ppc.cc b/src/crankshaft/ppc/lithium-codegen-ppc.cc
|
| index 0a1ec2c78a1872ba10bd3ec190a5d064d1d07e07..afff54282f5209d4813f3305acd0a935f5ca7917 100644
|
| --- a/src/crankshaft/ppc/lithium-codegen-ppc.cc
|
| +++ b/src/crankshaft/ppc/lithium-codegen-ppc.cc
|
| @@ -5345,7 +5345,7 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
|
| class DeferredAllocate final : public LDeferredCode {
|
| public:
|
| DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
|
| - : LDeferredCode(codegen), instr_(instr) {}
|
| + : LDeferredCode(codegen), instr_(instr) {}
|
| void Generate() override { codegen()->DoDeferredAllocate(instr_); }
|
| LInstruction* instr() override { return instr_; }
|
|
|
| @@ -5353,7 +5353,8 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
|
| LAllocate* instr_;
|
| };
|
|
|
| - DeferredAllocate* deferred = new (zone()) DeferredAllocate(this, instr);
|
| + DeferredAllocate* deferred =
|
| + new (zone()) DeferredAllocate(this, instr);
|
|
|
| Register result = ToRegister(instr->result());
|
| Register scratch = ToRegister(instr->temp1());
|
| @@ -5369,6 +5370,14 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
|
| flags = static_cast<AllocationFlags>(flags | PRETENURE);
|
| }
|
|
|
| + if (instr->hydrogen()->IsAllocationFoldingDominator()) {
|
| + flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR);
|
| + }
|
| +
|
| + if (instr->hydrogen()->IsAllocationFolded()) {
|
| + flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDED);
|
| + }
|
| +
|
| if (instr->size()->IsConstantOperand()) {
|
| int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
|
| CHECK(size <= Page::kMaxRegularHeapObjectSize);
|
| @@ -5440,6 +5449,50 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
|
| CallRuntimeFromDeferred(Runtime::kAllocateInTargetSpace, 2, instr,
|
| instr->context());
|
| __ StoreToSafepointRegisterSlot(r3, result);
|
| +
|
| + if (instr->hydrogen()->IsAllocationFoldingDominator()) {
|
| + AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS;
|
| + if (instr->hydrogen()->IsOldSpaceAllocation()) {
|
| + DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
|
| + allocation_flags = static_cast<AllocationFlags>(allocation_flags | PRETENURE);
|
| + }
|
| + // If the allocation folding dominator's allocation triggered a GC, the
| 
| + // allocation happened in the runtime. We have to reset the top pointer to
| 
| + // virtually undo the allocation.
|
| + ExternalReference allocation_top =
|
| + AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
|
| + Register top_address = scratch0();
|
| + __ subi(r3, r3, Operand(kHeapObjectTag));
|
| + __ mov(top_address, Operand(allocation_top));
|
| + __ StoreP(r3, MemOperand(top_address));
|
| + __ addi(r3, r3, Operand(kHeapObjectTag));
|
| + }
|
| +}
|
| +
|
| +void LCodeGen::DoFastAllocate(LFastAllocate* instr) {
|
| + DCHECK(instr->hydrogen()->IsAllocationFolded());
|
| + Register result = ToRegister(instr->result());
|
| + Register scratch1 = ToRegister(instr->temp1());
|
| + Register scratch2 = ToRegister(instr->temp2());
|
| +
|
| + AllocationFlags flags = NO_ALLOCATION_FLAGS;
|
| + if (instr->hydrogen()->MustAllocateDoubleAligned()) {
|
| + flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
|
| + }
|
| + if (instr->hydrogen()->IsOldSpaceAllocation()) {
|
| + DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
|
| + flags = static_cast<AllocationFlags>(flags | PRETENURE);
|
| + }
|
| + if (!instr->hydrogen()->IsAllocationFoldingDominator()) {
|
| + if (instr->size()->IsConstantOperand()) {
|
| + int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
|
| + CHECK(size <= Page::kMaxRegularHeapObjectSize);
|
| + __ FastAllocate(size, result, scratch1, scratch2, flags);
|
| + } else {
|
| + Register size = ToRegister(instr->size());
|
| + __ FastAllocate(size, result, scratch1, scratch2, flags);
|
| + }
|
| + }
|
| }
|
|
|
|
|
|
|