Index: src/crankshaft/mips64/lithium-codegen-mips64.cc
diff --git a/src/crankshaft/mips64/lithium-codegen-mips64.cc b/src/crankshaft/mips64/lithium-codegen-mips64.cc
index d056f715239e373be8a9bd315dc813d249212d3c..55c260a007dec458dc8af2091fbe26f978325f55 100644
--- a/src/crankshaft/mips64/lithium-codegen-mips64.cc
+++ b/src/crankshaft/mips64/lithium-codegen-mips64.cc
@@ -5280,6 +5280,15 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
     flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
+
+  if (instr->hydrogen()->IsAllocationFoldingDominator()) {
+    flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR);
+  }
+
+  if (instr->hydrogen()->IsAllocationFolded()) {
+    flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDED);
+  }
+
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
     CHECK(size <= Page::kMaxRegularHeapObjectSize);
@@ -5350,6 +5359,50 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   CallRuntimeFromDeferred(
       Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
   __ StoreToSafepointRegisterSlot(v0, result);
+
+  if (instr->hydrogen()->IsAllocationFoldingDominator()) {
+    AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS;
+    if (instr->hydrogen()->IsOldSpaceAllocation()) {
+      DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+      allocation_flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    }
+    // If the allocation folding dominator's allocation triggered a GC, the
+    // allocation happened in the runtime. We have to reset the top pointer
+    // to virtually undo the allocation.
+    ExternalReference allocation_top =
+        AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
+    Register top_address = scratch0();
+    __ Dsubu(v0, v0, Operand(kHeapObjectTag));
+    __ li(top_address, Operand(allocation_top));
+    __ sd(v0, MemOperand(top_address));
+    __ Daddu(v0, v0, Operand(kHeapObjectTag));
+  }
+}
+
+void LCodeGen::DoFastAllocate(LFastAllocate* instr) {
+  DCHECK(instr->hydrogen()->IsAllocationFolded());
+  Register result = ToRegister(instr->result());
+  Register scratch1 = ToRegister(instr->temp1());
+  Register scratch2 = ToRegister(instr->temp2());
+
+  AllocationFlags flags = NO_ALLOCATION_FLAGS;
+  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+  }
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+  }
+  if (!instr->hydrogen()->IsAllocationFoldingDominator()) {
+    if (instr->size()->IsConstantOperand()) {
+      int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+      CHECK(size <= Page::kMaxRegularHeapObjectSize);
+      __ FastAllocate(size, result, scratch1, scratch2, flags);
+    } else {
+      Register size = ToRegister(instr->size());
+      __ FastAllocate(size, result, scratch1, scratch2, flags);
+    }
+  }
+}
 }
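
For context, the sketch below is a minimal standalone model of the idea this patch wires up for MIPS64: the allocation-folding dominator performs the single size/limit check for a whole group of allocations, and each folded allocation is then plain pointer-bump arithmetic with no check of its own, which is roughly what DoFastAllocate emits through __ FastAllocate(...). It is illustrative only; BumpRegion, Reserve, and Carve are invented names for this example, and it does not model V8's allocation-top/limit protocol or the deferred runtime path.

// Illustrative sketch only -- a simplified model of the idea behind
// allocation folding, not V8's MacroAssembler/Heap code. All names here
// (BumpRegion, Reserve, Carve) are invented for this example.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

class BumpRegion {
 public:
  BumpRegion(uint8_t* start, size_t capacity)
      : cursor_(start), limit_(start + capacity) {}

  // "Dominator" step: one checked reservation covering the whole group.
  // Returns nullptr when the fast path fails (the real code would then fall
  // back to a deferred runtime allocation).
  uint8_t* Reserve(size_t group_size) {
    if (static_cast<size_t>(limit_ - cursor_) < group_size) return nullptr;
    reserved_end_ = cursor_ + group_size;
    return cursor_;
  }

  // "Folded" step: no limit check, just carve the next object out of the
  // reservation made above.
  uint8_t* Carve(size_t object_size) {
    assert(cursor_ + object_size <= reserved_end_);
    uint8_t* result = cursor_;
    cursor_ += object_size;
    return result;
  }

 private:
  uint8_t* cursor_;
  uint8_t* reserved_end_ = nullptr;
  uint8_t* limit_;
};

int main() {
  uint8_t backing[256];
  BumpRegion region(backing, sizeof(backing));

  // One check for a 48-byte group: a 16-byte object plus two folded
  // 16-byte objects.
  uint8_t* group = region.Reserve(48);
  assert(group != nullptr);

  uint8_t* a = region.Carve(16);  // first object of the group
  uint8_t* b = region.Carve(16);  // folded, no check
  uint8_t* c = region.Carve(16);  // folded, no check
  std::printf("offsets: a=%zu b=%zu c=%zu\n",
              static_cast<size_t>(a - group), static_cast<size_t>(b - group),
              static_cast<size_t>(c - group));
  return 0;
}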
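Two details that follow directly from the hunks: DoAllocate now records the dominator/folded state in the AllocationFlags it passes down, while DoFastAllocate handles folded allocations and deliberately emits no allocation code when the instruction is itself a folding dominator. In the deferred path, the added comment explains the top-pointer handling: when the dominator's inline allocation fails and the object comes from the runtime instead, the allocation top is rewound to the runtime-allocated object so the allocation is, in the patch's own words, "virtually undone" for the rest of the folding group.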