Index: src/crankshaft/arm64/lithium-codegen-arm64.cc |
diff --git a/src/crankshaft/arm64/lithium-codegen-arm64.cc b/src/crankshaft/arm64/lithium-codegen-arm64.cc |
index 0d6ceafd46f8fcece5cef850f43c25df7134f271..ddc6ad755f1b58616d26f36426dee9f349c69db3 100644 |
--- a/src/crankshaft/arm64/lithium-codegen-arm64.cc |
+++ b/src/crankshaft/arm64/lithium-codegen-arm64.cc |
@@ -1426,6 +1426,14 @@ void LCodeGen::DoAllocate(LAllocate* instr) { |
     flags = static_cast<AllocationFlags>(flags | PRETENURE); |
   } |
+  if (instr->hydrogen()->IsAllocationFoldingDominator()) {  // NOTE(review): presumably reserves space for later folded allocations — verify |
+    flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR); |
+  } |
+ |
+  if (instr->hydrogen()->IsAllocationFolded()) {  // NOTE(review): folded allocations take space from their dominator — verify |
+    flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDED); |
+  } |
+ |
   if (instr->size()->IsConstantOperand()) { |
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
     CHECK(size <= Page::kMaxRegularHeapObjectSize); |
@@ -1487,6 +1495,50 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) { |
   CallRuntimeFromDeferred( |
       Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); |
   __ StoreToSafepointRegisterSlot(x0, ToRegister(instr->result())); |
+ |
+  if (instr->hydrogen()->IsAllocationFoldingDominator()) { |
+    AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS; |
+    if (instr->hydrogen()->IsOldSpaceAllocation()) { |
+      DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
+      allocation_flags = static_cast<AllocationFlags>(allocation_flags | PRETENURE);  // NOTE: AllocationFlags, not the Smi-encoded runtime flags. |
+    } |
+    // If the allocation-folding dominator allocation triggered a GC, the |
+    // allocation happened in the runtime. We have to reset the top pointer |
+    // to virtually undo the allocation. |
+    ExternalReference allocation_top = |
+        AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags); |
+    Register top_address = x10; |
+    __ Sub(x0, x0, Operand(kHeapObjectTag));  // Untag: top holds a raw address. |
+    __ Mov(top_address, Operand(allocation_top)); |
+    __ Str(x0, MemOperand(top_address)); |
+    __ Add(x0, x0, Operand(kHeapObjectTag));  // Re-tag the result for callers. |
+  } |
+} |
+ |
+void LCodeGen::DoFastAllocate(LFastAllocate* instr) {  // Codegen for an allocation folded into a dominating allocation. |
+  DCHECK(instr->hydrogen()->IsAllocationFolded()); |
+  Register result = ToRegister(instr->result()); |
+  Register scratch1 = ToRegister(instr->temp1()); |
+  Register scratch2 = ToRegister(instr->temp2()); |
+ |
+  AllocationFlags flags = NO_ALLOCATION_FLAGS;  // Accumulate MacroAssembler allocation flags. |
+  if (instr->hydrogen()->MustAllocateDoubleAligned()) { |
+    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); |
+  } |
+  if (instr->hydrogen()->IsOldSpaceAllocation()) { |
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());  // Old/new space are mutually exclusive. |
+    flags = static_cast<AllocationFlags>(flags | PRETENURE); |
+  } |
+  if (!instr->hydrogen()->IsAllocationFoldingDominator()) {  // NOTE(review): dominators emit no code here — confirm they allocate via DoAllocate. |
+    if (instr->size()->IsConstantOperand()) { |
+      int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
+      CHECK(size <= Page::kMaxRegularHeapObjectSize);  // Must fit a regular page. |
+      __ FastAllocate(size, result, scratch1, scratch2, flags); |
+    } else { |
+      Register size = ToRegister(instr->size()); |
+      __ FastAllocate(size, result, scratch1, scratch2, flags); |
+    } |
+  } |
 } |