Index: src/crankshaft/x64/lithium-codegen-x64.cc
diff --git a/src/crankshaft/x64/lithium-codegen-x64.cc b/src/crankshaft/x64/lithium-codegen-x64.cc
index 973545a83bc94b513ee0ab86c052034f59dcabcb..a900d2a6084c3368765ac6abd8f4352adc9d232b 100644
--- a/src/crankshaft/x64/lithium-codegen-x64.cc
+++ b/src/crankshaft/x64/lithium-codegen-x64.cc
@@ -5170,6 +5170,14 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
     flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
+  if (instr->hydrogen()->IsAllocationFoldingDominator()) {
+    flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR);
+  }
+
+  if (instr->hydrogen()->IsAllocationFolded()) {
+    flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDED);
+  }
+
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
     CHECK(size <= Page::kMaxRegularHeapObjectSize);
@@ -5199,6 +5207,30 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   }
 }
+void LCodeGen::DoFastAllocate(LFastAllocate* instr) {
+  DCHECK(instr->hydrogen()->IsAllocationFolded());
+  Register result = ToRegister(instr->result());
+  Register temp = ToRegister(instr->temp());
+
+  AllocationFlags flags = NO_ALLOCATION_FLAGS;
+  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
+    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
+  }
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+  }
+  if (!instr->hydrogen()->IsAllocationFoldingDominator()) {
+    if (instr->size()->IsConstantOperand()) {
+      int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
+      CHECK(size <= Page::kMaxRegularHeapObjectSize);
+      __ FastAllocate(size, result, temp, flags);
+    } else {
+      Register size = ToRegister(instr->size());
+      __ FastAllocate(size, result, temp, flags);
+    }
+  }
+}
 void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   Register result = ToRegister(instr->result());
@@ -5231,6 +5263,22 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   CallRuntimeFromDeferred(
       Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
   __ StoreToSafepointRegisterSlot(result, rax);
+
+  if (instr->hydrogen()->IsAllocationFoldingDominator()) {
+    AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS;
+    if (instr->hydrogen()->IsOldSpaceAllocation()) {
+      DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+      allocation_flags = static_cast<AllocationFlags>(allocation_flags | PRETENURE);
+    }
+    // If the allocation folding dominator allocation triggered a GC, the
+    // allocation happened in the runtime. We have to reset the top pointer to
+    // virtually undo the allocation.
+    ExternalReference allocation_top =
+        AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags);
+    __ subp(rax, Immediate(kHeapObjectTag));
+    __ Store(allocation_top, rax);
+    __ addp(rax, Immediate(kHeapObjectTag));
+  }
 }
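
The intent behind the new flags can be read off the shape of the three changes: the folding dominator (LAllocate) keeps its deferred runtime fallback, the folded allocations (LFastAllocate) call FastAllocate with no fallback at all, and the deferred path rewrites the allocation top when the dominator's allocation ended up in the runtime. The following is a minimal standalone sketch of that grouped bump-allocation idea, not V8 code: LinearArea, ReserveGroup, BumpAllocate, ResetTopTo, and the exact layout policy are invented for illustration; only the tag arithmetic around kHeapObjectTag mirrors the deferred code above.

// Minimal sketch of grouped bump allocation (illustrative only, not V8 code).
#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;  // low bit used to tag heap pointers

struct LinearArea {
  uintptr_t top;    // next free address
  uintptr_t limit;  // end of the linear allocation area
};

// Dominator step: one limit check for the combined size of the whole group.
// Returns false where the real code would jump to the deferred runtime path.
bool ReserveGroup(const LinearArea& area, size_t combined_size) {
  return area.top + combined_size <= area.limit;
}

// Folded step: an unchecked bump, analogous to FastAllocate. Only safe after a
// successful ReserveGroup that already accounted for this size.
uintptr_t BumpAllocate(LinearArea* area, size_t size) {
  uintptr_t result = area->top;
  area->top += size;
  return result + kHeapObjectTag;  // hand back a tagged pointer
}

// Deferred step: the runtime produced the block instead, so point the linear
// top at its untagged start ("virtually undo the allocation", as the comment
// in DoDeferredAllocate puts it) before the folded bumps run.
void ResetTopTo(LinearArea* area, uintptr_t tagged_object) {
  area->top = tagged_object - kHeapObjectTag;
}

int main() {
  unsigned char backing[256];
  LinearArea area{reinterpret_cast<uintptr_t>(backing),
                  reinterpret_cast<uintptr_t>(backing) + sizeof(backing)};

  // One check for 48 bytes, then three unchecked 16-byte bumps.
  assert(ReserveGroup(area, 48));
  uintptr_t a = BumpAllocate(&area, 16);
  uintptr_t b = BumpAllocate(&area, 16);
  uintptr_t c = BumpAllocate(&area, 16);
  assert(b == a + 16 && c == b + 16);

  // Untag/retag round trip used by the deferred path above.
  uintptr_t from_runtime = area.top + kHeapObjectTag;  // tagged, like rax
  ResetTopTo(&area, from_runtime);
  assert(BumpAllocate(&area, 16) == from_runtime);
  return 0;
}

This only models the intent visible in the diff: FastAllocate is emitted without a deferred entry, so any limit or GC handling has to happen at the dominator, and the deferred path must restore the top pointer precisely because the folded allocations read it directly.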