Index: src/mips/macro-assembler-mips.cc
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index afdd3b00ff515f55267de150efaefae3cfea30be..66e0a4cffb3038188b4121a9f27d85968cfaa3b5 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -4085,6 +4085,7 @@ void MacroAssembler::Allocate(int object_size,
                               Label* gc_required,
                               AllocationFlags flags) {
   DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
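+  // Allocations with the ALLOCATION_FOLDED flag are emitted through
+  // FastAllocate() below and must never reach Allocate().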
+  DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
@@ -4157,7 +4158,11 @@ void MacroAssembler::Allocate(int object_size,
   // to calculate the new top.
   Addu(result_end, result, Operand(object_size));
   Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));
-  sw(result_end, MemOperand(top_address));
+
+  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
+    // The top pointer is not updated for allocation folding dominators.
+    sw(result_end, MemOperand(top_address));
+  }

   // Tag object.
   Addu(result, result, Operand(kHeapObjectTag));
@@ -4167,6 +4172,7 @@ void MacroAssembler::Allocate(int object_size,
 void MacroAssembler::Allocate(Register object_size, Register result,
                               Register result_end, Register scratch,
                               Label* gc_required, AllocationFlags flags) {
+  DCHECK((flags & ALLOCATION_FOLDED) == 0);
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
@@ -4239,6 +4245,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
   } else {
     Addu(result_end, result, Operand(object_size));
   }
+
   Branch(gc_required, Ugreater, result_end, Operand(alloc_limit));

   // Update allocation top. result temporarily holds the new top.
@@ -4246,12 +4253,104 @@ void MacroAssembler::Allocate(Register object_size, Register result,
     And(alloc_limit, result_end, Operand(kObjectAlignmentMask));
     Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg));
   }
-  sw(result_end, MemOperand(top_address));
+
+  if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
+    // The top pointer is not updated for allocation folding dominators.
+    sw(result_end, MemOperand(top_address));
+  }

   // Tag object.
   Addu(result, result, Operand(kHeapObjectTag));
 }

+void MacroAssembler::FastAllocate(int object_size, Register result,
+                                  Register scratch1, Register scratch2,
+                                  AllocationFlags flags) {
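+  // Note: no allocation limit check is performed here; the space is assumed
+  // to have been reserved by a dominating allocation that already checked
+  // against the limit.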
+  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
+  DCHECK(!AreAliased(result, scratch1, scratch2, t9, at));
+
+  // Make object size into bytes.
+  if ((flags & SIZE_IN_WORDS) != 0) {
+    object_size *= kPointerSize;
+  }
+  DCHECK_EQ(0, object_size & kObjectAlignmentMask);
+
+  ExternalReference allocation_top =
+      AllocationUtils::GetAllocationTopReference(isolate(), flags);
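+  // The PRETENURE flag selects between the new-space and the old-space
+  // allocation top address.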
+
+  // Set up allocation top address and allocation limit registers.
+  Register top_address = scratch1;
+  // This code stores a temporary value in t9.
+  Register result_end = scratch2;
+  li(top_address, Operand(allocation_top));
+  lw(result, MemOperand(top_address));
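+  // result now holds the current allocation top, i.e. the untagged start of
+  // the new object.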
+
+  if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    // Align the next allocation. Storing the filler map without checking top is
+    // safe in new-space because the limit of the heap is aligned there.
+    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
+    And(result_end, result, Operand(kDoubleAlignmentMask));
+    Label aligned;
+    Branch(&aligned, eq, result_end, Operand(zero_reg));
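+    // Fill the skipped word with the one-pointer filler map so that the heap
+    // remains iterable.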
+    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
+    sw(result_end, MemOperand(result));
+    Addu(result, result, Operand(kDoubleSize / 2));
+    bind(&aligned);
+  }
+
+  Addu(result_end, result, Operand(object_size));
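+  // result_end is the new allocation top.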
+
+  // The top pointer is not updated for allocation folding dominators.
+  sw(result_end, MemOperand(top_address));
+
+  Addu(result, result, Operand(kHeapObjectTag));
+}
+
+void MacroAssembler::FastAllocate(Register object_size, Register result,
+                                  Register result_end, Register scratch,
+                                  AllocationFlags flags) {
+  // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag
+  // is not specified. Other registers must not overlap.
+  DCHECK(!AreAliased(object_size, result, scratch, t9, at));
+  DCHECK(!AreAliased(result_end, result, scratch, t9, at));
+  DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end));
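+  // (With DOUBLE_ALIGNMENT, result_end is used as a scratch register by the
+  // alignment code below before object_size is read.)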
+
+  ExternalReference allocation_top =
+      AllocationUtils::GetAllocationTopReference(isolate(), flags);
+
+  // Set up allocation top address and allocation limit registers.
+  Register top_address = scratch;
+  // This code stores a temporary value in t9.
+  li(top_address, Operand(allocation_top));
+  lw(result, MemOperand(top_address));
+
+  if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    // Align the next allocation. Storing the filler map without checking top is
+    // safe in new-space because the limit of the heap is aligned there.
+    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
+    And(result_end, result, Operand(kDoubleAlignmentMask));
+    Label aligned;
+    Branch(&aligned, eq, result_end, Operand(zero_reg));
+    li(result_end, Operand(isolate()->factory()->one_pointer_filler_map()));
+    sw(result_end, MemOperand(result));
+    Addu(result, result, Operand(kDoubleSize / 2));
+    bind(&aligned);
+  }
+
+  // Calculate new top and bail out if new space is exhausted. Use result
+  // to calculate the new top. Object size may be in words so a shift is
+  // required to get the number of bytes.
+  if ((flags & SIZE_IN_WORDS) != 0) {
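+    // Lsa computes result + (object_size << kPointerSizeLog2).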
+    Lsa(result_end, result, object_size, kPointerSizeLog2);
+  } else {
+    Addu(result_end, result, Operand(object_size));
+  }
+
+  // The top pointer is not updated for allocation folding dominators.
+  sw(result_end, MemOperand(top_address));
+
+  Addu(result, result, Operand(kHeapObjectTag));
+}
+
 void MacroAssembler::AllocateTwoByteString(Register result,
                                            Register length,