Index: src/heap/spaces.cc |
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc |
index f19821070b7efcc2dbe9aef861b5154d86525ef5..385c97802ee8ee3d52adcb5462f9f1ef478ab7d5 100644 |
--- a/src/heap/spaces.cc |
+++ b/src/heap/spaces.cc |
@@ -1467,8 +1467,8 @@ bool NewSpace::AddFreshPage() { |
} |
-AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes, |
- AllocationAlignment alignment) { |
+bool NewSpace::EnsureAllocation(int size_in_bytes, |
+ AllocationAlignment alignment) { |
Address old_top = allocation_info_.top(); |
Address high = to_space_.page_high(); |
if (allocation_info_.limit() < high) { |
Hannes Payer (out of office)
2015/07/30 10:38:25
This method should in principle take care of two cases. (comment truncated in the archived review)
ofrobots
2015/07/30 20:26:17
Done.
|
@@ -1484,18 +1484,16 @@ AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes, |
IncrementalMarking::GC_VIA_STACK_GUARD); |
UpdateInlineAllocationLimit(aligned_size_in_bytes); |
top_on_previous_step_ = new_top; |
- if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes); |
- return AllocateRawAligned(size_in_bytes, alignment); |
+ return true; |
} else if (AddFreshPage()) { |
// Switched to new page. Try allocating again. |
int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_); |
heap()->incremental_marking()->Step(bytes_allocated, |
IncrementalMarking::GC_VIA_STACK_GUARD); |
top_on_previous_step_ = to_space_.page_low(); |
- if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes); |
- return AllocateRawAligned(size_in_bytes, alignment); |
+ return true; |
} else { |
Hannes Payer (out of office)
2015/07/30 10:38:25
Remove the else case and just return false.
|
- return AllocationResult::Retry(); |
+ return false; |
} |
} |