Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(359)

Unified Diff: src/heap/spaces.cc

Issue 1265443003: remove recursion from NewSpace::AllocateRaw* (Closed). Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: address comments from the code review. Created 5 years, 5 months ago.
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « src/heap/spaces.h ('k') | src/heap/spaces-inl.h » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: src/heap/spaces.cc
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index f19821070b7efcc2dbe9aef861b5154d86525ef5..dde3858e16ef68f98cb4675d5201c046190c20b6 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -1467,14 +1467,34 @@ bool NewSpace::AddFreshPage() {
}
-AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes,
- AllocationAlignment alignment) {
+bool NewSpace::EnsureAllocation(int size_in_bytes,
+ AllocationAlignment alignment) {
Address old_top = allocation_info_.top();
Address high = to_space_.page_high();
- if (allocation_info_.limit() < high) {
- int alignment_size = Heap::GetFillToAlign(old_top, alignment);
- int aligned_size_in_bytes = size_in_bytes + alignment_size;
+ int filler_size = Heap::GetFillToAlign(old_top, alignment);
+ int aligned_size_in_bytes = size_in_bytes + filler_size;
+ if (old_top + aligned_size_in_bytes >= high) {
+ // Not enough room in the page, try to allocate a new one.
+ if (!AddFreshPage()) {
+ return false;
+ }
+
+ // Do a step for the bytes allocated on the last page.
+ int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
+ heap()->incremental_marking()->Step(bytes_allocated,
+ IncrementalMarking::GC_VIA_STACK_GUARD);
+ old_top = allocation_info_.top();
+ top_on_previous_step_ = old_top;
+
+ high = to_space_.page_high();
+ filler_size = Heap::GetFillToAlign(old_top, alignment);
+ aligned_size_in_bytes = size_in_bytes + filler_size;
+ }
+
+ DCHECK(old_top + aligned_size_in_bytes < high);
+
+ if (allocation_info_.limit() < high) {
// Either the limit has been lowered because linear allocation was disabled
// or because incremental marking wants to get a chance to do a step. Set
// the new limit accordingly.
@@ -1484,19 +1504,8 @@ AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes,
IncrementalMarking::GC_VIA_STACK_GUARD);
UpdateInlineAllocationLimit(aligned_size_in_bytes);
top_on_previous_step_ = new_top;
- if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes);
- return AllocateRawAligned(size_in_bytes, alignment);
- } else if (AddFreshPage()) {
- // Switched to new page. Try allocating again.
- int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
- heap()->incremental_marking()->Step(bytes_allocated,
- IncrementalMarking::GC_VIA_STACK_GUARD);
- top_on_previous_step_ = to_space_.page_low();
- if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes);
- return AllocateRawAligned(size_in_bytes, alignment);
- } else {
- return AllocationResult::Retry();
}
+ return true;
}
« no previous file with comments | « src/heap/spaces.h ('k') | src/heap/spaces-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698