Index: src/heap/spaces.cc |
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc |
index dfaac73ffd10a4c7722a4ebd70d33bb8aa7d063a..07be9fa076372d8ce0d0d6a16b425d5f1e40d572 100644 |
--- a/src/heap/spaces.cc |
+++ b/src/heap/spaces.cc |
@@ -1400,6 +1400,10 @@ void NewSpace::ResetAllocationInfo() { |
while (it.has_next()) { |
Bitmap::Clear(it.next()); |
} |
+ if (top_on_previous_step_) { |
Hannes Payer (out of office)
2015/07/23 10:58:54
Resetting here makes the step still imprecise. We a[… comment truncated in extraction] |
ofrobots
2015/07/23 16:31:34
Acknowledged.
|
+ // Start a new step. |
+ top_on_previous_step_ = allocation_info_.top(); |
+ } |
} |
@@ -1478,18 +1482,19 @@ AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes, |
// the new limit accordingly. |
Address new_top = old_top + aligned_size_in_bytes; |
int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_); |
+ |
heap()->incremental_marking()->Step(bytes_allocated, |
IncrementalMarking::GC_VIA_STACK_GUARD); |
UpdateInlineAllocationLimit(aligned_size_in_bytes); |
+ |
+ AllocationResult result = |
+ (alignment == kWordAligned) |
+ ? AllocateRawUnaligned(size_in_bytes) |
+ : AllocateRawAligned(size_in_bytes, alignment); |
top_on_previous_step_ = new_top; |
- if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes); |
- return AllocateRawAligned(size_in_bytes, alignment); |
+ return result; |
} else if (AddFreshPage()) { |
// Switched to new page. Try allocating again. |
- int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_); |
Hannes Payer (out of office)
2015/07/23 10:58:54
This code accounted for bytes_allocated before we[… comment truncated in extraction] |
ofrobots
2015/07/23 16:31:34
Okay, I understand why this case is here in the fi[… comment truncated in extraction] |
|
- heap()->incremental_marking()->Step(bytes_allocated, |
- IncrementalMarking::GC_VIA_STACK_GUARD); |
- top_on_previous_step_ = to_space_.page_low(); |
if (alignment == kWordAligned) return AllocateRawUnaligned(size_in_bytes); |
return AllocateRawAligned(size_in_bytes, alignment); |
} else { |