Chromium Code Reviews
Index: src/heap/spaces.cc
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index ba0ddb6e38f87d1ac4180061fca72865bb6c8719..28ef9470bb57409bdda12688d43394ae01bd76be 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -1396,6 +1396,7 @@ void NewSpace::UpdateAllocationInfo() {
 void NewSpace::ResetAllocationInfo() {
+  Address old_top = allocation_info_.top();
   to_space_.Reset();
   UpdateAllocationInfo();
   pages_used_ = 0;
@@ -1404,6 +1405,12 @@ void NewSpace::ResetAllocationInfo() {
   while (it.has_next()) {
     Bitmap::Clear(it.next());
   }
+  if (top_on_previous_step_) {
+    int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
+    heap()->incremental_marking()->Step(bytes_allocated,

Hannes Payer (out of office)
2015/08/14 05:26:30
Calling step is a no-op in this case. ResetAllocationInfo…

ofrobots
2015/08/14 15:26:18
The objective is to make accounting of the allocat…

Hannes Payer (out of office)
2015/08/17 17:56:16
SGTM with the refactoring.

+                                        IncrementalMarking::GC_VIA_STACK_GUARD);
+    top_on_previous_step_ = allocation_info_.top();
+  }
 }
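
The new guard above only credits incremental marking when a previous step top has actually been recorded. Below is a minimal standalone sketch of that accounting pattern, assuming nothing beyond what the hunk shows; MarkerStub and AllocationTracker are illustrative stand-ins, not V8 classes, and this is not the patch itself.

#include <cstdint>
#include <iostream>

// Illustrative stand-in for IncrementalMarking::Step(); not the V8 API.
struct MarkerStub {
  int64_t credited = 0;
  void Step(int bytes) { credited += bytes; }
};

// Mirrors the top_on_previous_step_ bookkeeping in the hunk above: credit
// only the bytes allocated since the last recorded step, and only if a
// previous step top exists.
class AllocationTracker {
 public:
  explicit AllocationTracker(MarkerStub* marker) : marker_(marker) {}

  void StartTracking(uintptr_t top) { top_on_previous_step_ = top; }

  void StepIfTracking(uintptr_t current_top) {
    if (top_on_previous_step_) {
      int bytes_allocated =
          static_cast<int>(current_top - top_on_previous_step_);
      marker_->Step(bytes_allocated);
      top_on_previous_step_ = current_top;
    }
  }

 private:
  MarkerStub* marker_;
  uintptr_t top_on_previous_step_ = 0;
};

int main() {
  MarkerStub marker;
  AllocationTracker tracker(&marker);
  tracker.StepIfTracking(0x1100);  // No previous step recorded: nothing credited.
  tracker.StartTracking(0x1000);
  tracker.StepIfTracking(0x1100);  // Credits 0x100 (256) bytes.
  std::cout << marker.credited << "\n";  // Prints 256.
  return 0;
}
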
@@ -1482,13 +1489,15 @@ bool NewSpace::EnsureAllocation(int size_in_bytes,
       return false;
     }
-    // Do a step for the bytes allocated on the last page.
-    int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
-    heap()->incremental_marking()->Step(bytes_allocated,
-                                        IncrementalMarking::GC_VIA_STACK_GUARD);
-    old_top = allocation_info_.top();
-    top_on_previous_step_ = old_top;
+    if (top_on_previous_step_) {
+      // Do a step for the bytes allocated on the last page.
+      int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
+      heap()->incremental_marking()->Step(
+          bytes_allocated, IncrementalMarking::GC_VIA_STACK_GUARD);
+      top_on_previous_step_ = allocation_info_.top();
+    }
+    old_top = allocation_info_.top();
     high = to_space_.page_high();
     filler_size = Heap::GetFillToAlign(old_top, alignment);
     aligned_size_in_bytes = size_in_bytes + filler_size;
@@ -1500,12 +1509,14 @@ bool NewSpace::EnsureAllocation(int size_in_bytes,
     // Either the limit has been lowered because linear allocation was disabled
     // or because incremental marking wants to get a chance to do a step. Set
     // the new limit accordingly.
-    Address new_top = old_top + aligned_size_in_bytes;
-    int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
-    heap()->incremental_marking()->Step(bytes_allocated,
-                                        IncrementalMarking::GC_VIA_STACK_GUARD);
+    if (top_on_previous_step_) {
+      Address new_top = old_top + aligned_size_in_bytes;
+      int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
+      heap()->incremental_marking()->Step(
+          bytes_allocated, IncrementalMarking::GC_VIA_STACK_GUARD);
+      top_on_previous_step_ = new_top;

ofrobots
2015/08/04 19:37:11
I think it might be good to refactor this into a new…

Hannes Payer (out of office)
2015/08/14 05:26:30
Yes, good idea.

+    }
     UpdateInlineAllocationLimit(aligned_size_in_bytes);
-    top_on_previous_step_ = new_top;
   }
   return true;
 }
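
For reference, the refactoring discussed in the comments (folding the now-duplicated guarded step into a single helper) might look roughly like the self-contained sketch below. The names SpaceModel, StepStub, and InlineAllocationStep, and the helper's exact signature, are assumptions for illustration; they are not taken from this patch.

#include <cstdint>

// Illustrative stand-in for IncrementalMarking; not the V8 API.
struct StepStub {
  int64_t total = 0;
  void Step(int bytes) { total += bytes; }
};

// Model of the suggested refactoring: both call sites in the patch would
// funnel through one helper instead of repeating the guarded Step() call.
class SpaceModel {
 public:
  explicit SpaceModel(StepStub* marking) : marking_(marking) {}

  void StartTracking(uintptr_t top) { top_on_previous_step_ = top; }

  // Hypothetical consolidated helper: credit the marker for everything
  // allocated since the previous step, then remember the new step top.
  void InlineAllocationStep(uintptr_t top, uintptr_t new_top) {
    if (top_on_previous_step_) {
      int bytes_allocated = static_cast<int>(top - top_on_previous_step_);
      marking_->Step(bytes_allocated);
      top_on_previous_step_ = new_top;
    }
  }

  // ResetAllocationInfo() and EnsureAllocation() would then reduce to calls
  // like this one at the points where the top pointer is about to move.
  void ResetAllocationInfo(uintptr_t old_top, uintptr_t fresh_top) {
    // ... reset to-space and clear mark bits ...
    InlineAllocationStep(old_top, fresh_top);
  }

 private:
  StepStub* marking_;
  uintptr_t top_on_previous_step_ = 0;
};

int main() {
  StepStub marking;
  SpaceModel space(&marking);
  space.StartTracking(0x2000);
  // Simulate a reset after 0x40 bytes were allocated since the last step.
  space.ResetAllocationInfo(/*old_top=*/0x2040, /*fresh_top=*/0x3000);
  // marking.total is now 0x40.
  return 0;
}

Passing both the old and the new top keeps such a helper usable from call sites where the top is about to be reset as well as from the ordinary allocation slow path.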