Index: src/heap/spaces.cc
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 332d59114b2c601b2a27ae1ac21eb8557d3ac00c..8764020b8020b1f86d04748ea18d1e52ff85d73f 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -419,6 +419,16 @@ void MemoryAllocator::FreeMemory(base::VirtualMemory* reservation,
   reservation->Release();
 }
 
+void MemoryAllocator::PartialFreeMemory(base::VirtualMemory* reservation,
+                                        Executability executable,
+                                        Address free_start) {
+  // We do not allow partial shrink for code.
+  DCHECK(executable == NOT_EXECUTABLE);
+
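+  // Release the tail of the reservation, from |free_start| to its end, back
+  // to the OS; the part below |free_start| stays reserved and usable.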
+  reservation->ReleasePartial(free_start);
+}
 
 void MemoryAllocator::FreeMemory(Address base, size_t size,
                                  Executability executable) {
@@ -584,6 +592,13 @@ bool MemoryChunk::CommitArea(size_t requested) {
   return true;
 }
 
+size_t MemoryChunk::CommittedPhysicalMemory() {
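+  // Report the full chunk size when commits are not lazy, and for large-object
+  // pages; otherwise only memory below the high water mark has been used.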
+  if (!base::VirtualMemory::HasLazyCommits() || owner()->identity() == LO_SPACE)
+    return size();
+  return high_water_mark_.Value();
+}
 
 void MemoryChunk::InsertAfter(MemoryChunk* other) {
   MemoryChunk* other_next = other->next_chunk();
@@ -751,6 +764,37 @@ void Page::ResetFreeListStatistics() {
   available_in_free_list_ = 0;
 }
 
+void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
+                                        Address start_free) {
+  // We do not allow partial shrink for code.
+  DCHECK(chunk->executable() == NOT_EXECUTABLE);
+
+  intptr_t size;
+  base::VirtualMemory* reservation = chunk->reserved_memory();
+  if (reservation->IsReserved()) {
+    size = static_cast<intptr_t>(reservation->size());
+  } else {
+    size = static_cast<intptr_t>(chunk->size());
+  }
+
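+  // |to_free_size| covers everything from |start_free| to the chunk's end.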
+  size_t to_free_size = size - (start_free - chunk->address());
+
+  DCHECK(size_.Value() >= static_cast<intptr_t>(to_free_size));
+  size_.Increment(-static_cast<intptr_t>(to_free_size));
+  isolate_->counters()->memory_allocated()->Decrement(
+      static_cast<int>(to_free_size));
+  chunk->set_size(size - to_free_size);
+
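+  // Return the tail either through the chunk's own reservation or, when the
+  // chunk does not own one, by freeing the address range directly.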
+  if (reservation->IsReserved()) {
+    PartialFreeMemory(reservation, chunk->executable(), start_free);
+  } else {
+    FreeMemory(start_free, to_free_size, chunk->executable());
+  }
+}
+
 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) {
   DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED));
   LOG(isolate_, DeleteEvent("MemoryChunk", chunk));
@@ -2884,6 +2925,22 @@ void PagedSpace::ResetCodeAndMetadataStatistics(Isolate* isolate) {
 void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); }
 #endif
 
+Address LargePage::GetAddressToShrink() {
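+  // Returns the commit-page-aligned end of the live object, from which this
+  // page can be shrunk, or 0 if there is nothing to release.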
+  HeapObject* object = GetObject();
+  CodeRange* code_range = heap()->memory_allocator()->code_range();
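+  // Pages in the code range hold code and cannot be partially freed.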
+  if (code_range != NULL && code_range->contains(object->address())) {
+    return 0;
+  }
+  size_t used_size = RoundUp((object->address() - address()) + object->Size(),
+                             base::OS::CommitPageSize());
+  if (used_size < CommittedPhysicalMemory()) {
+    return address() + used_size;
+  }
+  return 0;
+}
 
 // -----------------------------------------------------------------------------
 // LargeObjectIterator
@@ -3034,7 +3088,6 @@ void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
   }
 }
 
-
 void LargeObjectSpace::FreeUnmarkedObjects() {
   LargePage* previous = NULL;
   LargePage* current = first_page_;
@@ -3043,6 +3096,13 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
     MarkBit mark_bit = Marking::MarkBitFrom(object);
     DCHECK(!Marking::IsGrey(mark_bit));
     if (Marking::IsBlack(mark_bit)) {
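+      // The object is live. If it does not use all of its page's committed
+      // memory, release the unused tail.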
+      Address free_start;
+      if ((free_start = current->GetAddressToShrink()) != 0) {
+        // TODO(hpayer): Perform partial free concurrently.
+        heap()->memory_allocator()->PartialFreeMemory(current, free_start);
+      }
       previous = current;
       current = current->next_page();
     } else {