OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/spaces.h" | 5 #include "src/heap/spaces.h" |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
9 #include "src/base/platform/semaphore.h" | 9 #include "src/base/platform/semaphore.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 566 matching lines...)
577 CodeRange* code_range = heap_->memory_allocator()->code_range(); | 577 CodeRange* code_range = heap_->memory_allocator()->code_range(); |
578 DCHECK(code_range->valid() && IsFlagSet(IS_EXECUTABLE)); | 578 DCHECK(code_range->valid() && IsFlagSet(IS_EXECUTABLE)); |
579 if (!code_range->UncommitRawMemory(start, length)) return false; | 579 if (!code_range->UncommitRawMemory(start, length)) return false; |
580 } | 580 } |
581 } | 581 } |
582 | 582 |
583 area_end_ = area_start_ + requested; | 583 area_end_ = area_start_ + requested; |
584 return true; | 584 return true; |
585 } | 585 } |
586 | 586 |
| 587 size_t MemoryChunk::CommittedPhysicalMemory() { |
| 588 if (!base::VirtualMemory::HasLazyCommits() || owner()->identity() == LO_SPACE) |
| 589 return size(); |
| 590 return high_water_mark_.Value(); |
| 591 } |
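A minimal standalone sketch of the idea behind the new CommittedPhysicalMemory(): when the OS commits pages lazily, only memory up to the allocation high water mark is physically backed, so the mark approximates the committed amount. All names and values below are hypothetical illustrations, not V8 API.

// Illustrative only: high water mark as an approximation of physically
// committed memory under lazy commits. Hypothetical values, not V8 code.
#include <cstddef>
#include <cstdio>

int main() {
  const size_t chunk_size = 512 * 1024;  // reserved size of the chunk
  size_t high_water_mark = 0;            // top of bump-pointer allocation so far

  // Simulate sequential bump-pointer allocations; only memory touched up to
  // the mark has actually been committed by the OS.
  const size_t allocs[] = {32 * 1024, 64 * 1024, 16 * 1024};
  for (size_t alloc : allocs) high_water_mark += alloc;

  const bool lazy_commits = true;  // assume the platform commits lazily
  const size_t committed = lazy_commits ? high_water_mark : chunk_size;
  std::printf("~%zu of %zu reserved bytes physically committed\n",
              committed, chunk_size);
  return 0;
}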
587 | 592 |
588 void MemoryChunk::InsertAfter(MemoryChunk* other) { | 593 void MemoryChunk::InsertAfter(MemoryChunk* other) { |
589 MemoryChunk* other_next = other->next_chunk(); | 594 MemoryChunk* other_next = other->next_chunk(); |
590 | 595 |
591 set_next_chunk(other_next); | 596 set_next_chunk(other_next); |
592 set_prev_chunk(other); | 597 set_prev_chunk(other); |
593 other_next->set_prev_chunk(this); | 598 other_next->set_prev_chunk(this); |
594 other->set_next_chunk(this); | 599 other->set_next_chunk(this); |
595 } | 600 } |
596 | 601 |
(...skipping 147 matching lines...)
744 return MemoryChunk::Initialize(heap, base, chunk_size, area_start, area_end, | 749 return MemoryChunk::Initialize(heap, base, chunk_size, area_start, area_end, |
745 executable, owner, &reservation); | 750 executable, owner, &reservation); |
746 } | 751 } |
747 | 752 |
748 | 753 |
749 void Page::ResetFreeListStatistics() { | 754 void Page::ResetFreeListStatistics() { |
750 wasted_memory_ = 0; | 755 wasted_memory_ = 0; |
751 available_in_free_list_ = 0; | 756 available_in_free_list_ = 0; |
752 } | 757 } |
753 | 758 |
| 759 void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk, |
| 760 Address start_free) { |
| 761 // We do not allow partial shrink for code. |
| 762 DCHECK(chunk->executable() == NOT_EXECUTABLE); |
| 763 |
| 764 intptr_t size; |
| 765 base::VirtualMemory* reservation = chunk->reserved_memory(); |
| 766 DCHECK(reservation->IsReserved()); |
| 767 size = static_cast<intptr_t>(reservation->size()); |
| 768 |
| 769 size_t to_free_size = size - (start_free - chunk->address()); |
| 770 |
| 771 DCHECK(size_.Value() >= static_cast<intptr_t>(to_free_size)); |
| 772 size_.Increment(-static_cast<intptr_t>(to_free_size)); |
| 773 isolate_->counters()->memory_allocated()->Decrement( |
| 774 static_cast<int>(to_free_size)); |
| 775 chunk->set_size(size - to_free_size); |
| 776 |
| 777 reservation->ReleasePartial(start_free); |
| 778 } |
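A standalone sketch of the bookkeeping arithmetic in the new PartialFreeMemory(): everything from start_free to the end of the reservation is released, and the chunk keeps only the leading, still-used part. The addresses and sizes below are hypothetical examples, not V8 API.

// Illustrative only: mirrors PartialFreeMemory's size arithmetic with
// hypothetical numbers.
#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t chunk_address = 0x10000000;        // start of the reservation
  const size_t reservation_size = 2 * 1024 * 1024;   // 2 MB large page
  // First unused, commit-page-aligned address after the live object:
  const uintptr_t start_free = chunk_address + 1 * 1024 * 1024 + 64 * 1024;

  // Bytes returned to the OS (the tail of the reservation) and the size the
  // chunk shrinks to.
  const size_t to_free = reservation_size - (start_free - chunk_address);
  const size_t new_chunk_size = reservation_size - to_free;

  std::printf("freed %zu bytes, chunk shrinks to %zu bytes\n",
              to_free, new_chunk_size);
  return 0;
}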
| 779 |
754 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) { | 780 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) { |
755 DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED)); | 781 DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED)); |
756 LOG(isolate_, DeleteEvent("MemoryChunk", chunk)); | 782 LOG(isolate_, DeleteEvent("MemoryChunk", chunk)); |
757 | 783 |
758 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk), | 784 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk), |
759 chunk->IsEvacuationCandidate()); | 785 chunk->IsEvacuationCandidate()); |
760 | 786 |
761 intptr_t size; | 787 intptr_t size; |
762 base::VirtualMemory* reservation = chunk->reserved_memory(); | 788 base::VirtualMemory* reservation = chunk->reserved_memory(); |
763 if (reservation->IsReserved()) { | 789 if (reservation->IsReserved()) { |
(...skipping 2113 matching lines...)
2877 #endif | 2903 #endif |
2878 } | 2904 } |
2879 | 2905 |
2880 // ----------------------------------------------------------------------------- | 2906 // ----------------------------------------------------------------------------- |
2881 // MapSpace implementation | 2907 // MapSpace implementation |
2882 | 2908 |
2883 #ifdef VERIFY_HEAP | 2909 #ifdef VERIFY_HEAP |
2884 void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); } | 2910 void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); } |
2885 #endif | 2911 #endif |
2886 | 2912 |
| 2913 Address LargePage::GetAddressToShrink() { |
| 2914 HeapObject* object = GetObject(); |
| 2915 if (executable() == EXECUTABLE) { |
| 2916 return 0; |
| 2917 } |
| 2918 size_t used_size = RoundUp((object->address() - address()) + object->Size(), |
| 2919 base::OS::CommitPageSize()); |
| 2920 if (used_size < CommittedPhysicalMemory()) { |
| 2921 return address() + used_size; |
| 2922 } |
| 2923 return 0; |
| 2924 } |
2887 | 2925 |
2888 // ----------------------------------------------------------------------------- | 2926 // ----------------------------------------------------------------------------- |
2889 // LargeObjectIterator | 2927 // LargeObjectIterator |
2890 | 2928 |
2891 LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space) { | 2929 LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space) { |
2892 current_ = space->first_page_; | 2930 current_ = space->first_page_; |
2893 } | 2931 } |
2894 | 2932 |
2895 | 2933 |
2896 HeapObject* LargeObjectIterator::Next() { | 2934 HeapObject* LargeObjectIterator::Next() { |
(...skipping 130 matching lines...)
3027 HeapObject* object = current->GetObject(); | 3065 HeapObject* object = current->GetObject(); |
3028 MarkBit mark_bit = Marking::MarkBitFrom(object); | 3066 MarkBit mark_bit = Marking::MarkBitFrom(object); |
3029 DCHECK(Marking::IsBlack(mark_bit)); | 3067 DCHECK(Marking::IsBlack(mark_bit)); |
3030 Marking::BlackToWhite(mark_bit); | 3068 Marking::BlackToWhite(mark_bit); |
3031 Page::FromAddress(object->address())->ResetProgressBar(); | 3069 Page::FromAddress(object->address())->ResetProgressBar(); |
3032 Page::FromAddress(object->address())->ResetLiveBytes(); | 3070 Page::FromAddress(object->address())->ResetLiveBytes(); |
3033 current = current->next_page(); | 3071 current = current->next_page(); |
3034 } | 3072 } |
3035 } | 3073 } |
3036 | 3074 |
3037 | |
3038 void LargeObjectSpace::FreeUnmarkedObjects() { | 3075 void LargeObjectSpace::FreeUnmarkedObjects() { |
3039 LargePage* previous = NULL; | 3076 LargePage* previous = NULL; |
3040 LargePage* current = first_page_; | 3077 LargePage* current = first_page_; |
3041 while (current != NULL) { | 3078 while (current != NULL) { |
3042 HeapObject* object = current->GetObject(); | 3079 HeapObject* object = current->GetObject(); |
3043 MarkBit mark_bit = Marking::MarkBitFrom(object); | 3080 MarkBit mark_bit = Marking::MarkBitFrom(object); |
3044 DCHECK(!Marking::IsGrey(mark_bit)); | 3081 DCHECK(!Marking::IsGrey(mark_bit)); |
3045 if (Marking::IsBlack(mark_bit)) { | 3082 if (Marking::IsBlack(mark_bit)) { |
| 3083 Address free_start; |
| 3084 if ((free_start = current->GetAddressToShrink()) != 0) { |
| 3085 // TODO(hpayer): Perform partial free concurrently. |
| 3086 heap()->memory_allocator()->PartialFreeMemory(current, free_start); |
| 3087 } |
3046 previous = current; | 3088 previous = current; |
3047 current = current->next_page(); | 3089 current = current->next_page(); |
3048 } else { | 3090 } else { |
3049 LargePage* page = current; | 3091 LargePage* page = current; |
3050 // Cut the chunk out from the chunk list. | 3092 // Cut the chunk out from the chunk list. |
3051 current = current->next_page(); | 3093 current = current->next_page(); |
3052 if (previous == NULL) { | 3094 if (previous == NULL) { |
3053 first_page_ = current; | 3095 first_page_ = current; |
3054 } else { | 3096 } else { |
3055 previous->set_next_page(current); | 3097 previous->set_next_page(current); |
(...skipping 142 matching lines...)
3198 object->ShortPrint(); | 3240 object->ShortPrint(); |
3199 PrintF("\n"); | 3241 PrintF("\n"); |
3200 } | 3242 } |
3201 printf(" --------------------------------------\n"); | 3243 printf(" --------------------------------------\n"); |
3202 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3244 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
3203 } | 3245 } |
3204 | 3246 |
3205 #endif // DEBUG | 3247 #endif // DEBUG |
3206 } // namespace internal | 3248 } // namespace internal |
3207 } // namespace v8 | 3249 } // namespace v8 |