| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/spaces.h" | 5 #include "src/heap/spaces.h" |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
| 9 #include "src/base/platform/semaphore.h" | 9 #include "src/base/platform/semaphore.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 401 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 412 // TODO(gc) make code_range part of memory allocator? | 412 // TODO(gc) make code_range part of memory allocator? |
| 413 // Code which is part of the code-range does not have its own VirtualMemory. | 413 // Code which is part of the code-range does not have its own VirtualMemory. |
| 414 DCHECK(code_range() == NULL || | 414 DCHECK(code_range() == NULL || |
| 415 !code_range()->contains(static_cast<Address>(reservation->address()))); | 415 !code_range()->contains(static_cast<Address>(reservation->address()))); |
| 416 DCHECK(executable == NOT_EXECUTABLE || !code_range()->valid() || | 416 DCHECK(executable == NOT_EXECUTABLE || !code_range()->valid() || |
| 417 reservation->size() <= Page::kPageSize); | 417 reservation->size() <= Page::kPageSize); |
| 418 | 418 |
| 419 reservation->Release(); | 419 reservation->Release(); |
| 420 } | 420 } |
| 421 | 421 |
| 422 void MemoryAllocator::PartialFreeMemory(base::VirtualMemory* reservation, |
| 423 Executability executable, |
| 424 Address free_start) { |
| 425 // We do not allow partial shrink for code. |
| 426 DCHECK(executable != NOT_EXECUTABLE); |
| 427 |
| 428 reservation->ReleasePartial(free_start); |
| 429 } |
| 422 | 430 |
| 423 void MemoryAllocator::FreeMemory(Address base, size_t size, | 431 void MemoryAllocator::FreeMemory(Address base, size_t size, |
| 424 Executability executable) { | 432 Executability executable) { |
| 425 // TODO(gc) make code_range part of memory allocator? | 433 // TODO(gc) make code_range part of memory allocator? |
| 426 if (code_range() != NULL && | 434 if (code_range() != NULL && |
| 427 code_range()->contains(static_cast<Address>(base))) { | 435 code_range()->contains(static_cast<Address>(base))) { |
| 428 DCHECK(executable == EXECUTABLE); | 436 DCHECK(executable == EXECUTABLE); |
| 429 code_range()->FreeRawMemory(base, size); | 437 code_range()->FreeRawMemory(base, size); |
| 430 } else { | 438 } else { |
| 431 DCHECK(executable == NOT_EXECUTABLE || !code_range()->valid()); | 439 DCHECK(executable == NOT_EXECUTABLE || !code_range()->valid()); |
| (...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 577 CodeRange* code_range = heap_->memory_allocator()->code_range(); | 585 CodeRange* code_range = heap_->memory_allocator()->code_range(); |
| 578 DCHECK(code_range->valid() && IsFlagSet(IS_EXECUTABLE)); | 586 DCHECK(code_range->valid() && IsFlagSet(IS_EXECUTABLE)); |
| 579 if (!code_range->UncommitRawMemory(start, length)) return false; | 587 if (!code_range->UncommitRawMemory(start, length)) return false; |
| 580 } | 588 } |
| 581 } | 589 } |
| 582 | 590 |
| 583 area_end_ = area_start_ + requested; | 591 area_end_ = area_start_ + requested; |
| 584 return true; | 592 return true; |
| 585 } | 593 } |
| 586 | 594 |
| 595 size_t MemoryChunk::CommittedPhysicalMemory() { |
| 596 if (!base::VirtualMemory::HasLazyCommits() || owner()->identity() == LO_SPACE) |
| 597 return size(); |
| 598 return high_water_mark_.Value(); |
| 599 } |
| 587 | 600 |
| 588 void MemoryChunk::InsertAfter(MemoryChunk* other) { | 601 void MemoryChunk::InsertAfter(MemoryChunk* other) { |
| 589 MemoryChunk* other_next = other->next_chunk(); | 602 MemoryChunk* other_next = other->next_chunk(); |
| 590 | 603 |
| 591 set_next_chunk(other_next); | 604 set_next_chunk(other_next); |
| 592 set_prev_chunk(other); | 605 set_prev_chunk(other); |
| 593 other_next->set_prev_chunk(this); | 606 other_next->set_prev_chunk(this); |
| 594 other->set_next_chunk(this); | 607 other->set_next_chunk(this); |
| 595 } | 608 } |
| 596 | 609 |
| (...skipping 147 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 744 return MemoryChunk::Initialize(heap, base, chunk_size, area_start, area_end, | 757 return MemoryChunk::Initialize(heap, base, chunk_size, area_start, area_end, |
| 745 executable, owner, &reservation); | 758 executable, owner, &reservation); |
| 746 } | 759 } |
| 747 | 760 |
| 748 | 761 |
| 749 void Page::ResetFreeListStatistics() { | 762 void Page::ResetFreeListStatistics() { |
| 750 wasted_memory_ = 0; | 763 wasted_memory_ = 0; |
| 751 available_in_free_list_ = 0; | 764 available_in_free_list_ = 0; |
| 752 } | 765 } |
| 753 | 766 |
// Shrinks |chunk| by releasing its tail starting at |start_free|, updating
// the allocator's size accounting to match. Only non-executable chunks may
// be partially freed.
void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
                                        Address start_free) {
  // We do not allow partial shrink for code.
  DCHECK(chunk->executable() == NOT_EXECUTABLE);

  intptr_t size;
  base::VirtualMemory* reservation = chunk->reserved_memory();
  if (reservation->IsReserved()) {
    // Chunk is backed by its own reservation; trust the reservation's size.
    size = static_cast<intptr_t>(reservation->size());
  } else {
    size = static_cast<intptr_t>(chunk->size());
  }

  // Bytes between |start_free| and the end of the chunk.
  size_t to_free_size = size - (start_free - chunk->address());

  // Update bookkeeping BEFORE releasing the memory so counters never
  // overstate what is still mapped.
  DCHECK(size_.Value() >= static_cast<intptr_t>(to_free_size));
  size_.Increment(-static_cast<intptr_t>(to_free_size));
  isolate_->counters()->memory_allocated()->Decrement(
      static_cast<int>(to_free_size));
  chunk->set_size(size - to_free_size);

  if (reservation->IsReserved()) {
    // Partial release keeps the reservation's head mapping intact.
    PartialFreeMemory(reservation, chunk->executable(), start_free);
  } else {
    // No private reservation: free the raw tail region directly.
    FreeMemory(start_free, to_free_size, chunk->executable());
  }
}
| 794 |
| 754 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) { | 795 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) { |
| 755 DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED)); | 796 DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED)); |
| 756 LOG(isolate_, DeleteEvent("MemoryChunk", chunk)); | 797 LOG(isolate_, DeleteEvent("MemoryChunk", chunk)); |
| 757 | 798 |
| 758 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk), | 799 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk), |
| 759 chunk->IsEvacuationCandidate()); | 800 chunk->IsEvacuationCandidate()); |
| 760 | 801 |
| 761 intptr_t size; | 802 intptr_t size; |
| 762 base::VirtualMemory* reservation = chunk->reserved_memory(); | 803 base::VirtualMemory* reservation = chunk->reserved_memory(); |
| 763 if (reservation->IsReserved()) { | 804 if (reservation->IsReserved()) { |
| (...skipping 2113 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2877 #endif | 2918 #endif |
| 2878 } | 2919 } |
| 2879 | 2920 |
| 2880 // ----------------------------------------------------------------------------- | 2921 // ----------------------------------------------------------------------------- |
| 2881 // MapSpace implementation | 2922 // MapSpace implementation |
| 2882 | 2923 |
| 2883 #ifdef VERIFY_HEAP | 2924 #ifdef VERIFY_HEAP |
| 2884 void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); } | 2925 void MapSpace::VerifyObject(HeapObject* object) { CHECK(object->IsMap()); } |
| 2885 #endif | 2926 #endif |
| 2886 | 2927 |
| 2928 Address LargePage::GetAddressToShrink() { |
| 2929 HeapObject* object = GetObject(); |
| 2930 CodeRange* code_range = heap()->memory_allocator()->code_range(); |
| 2931 if (code_range != NULL && code_range->contains(object->address())) { |
| 2932 return 0; |
| 2933 } |
| 2934 size_t used_size = RoundUp((object->address() - address()) + object->Size(), |
| 2935 base::OS::CommitPageSize()); |
| 2936 if (used_size < CommittedPhysicalMemory()) { |
| 2937 return address() + used_size; |
| 2938 } |
| 2939 return 0; |
| 2940 } |
| 2887 | 2941 |
| 2888 // ----------------------------------------------------------------------------- | 2942 // ----------------------------------------------------------------------------- |
| 2889 // LargeObjectIterator | 2943 // LargeObjectIterator |
| 2890 | 2944 |
| 2891 LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space) { | 2945 LargeObjectIterator::LargeObjectIterator(LargeObjectSpace* space) { |
| 2892 current_ = space->first_page_; | 2946 current_ = space->first_page_; |
| 2893 } | 2947 } |
| 2894 | 2948 |
| 2895 | 2949 |
| 2896 HeapObject* LargeObjectIterator::Next() { | 2950 HeapObject* LargeObjectIterator::Next() { |
| (...skipping 130 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3027 HeapObject* object = current->GetObject(); | 3081 HeapObject* object = current->GetObject(); |
| 3028 MarkBit mark_bit = Marking::MarkBitFrom(object); | 3082 MarkBit mark_bit = Marking::MarkBitFrom(object); |
| 3029 DCHECK(Marking::IsBlack(mark_bit)); | 3083 DCHECK(Marking::IsBlack(mark_bit)); |
| 3030 Marking::BlackToWhite(mark_bit); | 3084 Marking::BlackToWhite(mark_bit); |
| 3031 Page::FromAddress(object->address())->ResetProgressBar(); | 3085 Page::FromAddress(object->address())->ResetProgressBar(); |
| 3032 Page::FromAddress(object->address())->ResetLiveBytes(); | 3086 Page::FromAddress(object->address())->ResetLiveBytes(); |
| 3033 current = current->next_page(); | 3087 current = current->next_page(); |
| 3034 } | 3088 } |
| 3035 } | 3089 } |
| 3036 | 3090 |
| 3037 | |
| 3038 void LargeObjectSpace::FreeUnmarkedObjects() { | 3091 void LargeObjectSpace::FreeUnmarkedObjects() { |
| 3039 LargePage* previous = NULL; | 3092 LargePage* previous = NULL; |
| 3040 LargePage* current = first_page_; | 3093 LargePage* current = first_page_; |
| 3041 while (current != NULL) { | 3094 while (current != NULL) { |
| 3042 HeapObject* object = current->GetObject(); | 3095 HeapObject* object = current->GetObject(); |
| 3043 MarkBit mark_bit = Marking::MarkBitFrom(object); | 3096 MarkBit mark_bit = Marking::MarkBitFrom(object); |
| 3044 DCHECK(!Marking::IsGrey(mark_bit)); | 3097 DCHECK(!Marking::IsGrey(mark_bit)); |
| 3045 if (Marking::IsBlack(mark_bit)) { | 3098 if (Marking::IsBlack(mark_bit)) { |
| 3099 Address free_start; |
| 3100 if ((free_start = current->GetAddressToShrink()) != 0) { |
| 3101 // TODO(hpayer): Perform partial free concurrently. |
| 3102 heap()->memory_allocator()->PartialFreeMemory(current, free_start); |
| 3103 } |
| 3046 previous = current; | 3104 previous = current; |
| 3047 current = current->next_page(); | 3105 current = current->next_page(); |
| 3048 } else { | 3106 } else { |
| 3049 LargePage* page = current; | 3107 LargePage* page = current; |
| 3050 // Cut the chunk out from the chunk list. | 3108 // Cut the chunk out from the chunk list. |
| 3051 current = current->next_page(); | 3109 current = current->next_page(); |
| 3052 if (previous == NULL) { | 3110 if (previous == NULL) { |
| 3053 first_page_ = current; | 3111 first_page_ = current; |
| 3054 } else { | 3112 } else { |
| 3055 previous->set_next_page(current); | 3113 previous->set_next_page(current); |
| (...skipping 142 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3198 object->ShortPrint(); | 3256 object->ShortPrint(); |
| 3199 PrintF("\n"); | 3257 PrintF("\n"); |
| 3200 } | 3258 } |
| 3201 printf(" --------------------------------------\n"); | 3259 printf(" --------------------------------------\n"); |
| 3202 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3260 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
| 3203 } | 3261 } |
| 3204 | 3262 |
| 3205 #endif // DEBUG | 3263 #endif // DEBUG |
| 3206 } // namespace internal | 3264 } // namespace internal |
| 3207 } // namespace v8 | 3265 } // namespace v8 |
| OLD | NEW |