| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/spaces.h" | 5 #include "src/heap/spaces.h" |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
| 9 #include "src/base/platform/semaphore.h" | 9 #include "src/base/platform/semaphore.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 710 matching lines...) |
| 721 area_start = base + Page::kObjectStartOffset; | 721 area_start = base + Page::kObjectStartOffset; |
| 722 area_end = area_start + commit_area_size; | 722 area_end = area_start + commit_area_size; |
| 723 } | 723 } |
| 724 | 724 |
| 725 // Use chunk_size for statistics and callbacks because we assume that they | 725 // Use chunk_size for statistics and callbacks because we assume that they |
| 726 // treat reserved but not-yet committed memory regions of chunks as allocated. | 726 // treat reserved but not-yet committed memory regions of chunks as allocated. |
| 727 isolate_->counters()->memory_allocated()->Increment( | 727 isolate_->counters()->memory_allocated()->Increment( |
| 728 static_cast<int>(chunk_size)); | 728 static_cast<int>(chunk_size)); |
| 729 | 729 |
| 730 LOG(isolate_, NewEvent("MemoryChunk", base, chunk_size)); | 730 LOG(isolate_, NewEvent("MemoryChunk", base, chunk_size)); |
| 731 if (owner != NULL) { | |
| 732 ObjectSpace space = static_cast<ObjectSpace>(1 << owner->identity()); | |
| 733 PerformAllocationCallback(space, kAllocationActionAllocate, chunk_size); | |
| 734 } | |
| 735 | 731 |
| 736 // We cannot use the last chunk in the address space because we would | 732 // We cannot use the last chunk in the address space because we would |
| 737 // overflow when comparing top and limit if this chunk is used for a | 733 // overflow when comparing top and limit if this chunk is used for a |
| 738 // linear allocation area. | 734 // linear allocation area. |
| 739 if ((reinterpret_cast<uintptr_t>(base) + chunk_size) == 0u) { | 735 if ((reinterpret_cast<uintptr_t>(base) + chunk_size) == 0u) { |
| 740 CHECK(!last_chunk_.IsReserved()); | 736 CHECK(!last_chunk_.IsReserved()); |
| 741 last_chunk_.TakeControl(&reservation); | 737 last_chunk_.TakeControl(&reservation); |
| 742 UncommitBlock(reinterpret_cast<Address>(last_chunk_.address()), | 738 UncommitBlock(reinterpret_cast<Address>(last_chunk_.address()), |
| 743 last_chunk_.size()); | 739 last_chunk_.size()); |
| 744 size_.Increment(-static_cast<intptr_t>(chunk_size)); | 740 size_.Increment(-static_cast<intptr_t>(chunk_size)); |
| (...skipping 11 matching lines...) |
| 756 | 752 |
| 757 | 753 |
| 758 void Page::ResetFreeListStatistics() { | 754 void Page::ResetFreeListStatistics() { |
| 759 wasted_memory_ = 0; | 755 wasted_memory_ = 0; |
| 760 available_in_free_list_ = 0; | 756 available_in_free_list_ = 0; |
| 761 } | 757 } |
| 762 | 758 |
| 763 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) { | 759 void MemoryAllocator::PreFreeMemory(MemoryChunk* chunk) { |
| 764 DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED)); | 760 DCHECK(!chunk->IsFlagSet(MemoryChunk::PRE_FREED)); |
| 765 LOG(isolate_, DeleteEvent("MemoryChunk", chunk)); | 761 LOG(isolate_, DeleteEvent("MemoryChunk", chunk)); |
| 766 if (chunk->owner() != NULL) { | |
| 767 ObjectSpace space = | |
| 768 static_cast<ObjectSpace>(1 << chunk->owner()->identity()); | |
| 769 PerformAllocationCallback(space, kAllocationActionFree, chunk->size()); | |
| 770 } | |
| 771 | 762 |
| 772 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk), | 763 isolate_->heap()->RememberUnmappedPage(reinterpret_cast<Address>(chunk), |
| 773 chunk->IsEvacuationCandidate()); | 764 chunk->IsEvacuationCandidate()); |
| 774 | 765 |
| 775 intptr_t size; | 766 intptr_t size; |
| 776 base::VirtualMemory* reservation = chunk->reserved_memory(); | 767 base::VirtualMemory* reservation = chunk->reserved_memory(); |
| 777 if (reservation->IsReserved()) { | 768 if (reservation->IsReserved()) { |
| 778 size = static_cast<intptr_t>(reservation->size()); | 769 size = static_cast<intptr_t>(reservation->size()); |
| 779 } else { | 770 } else { |
| 780 size = static_cast<intptr_t>(chunk->size()); | 771 size = static_cast<intptr_t>(chunk->size()); |
| (...skipping 128 matching lines...) |
| 909 return true; | 900 return true; |
| 910 } | 901 } |
| 911 | 902 |
| 912 | 903 |
| 913 void MemoryAllocator::ZapBlock(Address start, size_t size) { | 904 void MemoryAllocator::ZapBlock(Address start, size_t size) { |
| 914 for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) { | 905 for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) { |
| 915 Memory::Address_at(start + s) = kZapValue; | 906 Memory::Address_at(start + s) = kZapValue; |
| 916 } | 907 } |
| 917 } | 908 } |
| 918 | 909 |
| 919 | |
| 920 void MemoryAllocator::PerformAllocationCallback(ObjectSpace space, | |
| 921 AllocationAction action, | |
| 922 size_t size) { | |
| 923 for (int i = 0; i < memory_allocation_callbacks_.length(); ++i) { | |
| 924 MemoryAllocationCallbackRegistration registration = | |
| 925 memory_allocation_callbacks_[i]; | |
| 926 if ((registration.space & space) == space && | |
| 927 (registration.action & action) == action) | |
| 928 registration.callback(space, action, static_cast<int>(size)); | |
| 929 } | |
| 930 } | |
| 931 | |
| 932 | |
| 933 bool MemoryAllocator::MemoryAllocationCallbackRegistered( | |
| 934 MemoryAllocationCallback callback) { | |
| 935 for (int i = 0; i < memory_allocation_callbacks_.length(); ++i) { | |
| 936 if (memory_allocation_callbacks_[i].callback == callback) return true; | |
| 937 } | |
| 938 return false; | |
| 939 } | |
| 940 | |
| 941 | |
| 942 void MemoryAllocator::AddMemoryAllocationCallback( | |
| 943 MemoryAllocationCallback callback, ObjectSpace space, | |
| 944 AllocationAction action) { | |
| 945 DCHECK(callback != NULL); | |
| 946 MemoryAllocationCallbackRegistration registration(callback, space, action); | |
| 947 DCHECK(!MemoryAllocator::MemoryAllocationCallbackRegistered(callback)); | |
| 948 return memory_allocation_callbacks_.Add(registration); | |
| 949 } | |
| 950 | |
| 951 | |
| 952 void MemoryAllocator::RemoveMemoryAllocationCallback( | |
| 953 MemoryAllocationCallback callback) { | |
| 954 DCHECK(callback != NULL); | |
| 955 for (int i = 0; i < memory_allocation_callbacks_.length(); ++i) { | |
| 956 if (memory_allocation_callbacks_[i].callback == callback) { | |
| 957 memory_allocation_callbacks_.Remove(i); | |
| 958 return; | |
| 959 } | |
| 960 } | |
| 961 UNREACHABLE(); | |
| 962 } | |
| 963 | |
| 964 | |
| 965 #ifdef DEBUG | 910 #ifdef DEBUG |
| 966 void MemoryAllocator::ReportStatistics() { | 911 void MemoryAllocator::ReportStatistics() { |
| 967 intptr_t size = Size(); | 912 intptr_t size = Size(); |
| 968 float pct = static_cast<float>(capacity_ - size) / capacity_; | 913 float pct = static_cast<float>(capacity_ - size) / capacity_; |
| 969 PrintF(" capacity: %" V8PRIdPTR ", used: %" V8PRIdPTR | 914 PrintF(" capacity: %" V8PRIdPTR ", used: %" V8PRIdPTR |
| 970 ", available: %%%d\n\n", | 915 ", available: %%%d\n\n", |
| 971 capacity_, size, static_cast<int>(pct * 100)); | 916 capacity_, size, static_cast<int>(pct * 100)); |
| 972 } | 917 } |
| 973 #endif | 918 #endif |
| 974 | 919 |
| (...skipping 1973 matching lines...) |
| 2948 chunk_map_.Clear(); | 2893 chunk_map_.Clear(); |
| 2949 return true; | 2894 return true; |
| 2950 } | 2895 } |
| 2951 | 2896 |
| 2952 | 2897 |
| 2953 void LargeObjectSpace::TearDown() { | 2898 void LargeObjectSpace::TearDown() { |
| 2954 while (first_page_ != NULL) { | 2899 while (first_page_ != NULL) { |
| 2955 LargePage* page = first_page_; | 2900 LargePage* page = first_page_; |
| 2956 first_page_ = first_page_->next_page(); | 2901 first_page_ = first_page_->next_page(); |
| 2957 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address())); | 2902 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address())); |
| 2958 | |
| 2959 ObjectSpace space = static_cast<ObjectSpace>(1 << identity()); | |
| 2960 heap()->memory_allocator()->PerformAllocationCallback( | |
| 2961 space, kAllocationActionFree, page->size()); | |
| 2962 heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page); | 2903 heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page); |
| 2963 } | 2904 } |
| 2964 SetUp(); | 2905 SetUp(); |
| 2965 } | 2906 } |
| 2966 | 2907 |
| 2967 | 2908 |
| 2968 AllocationResult LargeObjectSpace::AllocateRaw(int object_size, | 2909 AllocationResult LargeObjectSpace::AllocateRaw(int object_size, |
| 2969 Executability executable) { | 2910 Executability executable) { |
| 2970 // Check if we want to force a GC before growing the old space further. | 2911 // Check if we want to force a GC before growing the old space further. |
| 2971 // If so, fail the allocation. | 2912 // If so, fail the allocation. |
| (...skipping 254 matching lines...) |
| 3226 object->ShortPrint(); | 3167 object->ShortPrint(); |
| 3227 PrintF("\n"); | 3168 PrintF("\n"); |
| 3228 } | 3169 } |
| 3229 printf(" --------------------------------------\n"); | 3170 printf(" --------------------------------------\n"); |
| 3230 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3171 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
| 3231 } | 3172 } |
| 3232 | 3173 |
| 3233 #endif // DEBUG | 3174 #endif // DEBUG |
| 3234 } // namespace internal | 3175 } // namespace internal |
| 3235 } // namespace v8 | 3176 } // namespace v8 |
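Note on the API removed above: the OLD column deletes MemoryAllocator's memory-allocation-callback machinery (PerformAllocationCallback, MemoryAllocationCallbackRegistered, AddMemoryAllocationCallback, RemoveMemoryAllocationCallback) together with its call sites in chunk allocation, PreFreeMemory, and LargeObjectSpace::TearDown. For reference, below is a minimal self-contained sketch of the callback shape the removed dispatch loop invoked (registration.callback(space, action, static_cast<int>(size))). The observer name and the enum values are illustrative assumptions, not taken from this patch; the bitmask layout follows the removed code's static_cast<ObjectSpace>(1 << owner->identity()).

#include <cstdio>

// Hypothetical stand-ins for the ObjectSpace and AllocationAction enums the
// removed code used: one bit per space identity, one bit per action, tested
// against registrations with bitwise AND.
enum ObjectSpace {
  kObjectSpaceNewSpace = 1 << 1,
  kObjectSpaceOldSpace = 1 << 2,
};
enum AllocationAction {
  kAllocationActionAllocate = 1 << 0,
  kAllocationActionFree = 1 << 1,
  kAllocationActionAll = kAllocationActionAllocate | kAllocationActionFree,
};

// Matches the three-argument call site in the removed
// MemoryAllocator::PerformAllocationCallback loop.
void MyAllocationObserver(ObjectSpace space, AllocationAction action,
                          int size) {
  std::printf("space=%d action=%d size=%d\n", space, action, size);
}

int main() {
  // Simulate what the removed loop did for one matching registration:
  // a chunk of 4096 bytes attributed to old space at allocation time.
  MyAllocationObserver(kObjectSpaceOldSpace, kAllocationActionAllocate, 4096);
  return 0;
}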