Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_HEAP_SPACES_H_ | 5 #ifndef V8_HEAP_SPACES_H_ |
| 6 #define V8_HEAP_SPACES_H_ | 6 #define V8_HEAP_SPACES_H_ |
| 7 | 7 |
| 8 #include "src/allocation.h" | 8 #include "src/allocation.h" |
| 9 #include "src/atomic-utils.h" | 9 #include "src/atomic-utils.h" |
| 10 #include "src/base/atomicops.h" | 10 #include "src/base/atomicops.h" |
| 11 #include "src/base/bits.h" | 11 #include "src/base/bits.h" |
| 12 #include "src/base/platform/mutex.h" | 12 #include "src/base/platform/mutex.h" |
| 13 #include "src/flags.h" | 13 #include "src/flags.h" |
| 14 #include "src/hashmap.h" | 14 #include "src/hashmap.h" |
| 15 #include "src/list.h" | 15 #include "src/list.h" |
| 16 #include "src/objects.h" | 16 #include "src/objects.h" |
| 17 #include "src/utils.h" | 17 #include "src/utils.h" |
| 18 | 18 |
| 19 namespace v8 { | 19 namespace v8 { |
| 20 namespace internal { | 20 namespace internal { |
| 21 | 21 |
| 22 class AllocationObserver; | |
| 22 class CompactionSpaceCollection; | 23 class CompactionSpaceCollection; |
| 23 class InlineAllocationObserver; | |
| 24 class Isolate; | 24 class Isolate; |
| 25 | 25 |
| 26 // ----------------------------------------------------------------------------- | 26 // ----------------------------------------------------------------------------- |
| 27 // Heap structures: | 27 // Heap structures: |
| 28 // | 28 // |
| 29 // A JS heap consists of a young generation, an old generation, and a large | 29 // A JS heap consists of a young generation, an old generation, and a large |
| 30 // object space. The young generation is divided into two semispaces. A | 30 // object space. The young generation is divided into two semispaces. A |
| 31 // scavenger implements Cheney's copying algorithm. The old generation is | 31 // scavenger implements Cheney's copying algorithm. The old generation is |
| 32 // separated into a map space and an old object space. The map space contains | 32 // separated into a map space and an old object space. The map space contains |
| 33 // all (and only) map objects, the rest of old objects go into the old space. | 33 // all (and only) map objects, the rest of old objects go into the old space. |
| (...skipping 2002 matching lines...) | |
| 2036 // Allocate the requested number of bytes in the space double aligned if | 2036 // Allocate the requested number of bytes in the space double aligned if |
| 2037 // possible, return a failure object if not. | 2037 // possible, return a failure object if not. |
| 2038 MUST_USE_RESULT inline AllocationResult AllocateRawAligned( | 2038 MUST_USE_RESULT inline AllocationResult AllocateRawAligned( |
| 2039 int size_in_bytes, AllocationAlignment alignment); | 2039 int size_in_bytes, AllocationAlignment alignment); |
| 2040 | 2040 |
| 2041 // Allocate the requested number of bytes in the space and consider allocation | 2041 // Allocate the requested number of bytes in the space and consider allocation |
| 2042 // alignment if needed. | 2042 // alignment if needed. |
| 2043 MUST_USE_RESULT inline AllocationResult AllocateRaw( | 2043 MUST_USE_RESULT inline AllocationResult AllocateRaw( |
| 2044 int size_in_bytes, AllocationAlignment alignment); | 2044 int size_in_bytes, AllocationAlignment alignment); |
| 2045 | 2045 |
| 2046 void AddAllocationObserver(AllocationObserver* observer) { | |
| 2047 allocation_observers_.Add(observer); | |
| 2048 } | |
| 2049 | |
| 2050 void RemoveAllocationObserver(AllocationObserver* observer) { | |
| 2051 bool removed = allocation_observers_.RemoveElement(observer); | |
| 2052 static_cast<void>(removed); | |
| 2053 DCHECK(removed); | |
| 2054 } | |
| 2055 | |
ofrobots 2016/01/23 16:16:31
Suggestion: These seem almost like a copy of the A
mattloring 2016/01/26 00:42:48
Done.
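The new AddAllocationObserver/RemoveAllocationObserver methods register observers with the space and assert on removal that the observer was actually registered. A minimal stand-alone sketch of that pattern, in which AllocationObserver, its Step() signature, and the space type are simplified stand-ins rather than the real V8 classes:

```cpp
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <vector>

class AllocationObserver {
 public:
  explicit AllocationObserver(int step_size) : step_size_(step_size) {}
  virtual ~AllocationObserver() = default;
  // Called by the space roughly every step_size_ allocated bytes.
  virtual void Step(int bytes_allocated) = 0;
  int step_size() const { return step_size_; }

 private:
  int step_size_;
};

class SimpleSpace {
 public:
  void AddAllocationObserver(AllocationObserver* observer) {
    observers_.push_back(observer);
  }

  void RemoveAllocationObserver(AllocationObserver* observer) {
    auto it = std::find(observers_.begin(), observers_.end(), observer);
    // Mirrors the DCHECK(removed) above: removing an observer that was
    // never added is treated as a programming error.
    assert(it != observers_.end());
    observers_.erase(it);
  }

  // A space would call this from its allocation path.
  void NotifyAllocation(int bytes_allocated) {
    for (AllocationObserver* observer : observers_) {
      observer->Step(bytes_allocated);
    }
  }

 private:
  std::vector<AllocationObserver*> observers_;
};

class LoggingObserver : public AllocationObserver {
 public:
  using AllocationObserver::AllocationObserver;
  void Step(int bytes_allocated) override {
    std::printf("observed %d bytes\n", bytes_allocated);
  }
};

int main() {
  SimpleSpace space;
  LoggingObserver observer(/*step_size=*/1024);
  space.AddAllocationObserver(&observer);
  space.NotifyAllocation(2048);
  space.RemoveAllocationObserver(&observer);
}
```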
| 2046 // Give a block of memory to the space's free list. It might be added to | 2056 // Give a block of memory to the space's free list. It might be added to |
| 2047 // the free list or accounted as waste. | 2057 // the free list or accounted as waste. |
| 2048 // If add_to_freelist is false then just accounting stats are updated and | 2058 // If add_to_freelist is false then just accounting stats are updated and |
| 2049 // no attempt to add area to free list is made. | 2059 // no attempt to add area to free list is made. |
| 2050 int Free(Address start, int size_in_bytes) { | 2060 int Free(Address start, int size_in_bytes) { |
| 2051 int wasted = free_list_.Free(start, size_in_bytes); | 2061 int wasted = free_list_.Free(start, size_in_bytes); |
| 2052 accounting_stats_.DeallocateBytes(size_in_bytes); | 2062 accounting_stats_.DeallocateBytes(size_in_bytes); |
| 2053 return size_in_bytes - wasted; | 2063 return size_in_bytes - wasted; |
| 2054 } | 2064 } |
| 2055 | 2065 |
| (...skipping 134 matching lines...) | |
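In the Free() helper from the hunk above, FreeList::Free reports how many of the returned bytes could not be reused, so the method's return value is the usable portion. A toy sketch of that accounting, with an invented minimum-block threshold and container types:

```cpp
#include <cstdio>
#include <utility>
#include <vector>

struct ToyFreeList {
  static const int kMinBlockSize = 32;  // invented threshold, not V8's
  std::vector<std::pair<char*, int>> blocks;

  // Like FreeList::Free: returns the number of wasted (unusable) bytes.
  int Free(char* start, int size_in_bytes) {
    if (size_in_bytes < kMinBlockSize) return size_in_bytes;  // all waste
    blocks.emplace_back(start, size_in_bytes);
    return 0;
  }
};

struct ToySpace {
  ToyFreeList free_list;
  int deallocated_bytes = 0;

  // Mirrors PagedSpace::Free above: update accounting, return usable bytes.
  int Free(char* start, int size_in_bytes) {
    int wasted = free_list.Free(start, size_in_bytes);
    deallocated_bytes += size_in_bytes;
    return size_in_bytes - wasted;
  }
};

int main() {
  char block[128];
  ToySpace space;
  std::printf("usable: %d\n", space.Free(block, 128));  // 128: fully reusable
  std::printf("usable: %d\n", space.Free(block, 16));   // 0: too small, waste
}
```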
| 2190 | 2200 |
| 2191 // Accounting information for this space. | 2201 // Accounting information for this space. |
| 2192 AllocationStats accounting_stats_; | 2202 AllocationStats accounting_stats_; |
| 2193 | 2203 |
| 2194 // The dummy page that anchors the double linked list of pages. | 2204 // The dummy page that anchors the double linked list of pages. |
| 2195 Page anchor_; | 2205 Page anchor_; |
| 2196 | 2206 |
| 2197 // The space's free list. | 2207 // The space's free list. |
| 2198 FreeList free_list_; | 2208 FreeList free_list_; |
| 2199 | 2209 |
| 2210 // Allocation observers | |
| 2211 List<AllocationObserver*> allocation_observers_; | |
| 2212 | |
| 2200 // Normal allocation information. | 2213 // Normal allocation information. |
| 2201 AllocationInfo allocation_info_; | 2214 AllocationInfo allocation_info_; |
| 2202 | 2215 |
| 2203 // Mutex guarding any concurrent access to the space. | 2216 // Mutex guarding any concurrent access to the space. |
| 2204 base::Mutex space_mutex_; | 2217 base::Mutex space_mutex_; |
| 2205 | 2218 |
| 2206 friend class MarkCompactCollector; | 2219 friend class MarkCompactCollector; |
| 2207 friend class PageIterator; | 2220 friend class PageIterator; |
| 2208 | 2221 |
| 2209 // Used in cctest. | 2222 // Used in cctest. |
| (...skipping 539 matching lines...) | |
| 2749 | 2762 |
| 2750 // Reset the allocation pointer to the beginning of the active semispace. | 2763 // Reset the allocation pointer to the beginning of the active semispace. |
| 2751 void ResetAllocationInfo(); | 2764 void ResetAllocationInfo(); |
| 2752 | 2765 |
| 2753 void UpdateInlineAllocationLimit(int size_in_bytes); | 2766 void UpdateInlineAllocationLimit(int size_in_bytes); |
| 2754 | 2767 |
| 2755 // Allows observation of inline allocation. The observer->Step() method gets | 2768 // Allows observation of inline allocation. The observer->Step() method gets |
| 2756 // called after every step_size bytes have been allocated (approximately). | 2769 // called after every step_size bytes have been allocated (approximately). |
| 2757 // This works by adjusting the allocation limit to a lower value and adjusting | 2770 // This works by adjusting the allocation limit to a lower value and adjusting |
| 2758 // it after each step. | 2771 // it after each step. |
| 2759 void AddInlineAllocationObserver(InlineAllocationObserver* observer); | 2772 void AddInlineAllocationObserver(AllocationObserver* observer); |
| 2760 | 2773 |
| 2761 // Removes a previously installed observer. | 2774 // Removes a previously installed observer. |
| 2762 void RemoveInlineAllocationObserver(InlineAllocationObserver* observer); | 2775 void RemoveInlineAllocationObserver(AllocationObserver* observer); |
| 2763 | 2776 |
| 2764 void DisableInlineAllocationSteps() { | 2777 void DisableInlineAllocationSteps() { |
| 2765 top_on_previous_step_ = 0; | 2778 top_on_previous_step_ = 0; |
| 2766 UpdateInlineAllocationLimit(0); | 2779 UpdateInlineAllocationLimit(0); |
| 2767 } | 2780 } |
| 2768 | 2781 |
| 2769 // Get the extent of the inactive semispace (for use as a marking stack, | 2782 // Get the extent of the inactive semispace (for use as a marking stack, |
| 2770 // or to zap it). Notice: space-addresses are not necessarily on the | 2783 // or to zap it). Notice: space-addresses are not necessarily on the |
| 2771 // same page, so FromSpaceStart() might be above FromSpaceEnd(). | 2784 // same page, so FromSpaceStart() might be above FromSpaceEnd(). |
| 2772 Address FromSpacePageLow() { return from_space_.page_low(); } | 2785 Address FromSpacePageLow() { return from_space_.page_low(); } |
| (...skipping 86 matching lines...) | |
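The inline allocation observer machinery shown above works by keeping the visible allocation limit artificially low: the slow path then triggers roughly every step_size bytes, notifies the observers, and raises the limit for the next interval. A simplified sketch of that lowered-limit trick, using illustrative sizes rather than the real NewSpace fields:

```cpp
#include <algorithm>
#include <cstdio>

struct ToyNewSpace {
  static const int kStepSize = 256;  // assumed observer step size
  int top = 0;                       // bump pointer into the allocation area
  int real_limit = 4096;             // true end of the allocation area
  int limit = 0;                     // artificially lowered visible limit
  int top_on_previous_step = 0;

  void UpdateInlineAllocationLimit() {
    // Keep the visible limit at most kStepSize beyond the last step point.
    limit = std::min(real_limit, top_on_previous_step + kStepSize);
  }

  int Allocate(int size) {
    if (top + size > limit) {
      // Slow path: this is where the observers' Step() would be invoked with
      // the number of bytes allocated since the previous step.
      std::printf("step: %d bytes since last notification\n",
                  top - top_on_previous_step);
      top_on_previous_step = top;
      UpdateInlineAllocationLimit();
      if (top + size > limit) return -1;  // genuinely out of space
    }
    int result = top;
    top += size;
    return result;
  }
};

int main() {
  ToyNewSpace space;
  space.UpdateInlineAllocationLimit();
  for (int i = 0; i < 10; i++) space.Allocate(100);
}
```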
| 2859 | 2872 |
| 2860 // Allocation pointer and limit for normal allocation and allocation during | 2873 // Allocation pointer and limit for normal allocation and allocation during |
| 2861 // mark-compact collection. | 2874 // mark-compact collection. |
| 2862 AllocationInfo allocation_info_; | 2875 AllocationInfo allocation_info_; |
| 2863 | 2876 |
| 2864 // When inline allocation stepping is active, either because of incremental | 2877 // When inline allocation stepping is active, either because of incremental |
| 2865 // marking or because of idle scavenge, we 'interrupt' inline allocation every | 2878 // marking or because of idle scavenge, we 'interrupt' inline allocation every |
| 2866 // once in a while. This is done by setting allocation_info_.limit to be lower | 2879 // once in a while. This is done by setting allocation_info_.limit to be lower |
| 2867 // than the actual limit and increasing it in steps to guarantee that the | 2880 // than the actual limit and increasing it in steps to guarantee that the |
| 2868 // observers are notified periodically. | 2881 // observers are notified periodically. |
| 2869 List<InlineAllocationObserver*> inline_allocation_observers_; | 2882 List<AllocationObserver*> inline_allocation_observers_; |
| 2870 Address top_on_previous_step_; | 2883 Address top_on_previous_step_; |
| 2871 bool inline_allocation_observers_paused_; | 2884 bool inline_allocation_observers_paused_; |
| 2872 | 2885 |
| 2873 HistogramInfo* allocated_histogram_; | 2886 HistogramInfo* allocated_histogram_; |
| 2874 HistogramInfo* promoted_histogram_; | 2887 HistogramInfo* promoted_histogram_; |
| 2875 | 2888 |
| 2876 bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment); | 2889 bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment); |
| 2877 | 2890 |
| 2878 // If we are doing inline allocation in steps, this method performs the 'step' | 2891 // If we are doing inline allocation in steps, this method performs the 'step' |
| 2879 // operation. top is the memory address of the bump pointer at the last | 2892 // operation. top is the memory address of the bump pointer at the last |
| (...skipping 173 matching lines...) | |
| 3053 static intptr_t ObjectSizeFor(intptr_t chunk_size) { | 3066 static intptr_t ObjectSizeFor(intptr_t chunk_size) { |
| 3054 if (chunk_size <= (Page::kPageSize + Page::kObjectStartOffset)) return 0; | 3067 if (chunk_size <= (Page::kPageSize + Page::kObjectStartOffset)) return 0; |
| 3055 return chunk_size - Page::kPageSize - Page::kObjectStartOffset; | 3068 return chunk_size - Page::kPageSize - Page::kObjectStartOffset; |
| 3056 } | 3069 } |
| 3057 | 3070 |
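ObjectSizeFor() above converts a raw chunk size into the largest object payload that chunk can hold; chunks no larger than one page plus the object start offset hold nothing. A quick worked example with made-up constants (V8's real Page::kPageSize and Page::kObjectStartOffset differ):

```cpp
#include <cstdint>
#include <cstdio>

const intptr_t kPageSize = 4096;          // illustrative only
const intptr_t kObjectStartOffset = 256;  // illustrative only

intptr_t ObjectSizeFor(intptr_t chunk_size) {
  if (chunk_size <= kPageSize + kObjectStartOffset) return 0;
  return chunk_size - kPageSize - kObjectStartOffset;
}

int main() {
  // A chunk barely over the threshold holds almost nothing ...
  std::printf("%ld\n", (long)ObjectSizeFor(kPageSize + kObjectStartOffset + 8));  // 8
  // ... while a three-page chunk keeps roughly two pages' worth of payload.
  std::printf("%ld\n", (long)ObjectSizeFor(3 * kPageSize));  // 7936
}
```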
| 3058 // Shared implementation of AllocateRaw, AllocateRawCode and | 3071 // Shared implementation of AllocateRaw, AllocateRawCode and |
| 3059 // AllocateRawFixedArray. | 3072 // AllocateRawFixedArray. |
| 3060 MUST_USE_RESULT AllocationResult | 3073 MUST_USE_RESULT AllocationResult |
| 3061 AllocateRaw(int object_size, Executability executable); | 3074 AllocateRaw(int object_size, Executability executable); |
| 3062 | 3075 |
| 3076 void AddAllocationObserver(AllocationObserver* observer) { | |
| 3077 allocation_observers_.Add(observer); | |
| 3078 } | |
| 3079 | |
| 3080 void RemoveAllocationObserver(AllocationObserver* observer) { | |
| 3081 bool removed = allocation_observers_.RemoveElement(observer); | |
| 3082 static_cast<void>(removed); | |
| 3083 DCHECK(removed); | |
| 3084 } | |
| 3085 | |
| 3063 // Available bytes for objects in this space. | 3086 // Available bytes for objects in this space. |
| 3064 inline intptr_t Available() override; | 3087 inline intptr_t Available() override; |
| 3065 | 3088 |
| 3066 intptr_t Size() override { return size_; } | 3089 intptr_t Size() override { return size_; } |
| 3067 | 3090 |
| 3068 intptr_t SizeOfObjects() override { return objects_size_; } | 3091 intptr_t SizeOfObjects() override { return objects_size_; } |
| 3069 | 3092 |
| 3070 // Approximate amount of physical memory committed for this space. | 3093 // Approximate amount of physical memory committed for this space. |
| 3071 size_t CommittedPhysicalMemory() override; | 3094 size_t CommittedPhysicalMemory() override; |
| 3072 | 3095 |
| (...skipping 37 matching lines...) | |
| 3110 | 3133 |
| 3111 private: | 3134 private: |
| 3112 // The head of the linked list of large object chunks. | 3135 // The head of the linked list of large object chunks. |
| 3113 LargePage* first_page_; | 3136 LargePage* first_page_; |
| 3114 intptr_t size_; // allocated bytes | 3137 intptr_t size_; // allocated bytes |
| 3115 int page_count_; // number of chunks | 3138 int page_count_; // number of chunks |
| 3116 intptr_t objects_size_; // size of objects | 3139 intptr_t objects_size_; // size of objects |
| 3117 // Map MemoryChunk::kAlignment-aligned chunks to large pages covering them | 3140 // Map MemoryChunk::kAlignment-aligned chunks to large pages covering them |
| 3118 HashMap chunk_map_; | 3141 HashMap chunk_map_; |
| 3119 | 3142 |
| 3143 // Allocation observers | |
| 3144 List<AllocationObserver*> allocation_observers_; | |
| 3145 | |
| 3120 friend class LargeObjectIterator; | 3146 friend class LargeObjectIterator; |
| 3121 }; | 3147 }; |
| 3122 | 3148 |
| 3123 | 3149 |
| 3124 class LargeObjectIterator : public ObjectIterator { | 3150 class LargeObjectIterator : public ObjectIterator { |
| 3125 public: | 3151 public: |
| 3126 explicit LargeObjectIterator(LargeObjectSpace* space); | 3152 explicit LargeObjectIterator(LargeObjectSpace* space); |
| 3127 | 3153 |
| 3128 HeapObject* Next(); | 3154 HeapObject* Next(); |
| 3129 | 3155 |
| (...skipping 34 matching lines...) | |
| 3164 count = 0; | 3190 count = 0; |
| 3165 } | 3191 } |
| 3166 // Must be small, since an iteration is used for lookup. | 3192 // Must be small, since an iteration is used for lookup. |
| 3167 static const int kMaxComments = 64; | 3193 static const int kMaxComments = 64; |
| 3168 }; | 3194 }; |
| 3169 #endif | 3195 #endif |
| 3170 } // namespace internal | 3196 } // namespace internal |
| 3171 } // namespace v8 | 3197 } // namespace v8 |
| 3172 | 3198 |
| 3173 #endif // V8_HEAP_SPACES_H_ | 3199 #endif // V8_HEAP_SPACES_H_ |