Chromium Code Reviews

Diff: src/heap/spaces.h

Issue 2689683002: [heap] Fix address space leak in Unmapper (Closed)
Patch Set: Windows compile fixes (created 3 years, 10 months ago)

// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SPACES_H_
#define V8_HEAP_SPACES_H_

#include <list>
#include <memory>
#include <unordered_set>
(...skipping 1079 matching lines...)
  STATIC_ASSERT(Page::kPageSize % kRegionSize == 0);

  Address starts_[kSize];
};


// ----------------------------------------------------------------------------
// A space acquires chunks of memory from the operating system. The memory
// allocator allocates and deallocates pages for the paged heap spaces and large
// pages for large object space.
-class MemoryAllocator {
+class V8_EXPORT_PRIVATE MemoryAllocator {
 public:
  // Unmapper takes care of concurrently unmapping and uncommitting memory
  // chunks.
  class Unmapper {
   public:
    class UnmapFreeMemoryTask;

    explicit Unmapper(MemoryAllocator* allocator)
        : allocator_(allocator),
          pending_unmapping_tasks_semaphore_(0),
(...skipping 31 matching lines...)

   private:
    enum ChunkQueueType {
      kRegular,     // Pages of kPageSize that do not live in a CodeRange and
                    // can thus be used for stealing.
      kNonRegular,  // Large chunks and executable chunks.
      kPooled,      // Pooled chunks, already uncommitted and ready for reuse.
      kNumberOfChunkQueues,
    };

+    enum class FreeMode {
+      kUncommitPooled,
+      kReleasePooled,
+    };
+
    template <ChunkQueueType type>
    void AddMemoryChunkSafe(MemoryChunk* chunk) {
      base::LockGuard<base::Mutex> guard(&mutex_);
      if (type != kRegular || allocator_->CanFreeMemoryChunk(chunk)) {
        chunks_[type].push_back(chunk);
      } else {
        DCHECK_EQ(type, kRegular);
        delayed_regular_chunks_.push_back(chunk);
      }
    }

    template <ChunkQueueType type>
    MemoryChunk* GetMemoryChunkSafe() {
      base::LockGuard<base::Mutex> guard(&mutex_);
      if (chunks_[type].empty()) return nullptr;
      MemoryChunk* chunk = chunks_[type].front();
      chunks_[type].pop_front();
      return chunk;
    }

    void ReconsiderDelayedChunks();
+    template <FreeMode mode>
    void PerformFreeMemoryOnQueuedChunks();

    base::Mutex mutex_;
    MemoryAllocator* allocator_;
    std::list<MemoryChunk*> chunks_[kNumberOfChunkQueues];
    // Delayed chunks cannot be processed in the current unmapping cycle because
    // of dependencies such as an active sweeper.
    // See MemoryAllocator::CanFreeMemoryChunk.
    std::list<MemoryChunk*> delayed_regular_chunks_;
    base::Semaphore pending_unmapping_tasks_semaphore_;
    intptr_t concurrent_unmapping_tasks_active_;

    friend class MemoryAllocator;
  };
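
Note on the new Unmapper::FreeMode: judging by the names, the two modes separate merely uncommitting pooled chunks (physical pages go back to the OS while the address reservation stays pooled for reuse) from releasing them outright, which is what stops the address-space leak this issue fixes when the pool is drained for good, for example at tear-down. Below is a minimal, self-contained sketch of that split; Chunk, Uncommit(), and Release() are placeholder names for illustration, not V8's MemoryChunk API.

#include <list>
#include <mutex>

enum class FreeMode { kUncommitPooled, kReleasePooled };

struct Chunk {
  void Uncommit() { /* return physical pages, keep the address reservation */ }
  void Release()  { /* return the whole reservation to the OS */ }
};

class UnmapperSketch {
 public:
  void AddChunk(Chunk* chunk) {
    std::lock_guard<std::mutex> guard(mutex_);
    queued_.push_back(chunk);
  }

  template <FreeMode mode>
  void PerformFreeMemoryOnQueuedChunks() {
    while (Chunk* chunk = PopChunkSafe()) {
      if (mode == FreeMode::kReleasePooled) {
        chunk->Release();   // drop the reservation: no leaked address space
      } else {
        chunk->Uncommit();  // keep the reservation pooled for later reuse
        std::lock_guard<std::mutex> guard(mutex_);
        pooled_.push_back(chunk);
      }
    }
  }

 private:
  Chunk* PopChunkSafe() {
    std::lock_guard<std::mutex> guard(mutex_);
    if (queued_.empty()) return nullptr;
    Chunk* chunk = queued_.front();
    queued_.pop_front();
    return chunk;
  }

  std::mutex mutex_;
  std::list<Chunk*> queued_;
  std::list<Chunk*> pooled_;
};

int main() {
  UnmapperSketch unmapper;
  Chunk chunk;
  unmapper.AddChunk(&chunk);
  unmapper.PerformFreeMemoryOnQueuedChunks<FreeMode::kReleasePooled>();
  return 0;
}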

  enum AllocationMode {
    kRegular,
    kPooled,
  };

  enum FreeMode {
    kFull,
+    kAlreadyPooled,
    kPreFreeAndQueue,
    kPooledAndQueue,
  };

  static size_t CodePageGuardStartOffset();

  static size_t CodePageGuardSize();

  static size_t CodePageAreaStartOffset();

(...skipping 169 matching lines...)
  base::AtomicValue<void*> highest_ever_allocated_;

  base::VirtualMemory last_chunk_;
  Unmapper unmapper_;

  friend class TestCodeRangeScope;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MemoryAllocator);
};

+extern template Page*
+MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, PagedSpace>(
+    size_t size, PagedSpace* owner, Executability executable);
+extern template Page*
+MemoryAllocator::AllocatePage<MemoryAllocator::kRegular, SemiSpace>(
+    size_t size, SemiSpace* owner, Executability executable);
+extern template Page*
+MemoryAllocator::AllocatePage<MemoryAllocator::kPooled, SemiSpace>(
+    size_t size, SemiSpace* owner, Executability executable);
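
The extern template declarations above are explicit instantiation declarations: they tell every including translation unit not to instantiate AllocatePage itself, so all clients link against the single instantiation (presumably defined in spaces.cc, the next file in this change). Together with the new V8_EXPORT_PRIVATE annotations this is the kind of thing the "Windows compile fixes" patch set is about, since component builds need one exported copy of each instantiation. A small, self-contained sketch of the mechanism follows; Twice() is a hypothetical function, nothing V8-specific.

// extern_template_sketch.cc -- illustrative only.
#include <iostream>

template <typename T>
T Twice(T value) {
  return value + value;
}

// Explicit instantiation declaration: users of this header must not
// instantiate Twice<int> themselves; a definition exists elsewhere.
extern template int Twice<int>(int value);

// Explicit instantiation definition: the one place Twice<int> is emitted.
// (For AllocatePage this would live in the .cc file.)
template int Twice<int>(int value);

int main() {
  std::cout << Twice(21) << "\n";  // prints 42
  return 0;
}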

// -----------------------------------------------------------------------------
// Interface for heap object iterator to be implemented by all object space
// object iterators.
//
// NOTE: The space-specific object iterators also implement their own next()
// method, which is used to avoid virtual function calls when iterating a
// specific space.

class V8_EXPORT_PRIVATE ObjectIterator : public Malloced {
(...skipping 247 matching lines...)
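
The NOTE above describes a common pattern: each concrete iterator exposes a non-virtual next() alongside the virtual interface, so tight loops over a statically known space avoid virtual dispatch. A rough illustration of the idea, with made-up class and member names rather than V8's:

#include <cstdio>
#include <vector>

// Virtual interface, analogous to ObjectIterator.
class IteratorBase {
 public:
  virtual ~IteratorBase() = default;
  virtual int* Next() = 0;  // virtual call, used when the space is not known
};

// A space-specific iterator with an additional non-virtual next().
class VectorIterator : public IteratorBase {
 public:
  explicit VectorIterator(std::vector<int>* v) : v_(v) {}
  int* Next() override { return next(); }
  // Non-virtual fast path for callers that know the concrete type.
  int* next() { return index_ < v_->size() ? &(*v_)[index_++] : nullptr; }

 private:
  std::vector<int>* v_;
  size_t index_ = 0;
};

int main() {
  std::vector<int> data = {1, 2, 3};
  VectorIterator it(&data);
  while (int* p = it.next()) std::printf("%d\n", *p);  // no virtual dispatch
  return 0;
}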
// 11-31 words (tiny): The tiny blocks are only used for allocation when
// categories >= small do not have entries anymore.
// 32-255 words (small): Used for allocating free space between 1-31 words in
// size.
// 256-2047 words (medium): Used for allocating free space between 32-255 words
// in size.
// 2048-16383 words (large): Used for allocating free space between 256-2047
// words in size.
// At least 16384 words (huge): This list is for objects of 2048 words or
// larger. Empty pages are also added to this list.
-class FreeList {
+class V8_EXPORT_PRIVATE FreeList {
 public:
  // This method returns how much memory can be allocated after freeing
  // maximum_freed memory.
  static inline size_t GuaranteedAllocatable(size_t maximum_freed) {
    if (maximum_freed <= kTiniestListMax) {
      // Since we are not iterating over all list entries, we cannot guarantee
      // that we can find the maximum freed block in that free list.
      return 0;
    } else if (maximum_freed <= kTinyListMax) {
      return kTinyAllocationMax;
(...skipping 216 matching lines...)
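
GuaranteedAllocatable() works through the size-class thresholds from the bottom up: roughly, a freed block that lands in one class only guarantees allocations up to the limit served by that class, and the tiniest class guarantees nothing because its lists are not searched exhaustively. A compact sketch of that threshold chain follows; the word-count limits are taken from the category comment above and are illustrative, not V8's actual byte-sized constants.

#include <cstddef>
#include <cstdio>

// Illustrative word-count thresholds; V8's real constants are byte sizes.
size_t GuaranteedAllocatableWords(size_t maximum_freed) {
  if (maximum_freed <= 10) return 0;        // tiniest: no guarantee at all
  if (maximum_freed <= 31) return 10;       // tiny
  if (maximum_freed <= 255) return 31;      // small
  if (maximum_freed <= 2047) return 255;    // medium
  if (maximum_freed <= 16383) return 2047;  // large
  return maximum_freed;                     // huge: the whole block is usable
}

int main() {
  std::printf("%zu\n", GuaranteedAllocatableWords(300));  // prints 31
  return 0;
}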

 private:
  LocalAllocationBuffer(Heap* heap, AllocationInfo allocation_info);

  void Close();

  Heap* heap_;
  AllocationInfo allocation_info_;
};

-class PagedSpace : public Space {
+class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
 public:
  typedef PageIterator iterator;

  static const intptr_t kCompactionMemoryWanted = 500 * KB;

  // Creates a space with an id.
  PagedSpace(Heap* heap, AllocationSpace id, Executability executable);

  ~PagedSpace() override { TearDown(); }

(...skipping 998 matching lines...)
  PageIterator old_iterator_;
  PageIterator code_iterator_;
  PageIterator map_iterator_;
  LargePageIterator lo_iterator_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SPACES_H_