Chromium Code Reviews

Diff: src/heap/spaces.cc

Issue 2689683002: [heap] Fix address space leak in Unmapper (Closed)
Patch Set: Windows compile fixes Created 3 years, 10 months ago
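For context: this CL templatizes Unmapper::PerformFreeMemoryOnQueuedChunks() on a FreeMode and adds a kAlreadyPooled mode to MemoryAllocator::Free(), so that Unmapper::TearDown() actually releases pooled pages instead of leaving their address space reserved. The corresponding declarations live in src/heap/spaces.h (part of this CL, but not shown in this file's diff); the sketch below is reconstructed from the usage in the diff and is an assumption, not the actual header.

// Sketch only: names are taken from the diff below, exact signatures and
// enum shapes in src/heap/spaces.h may differ.
class MemoryChunk;

class MemoryAllocator {
 public:
  enum FreeMode { kFull, kAlreadyPooled, kPreFreeAndQueue, kPooledAndQueue };

  template <FreeMode mode>
  void Free(MemoryChunk* chunk);

  class Unmapper {
   public:
    // kUncommitPooled: uncommit pooled pages but keep them (and their
    // address space reservation) in the pool for reuse.
    // kReleasePooled: additionally free the pooled pages themselves; used
    // from TearDown() so the reservations are not leaked.
    enum class FreeMode { kUncommitPooled, kReleasePooled };

    template <FreeMode mode>
    void PerformFreeMemoryOnQueuedChunks();
  };
};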
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/spaces.h"

 #include <utility>

 #include "src/base/bits.h"
 #include "src/base/platform/platform.h"
(...skipping 317 matching lines...)
   code_range_ = nullptr;
 }

 class MemoryAllocator::Unmapper::UnmapFreeMemoryTask : public v8::Task {
  public:
   explicit UnmapFreeMemoryTask(Unmapper* unmapper) : unmapper_(unmapper) {}

  private:
   // v8::Task overrides.
   void Run() override {
-    unmapper_->PerformFreeMemoryOnQueuedChunks();
+    unmapper_->PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>();
     unmapper_->pending_unmapping_tasks_semaphore_.Signal();
   }

   Unmapper* unmapper_;
   DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask);
 };

 void MemoryAllocator::Unmapper::FreeQueuedChunks() {
   ReconsiderDelayedChunks();
   if (FLAG_concurrent_sweeping) {
     V8::GetCurrentPlatform()->CallOnBackgroundThread(
         new UnmapFreeMemoryTask(this), v8::Platform::kShortRunningTask);
     concurrent_unmapping_tasks_active_++;
   } else {
-    PerformFreeMemoryOnQueuedChunks();
+    PerformFreeMemoryOnQueuedChunks<FreeMode::kUncommitPooled>();
   }
 }

 bool MemoryAllocator::Unmapper::WaitUntilCompleted() {
   bool waited = false;
   while (concurrent_unmapping_tasks_active_ > 0) {
     pending_unmapping_tasks_semaphore_.Wait();
     concurrent_unmapping_tasks_active_--;
     waited = true;
   }
   return waited;
 }

+template <MemoryAllocator::Unmapper::FreeMode mode>
 void MemoryAllocator::Unmapper::PerformFreeMemoryOnQueuedChunks() {
   MemoryChunk* chunk = nullptr;
   // Regular chunks.
   while ((chunk = GetMemoryChunkSafe<kRegular>()) != nullptr) {
     bool pooled = chunk->IsFlagSet(MemoryChunk::POOLED);
     allocator_->PerformFreeMemory(chunk);
     if (pooled) AddMemoryChunkSafe<kPooled>(chunk);
   }
+  if (mode == MemoryAllocator::Unmapper::FreeMode::kReleasePooled) {
+    // The previous loop uncommitted any pages marked as pooled and added them
+    // to the pooled list. In case of kReleasePooled we need to free them
+    // though.
+    while ((chunk = GetMemoryChunkSafe<kPooled>()) != nullptr) {
+      allocator_->Free<MemoryAllocator::kAlreadyPooled>(chunk);
+    }
+  }
   // Non-regular chunks.
   while ((chunk = GetMemoryChunkSafe<kNonRegular>()) != nullptr) {
     allocator_->PerformFreeMemory(chunk);
   }
 }

 void MemoryAllocator::Unmapper::TearDown() {
   WaitUntilCompleted();
   ReconsiderDelayedChunks();
   CHECK(delayed_regular_chunks_.empty());
-  PerformFreeMemoryOnQueuedChunks();
+  PerformFreeMemoryOnQueuedChunks<FreeMode::kReleasePooled>();
+  for (int i = 0; i < kNumberOfChunkQueues; i++) {
+    DCHECK(chunks_[i].empty());
+  }
 }

 void MemoryAllocator::Unmapper::ReconsiderDelayedChunks() {
   std::list<MemoryChunk*> delayed_chunks(std::move(delayed_regular_chunks_));
   // Move constructed, so the permanent list should be empty.
   DCHECK(delayed_regular_chunks_.empty());
   for (auto it = delayed_chunks.begin(); it != delayed_chunks.end(); ++it) {
     AddMemoryChunkSafe<kRegular>(*it);
   }
 }
(...skipping 506 matching lines...)
   }
 }

 template <MemoryAllocator::FreeMode mode>
 void MemoryAllocator::Free(MemoryChunk* chunk) {
   switch (mode) {
     case kFull:
       PreFreeMemory(chunk);
       PerformFreeMemory(chunk);
       break;
+    case kAlreadyPooled:
+      // Pooled pages cannot be touched anymore as their memory is uncommitted.
+      FreeMemory(chunk->address(), static_cast<size_t>(MemoryChunk::kPageSize),
+                 Executability::NOT_EXECUTABLE);
+      break;
     case kPooledAndQueue:
       DCHECK_EQ(chunk->size(), static_cast<size_t>(MemoryChunk::kPageSize));
       DCHECK_EQ(chunk->executable(), NOT_EXECUTABLE);
       chunk->SetFlag(MemoryChunk::POOLED);
       // Fall through to kPreFreeAndQueue.
     case kPreFreeAndQueue:
       PreFreeMemory(chunk);
       // The chunks added to this queue will be freed by a concurrent thread.
       unmapper()->AddMemoryChunkSafe(chunk);
       break;
-    default:
-      UNREACHABLE();
   }
 }

 template void MemoryAllocator::Free<MemoryAllocator::kFull>(MemoryChunk* chunk);

+template void MemoryAllocator::Free<MemoryAllocator::kAlreadyPooled>(
+    MemoryChunk* chunk);
+
 template void MemoryAllocator::Free<MemoryAllocator::kPreFreeAndQueue>(
     MemoryChunk* chunk);

 template void MemoryAllocator::Free<MemoryAllocator::kPooledAndQueue>(
     MemoryChunk* chunk);

 template <MemoryAllocator::AllocationMode alloc_mode, typename SpaceType>
 Page* MemoryAllocator::AllocatePage(size_t size, SpaceType* owner,
                                     Executability executable) {
   MemoryChunk* chunk = nullptr;
(...skipping 2291 matching lines...)
     object->ShortPrint();
     PrintF("\n");
   }
   printf(" --------------------------------------\n");
   printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes());
 }

 #endif  // DEBUG
 }  // namespace internal
 }  // namespace v8
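For readers outside V8: the distinction the new Unmapper::FreeMode encodes is between uncommitting a pooled page (its backing memory is dropped while the virtual address range stays reserved for reuse) and releasing it (the reservation itself is returned to the OS). The leak fixed here is of the second kind: pooled pages re-added to the pool during TearDown() were never released, so their reservations accumulated. Below is a standalone POSIX/Linux sketch of the two operations; it is not V8 code, and mmap/madvise/munmap merely stand in for V8's platform layer.

#include <sys/mman.h>
#include <unistd.h>

int main() {
  const size_t kPageSize = static_cast<size_t>(sysconf(_SC_PAGESIZE));

  // "Reserve + commit": map one page of anonymous read/write memory.
  void* page = mmap(nullptr, kPageSize, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (page == MAP_FAILED) return 1;

  // "Uncommit" (roughly what pooling with kUncommitPooled does): drop the
  // backing memory and make the range inaccessible, but keep the address
  // range reserved so it can be recommitted cheaply later.
  madvise(page, kPageSize, MADV_DONTNEED);
  mprotect(page, kPageSize, PROT_NONE);

  // "Release" (what kReleasePooled / kAlreadyPooled adds at teardown):
  // return the address range itself, so the reservation does not leak.
  munmap(page, kPageSize);
  return 0;
}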
