| OLD | NEW |
| 1 // Copyright (c) 2005, Google Inc. | 1 // Copyright (c) 2005, Google Inc. |
| 2 // All rights reserved. | 2 // All rights reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // * Redistributions of source code must retain the above copyright | 8 // * Redistributions of source code must retain the above copyright |
| 9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. |
| 10 // * Redistributions in binary form must reproduce the above | 10 // * Redistributions in binary form must reproduce the above |
| (...skipping 139 matching lines...) |
| 150 | 150 |
| 151 static LowLevelAlloc::Arena* heap_profiler_memory; | 151 static LowLevelAlloc::Arena* heap_profiler_memory; |
| 152 | 152 |
| 153 static void* ProfilerMalloc(size_t bytes) { | 153 static void* ProfilerMalloc(size_t bytes) { |
| 154 return LowLevelAlloc::AllocWithArena(bytes, heap_profiler_memory); | 154 return LowLevelAlloc::AllocWithArena(bytes, heap_profiler_memory); |
| 155 } | 155 } |
| 156 static void ProfilerFree(void* p) { | 156 static void ProfilerFree(void* p) { |
| 157 LowLevelAlloc::Free(p); | 157 LowLevelAlloc::Free(p); |
| 158 } | 158 } |
| 159 | 159 |
| 160 //---------------------------------------------------------------------- | |
| 161 // Another allocator for heap profiler's internal mmap address map | |
| 162 // | |
| 163 // A large amount of memory is consumed if we use the arena | |
| 164 // 'heap_profiler_memory' for the internal mmap address map, apparently due to | |
| 165 // fragmentation from repeated allocation/deallocation in the arena. | |
| 166 // | |
| 167 // 'mmap_heap_profiler_memory' is a dedicated arena for the mmap address map. | |
| 168 // This arena is created anew for each construction of the mmap address map | |
| 169 // and disposed of after each use. | |
| 170 //---------------------------------------------------------------------- | |
| 171 | |
| 172 static LowLevelAlloc::Arena* mmap_heap_profiler_memory = NULL; | |
| 173 | |
| 174 static void* MMapProfilerMalloc(size_t bytes) { | |
| 175 return LowLevelAlloc::AllocWithArena(bytes, mmap_heap_profiler_memory); | |
| 176 } | |
| 177 static void MMapProfilerFree(void* p) { | |
| 178 LowLevelAlloc::Free(p); | |
| 179 } | |
| 180 | |
| 181 // This function should be called from a locked scope. | |
| 182 // It returns false if it fails to delete the arena. | |
| 183 static bool DeleteMMapProfilerArenaIfExistsLocked() { | |
| 184 if (mmap_heap_profiler_memory == NULL) return true; | |
| 185 if (!LowLevelAlloc::DeleteArena(mmap_heap_profiler_memory)) return false; | |
| 186 mmap_heap_profiler_memory = NULL; | |
| 187 return true; | |
| 188 } | |
| 189 | |
| 190 // We use buffers of this size in DoGetHeapProfile. | 160 // We use buffers of this size in DoGetHeapProfile. |
| 191 // The size is 1 << 20 in the original google-perftools. Changed it to | 161 // The size is 1 << 20 in the original google-perftools. Changed it to |
| 192 // 5 << 20 since a larger buffer is required for deeper profiling in Chromium. | 162 // 5 << 20 since a larger buffer is required for deeper profiling in Chromium. |
| 193 // The buffer is allocated only when the environment variable HEAPPROFILE is | 163 // The buffer is allocated only when the environment variable HEAPPROFILE is |
| 194 // specified to dump heap information. | 164 // specified to dump heap information. |
| 195 static const int kProfileBufferSize = 5 << 20; | 165 static const int kProfileBufferSize = 5 << 20; |
| 196 | 166 |
| 197 // This is a last-ditch buffer we use in DumpProfileLocked in case we | 167 // This is a last-ditch buffer we use in DumpProfileLocked in case we |
| 198 // can't allocate more memory from ProfilerMalloc. We expect this | 168 // can't allocate more memory from ProfilerMalloc. We expect this |
| 199 // will be used by HeapProfileEndWriter when the application has to | 169 // will be used by HeapProfileEndWriter when the application has to |
| (...skipping 27 matching lines...) |
| 227 // Input must be a buffer of size at least 1MB. | 197 // Input must be a buffer of size at least 1MB. |
| 228 static char* DoGetHeapProfileLocked(char* buf, int buflen) { | 198 static char* DoGetHeapProfileLocked(char* buf, int buflen) { |
| 229 // We used to be smarter about estimating the required memory and | 199 // We used to be smarter about estimating the required memory and |
| 230 // then capping it to 1MB and generating the profile into that. | 200 // then capping it to 1MB and generating the profile into that. |
| 231 if (buf == NULL || buflen < 1) | 201 if (buf == NULL || buflen < 1) |
| 232 return NULL; | 202 return NULL; |
| 233 | 203 |
| 234 RAW_DCHECK(heap_lock.IsHeld(), ""); | 204 RAW_DCHECK(heap_lock.IsHeld(), ""); |
| 235 int bytes_written = 0; | 205 int bytes_written = 0; |
| 236 if (is_on) { | 206 if (is_on) { |
| 237 if (FLAGS_mmap_profile) { | 207 HeapProfileTable::Stats const stats = heap_profile->total(); |
| 238 if (!DeleteMMapProfilerArenaIfExistsLocked()) { | 208 (void)stats; // avoid an unused-variable warning in non-debug mode. |
| 239 RAW_LOG(FATAL, "Memory leak in HeapProfiler:"); | |
| 240 } | |
| 241 mmap_heap_profiler_memory = | |
| 242 LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena()); | |
| 243 heap_profile->RefreshMMapData(MMapProfilerMalloc, MMapProfilerFree); | |
| 244 } | |
| 245 if (deep_profile) { | 209 if (deep_profile) { |
| 246 bytes_written = deep_profile->FillOrderedProfile(buf, buflen - 1); | 210 bytes_written = deep_profile->FillOrderedProfile(buf, buflen - 1); |
| 247 } else { | 211 } else { |
| 248 bytes_written = heap_profile->FillOrderedProfile(buf, buflen - 1); | 212 bytes_written = heap_profile->FillOrderedProfile(buf, buflen - 1); |
| 249 } | 213 } |
| 250 if (FLAGS_mmap_profile) { | 214 // FillOrderedProfile should not reduce the set of active mmap-ed regions, |
| 251 heap_profile->ClearMMapData(); | 215 // hence MemoryRegionMap will let us remove everything we've added above: |
| 252 if (!DeleteMMapProfilerArenaIfExistsLocked()) { | 216 RAW_DCHECK(stats.Equivalent(heap_profile->total()), ""); |
| 253 RAW_LOG(FATAL, "Memory leak in HeapProfiler:"); | 217 // If this check fails, FillOrderedProfile somehow removed |
| 254 } | 218 // more than we added. |
| 255 } | |
| 256 } | 219 } |
| 257 buf[bytes_written] = '\0'; | 220 buf[bytes_written] = '\0'; |
| 258 RAW_DCHECK(bytes_written == strlen(buf), ""); | 221 RAW_DCHECK(bytes_written == strlen(buf), ""); |
| 259 | 222 |
| 260 return buf; | 223 return buf; |
| 261 } | 224 } |
| 262 | 225 |
| 263 extern "C" char* GetHeapProfile() { | 226 extern "C" char* GetHeapProfile() { |
| 264 // Use normal malloc: we return the profile to the user to free it: | 227 // Use normal malloc: we return the profile to the user to free it: |
| 265 char* buffer = reinterpret_cast<char*>(malloc(kProfileBufferSize)); | 228 char* buffer = reinterpret_cast<char*>(malloc(kProfileBufferSize)); |
| (...skipping 229 matching lines...) |
| 495 // call new, and we want that to be accounted for correctly. | 458 // call new, and we want that to be accounted for correctly. |
| 496 MallocExtension::Initialize(); | 459 MallocExtension::Initialize(); |
| 497 | 460 |
| 498 if (FLAGS_only_mmap_profile) { | 461 if (FLAGS_only_mmap_profile) { |
| 499 FLAGS_mmap_profile = true; | 462 FLAGS_mmap_profile = true; |
| 500 } | 463 } |
| 501 | 464 |
| 502 if (FLAGS_mmap_profile) { | 465 if (FLAGS_mmap_profile) { |
| 503 // Ask MemoryRegionMap to record all mmap, mremap, and sbrk | 466 // Ask MemoryRegionMap to record all mmap, mremap, and sbrk |
| 504 // call stack traces of at least size kMaxStackDepth: | 467 // call stack traces of at least size kMaxStackDepth: |
| 505 MemoryRegionMap::Init(HeapProfileTable::kMaxStackDepth); | 468 MemoryRegionMap::Init(HeapProfileTable::kMaxStackDepth, |
| | 469 /* use_buckets */ true); |
| 506 } | 470 } |
| 507 | 471 |
| 508 if (FLAGS_mmap_log) { | 472 if (FLAGS_mmap_log) { |
| 509 // Install our hooks to do the logging: | 473 // Install our hooks to do the logging: |
| 510 RAW_CHECK(MallocHook::AddMmapHook(&MmapHook), ""); | 474 RAW_CHECK(MallocHook::AddMmapHook(&MmapHook), ""); |
| 511 RAW_CHECK(MallocHook::AddMremapHook(&MremapHook), ""); | 475 RAW_CHECK(MallocHook::AddMremapHook(&MremapHook), ""); |
| 512 RAW_CHECK(MallocHook::AddMunmapHook(&MunmapHook), ""); | 476 RAW_CHECK(MallocHook::AddMunmapHook(&MunmapHook), ""); |
| 513 RAW_CHECK(MallocHook::AddSbrkHook(&SbrkHook), ""); | 477 RAW_CHECK(MallocHook::AddSbrkHook(&SbrkHook), ""); |
| 514 } | 478 } |
| 515 | 479 |
| 516 heap_profiler_memory = | 480 heap_profiler_memory = |
| 517 LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena()); | 481 LowLevelAlloc::NewArena(0, LowLevelAlloc::DefaultArena()); |
| 518 | 482 |
| 519 // Reserve space now for the heap profiler, so we can still write a | 483 // Reserve space now for the heap profiler, so we can still write a |
| 520 // heap profile even if the application runs out of memory. | 484 // heap profile even if the application runs out of memory. |
| 521 global_profiler_buffer = | 485 global_profiler_buffer = |
| 522 reinterpret_cast<char*>(ProfilerMalloc(kProfileBufferSize)); | 486 reinterpret_cast<char*>(ProfilerMalloc(kProfileBufferSize)); |
| 523 | 487 |
| 524 heap_profile = new(ProfilerMalloc(sizeof(HeapProfileTable))) | 488 heap_profile = new(ProfilerMalloc(sizeof(HeapProfileTable))) |
| 525 HeapProfileTable(ProfilerMalloc, ProfilerFree); | 489 HeapProfileTable(ProfilerMalloc, ProfilerFree, FLAGS_mmap_profile); |
| 526 | 490 |
| 527 last_dump_alloc = 0; | 491 last_dump_alloc = 0; |
| 528 last_dump_free = 0; | 492 last_dump_free = 0; |
| 529 high_water_mark = 0; | 493 high_water_mark = 0; |
| 530 last_dump_time = 0; | 494 last_dump_time = 0; |
| 531 | 495 |
| 532 if (FLAGS_deep_heap_profile) { | 496 if (FLAGS_deep_heap_profile) { |
| 533 // Initialize deep memory profiler | 497 // Initialize deep memory profiler |
| 534 RAW_VLOG(0, "[%d] Starting a deep memory profiler", getpid()); | 498 RAW_VLOG(0, "[%d] Starting a deep memory profiler", getpid()); |
| 535 deep_profile = new(ProfilerMalloc(sizeof(DeepHeapProfile))) | 499 deep_profile = new(ProfilerMalloc(sizeof(DeepHeapProfile))) |
| (...skipping 51 matching lines...) |
| 587 | 551 |
| 588 if (deep_profile) { | 552 if (deep_profile) { |
| 589 // free deep memory profiler | 553 // free deep memory profiler |
| 590 deep_profile->~DeepHeapProfile(); | 554 deep_profile->~DeepHeapProfile(); |
| 591 ProfilerFree(deep_profile); | 555 ProfilerFree(deep_profile); |
| 592 deep_profile = NULL; | 556 deep_profile = NULL; |
| 593 } | 557 } |
| 594 | 558 |
| 595 // free profile | 559 // free profile |
| 596 heap_profile->~HeapProfileTable(); | 560 heap_profile->~HeapProfileTable(); |
| 597 if (!DeleteMMapProfilerArenaIfExistsLocked()) { | |
| 598 RAW_LOG(FATAL, "Memory leak in HeapProfiler:"); | |
| 599 } | |
| 600 ProfilerFree(heap_profile); | 561 ProfilerFree(heap_profile); |
| 601 heap_profile = NULL; | 562 heap_profile = NULL; |
| 602 | 563 |
| 603 // free output-buffer memory | 564 // free output-buffer memory |
| 604 ProfilerFree(global_profiler_buffer); | 565 ProfilerFree(global_profiler_buffer); |
| 605 | 566 |
| 606 // free prefix | 567 // free prefix |
| 607 ProfilerFree(filename_prefix); | 568 ProfilerFree(filename_prefix); |
| 608 filename_prefix = NULL; | 569 filename_prefix = NULL; |
| 609 | 570 |
| (...skipping 76 matching lines...) |
| 686 | 647 |
| 687 // class used for finalization -- dumps the heap-profile at program exit | 648 // class used for finalization -- dumps the heap-profile at program exit |
| 688 struct HeapProfileEndWriter { | 649 struct HeapProfileEndWriter { |
| 689 ~HeapProfileEndWriter() { HeapProfilerDump("Exiting"); } | 650 ~HeapProfileEndWriter() { HeapProfilerDump("Exiting"); } |
| 690 }; | 651 }; |
| 691 | 652 |
| 692 // We want to make sure tcmalloc is up and running before starting the profiler | 653 // We want to make sure tcmalloc is up and running before starting the profiler |
| 693 static const TCMallocGuard tcmalloc_initializer; | 654 static const TCMallocGuard tcmalloc_initializer; |
| 694 REGISTER_MODULE_INITIALIZER(heapprofiler, HeapProfilerInit()); | 655 REGISTER_MODULE_INITIALIZER(heapprofiler, HeapProfilerInit()); |
| 695 static HeapProfileEndWriter heap_profile_end_writer; | 656 static HeapProfileEndWriter heap_profile_end_writer; |
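
For readers skimming the diff, the exported C interface touched here (GetHeapProfile(), HeapProfilerDump(), and the HEAPPROFILE-driven startup) can be exercised directly. Below is a minimal usage sketch, not part of the patch: it assumes the upstream gperftools header path and a writable /tmp prefix, neither of which the patch specifies. GetHeapProfile() returns a NUL-terminated buffer allocated with plain malloc (sized by kProfileBufferSize above), so the caller frees it; HeapProfilerDump() writes a numbered .heap file using the prefix passed to HeapProfilerStart().

```cpp
// Minimal sketch of driving the heap profiler through its public C API.
// The header path below is an assumption; it varies between gperftools
// releases (<google/heap-profiler.h> in older trees) and the Chromium copy.
#include <stdio.h>
#include <stdlib.h>

#include <gperftools/heap-profiler.h>

int main() {
  // Dumps are written as "<prefix>.<seq>.heap"; the prefix here is arbitrary.
  HeapProfilerStart("/tmp/heap-example");

  void* block = malloc(1 << 20);  // an allocation for the profiler to record

  // GetHeapProfile() hands back a malloc-allocated profile string; the
  // caller is responsible for freeing it, as the comment in the diff notes.
  char* profile = GetHeapProfile();
  if (profile != NULL) {
    fputs(profile, stdout);
    free(profile);
  }

  HeapProfilerDump("manual checkpoint");  // force a numbered .heap dump
  free(block);
  HeapProfilerStop();
  return 0;
}
```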