| OLD | NEW |
| 1 // Copyright (c) 2008, Google Inc. | 1 // Copyright (c) 2008, Google Inc. |
| 2 // All rights reserved. | 2 // All rights reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // * Redistributions of source code must retain the above copyright | 8 // * Redistributions of source code must retain the above copyright |
| 9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. |
| 10 // * Redistributions in binary form must reproduce the above | 10 // * Redistributions in binary form must reproduce the above |
| (...skipping 78 matching lines...) |
| 89 | 89 |
| 90 void Scavenge(); | 90 void Scavenge(); |
| 91 void Print(TCMalloc_Printer* out) const; | 91 void Print(TCMalloc_Printer* out) const; |
| 92 | 92 |
| 93 int GetSamplePeriod(); | 93 int GetSamplePeriod(); |
| 94 | 94 |
| 95 // Record allocation of "k" bytes. Return true iff allocation | 95 // Record allocation of "k" bytes. Return true iff allocation |
| 96 // should be sampled | 96 // should be sampled |
| 97 bool SampleAllocation(size_t k); | 97 bool SampleAllocation(size_t k); |
| 98 | 98 |
| 99 // Record additional bytes allocated. |
| 100 void AddToByteAllocatedTotal(size_t k) { total_bytes_allocated_ += k; } |
| 101 |
| 102 // Return the total number of bytes allocated from this heap. The value will |
| 103 // wrap when there is an overflow, and so only the differences between two |
| 104 // values should be relied on (and even then, modulo 2^32). |
| 105 uint32 GetTotalBytesAllocated() const; |
| 106 |
| 107 // Return GetTotalBytesAllocated() for the current thread's heap. |
| 108 static uint32 GetBytesAllocatedOnCurrentThread(); |
| 109 |
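The comment above says the counter wraps at 2^32 and that only differences between readings are meaningful. Below is a minimal caller-side sketch of what that usage could look like; it is not part of this change, and `RecordTaskAllocation` is a hypothetical reporting hook.

```cpp
// Hypothetical caller: measure how many bytes a single task allocates on the
// current thread.  Because the counter is a uint32 that wraps on overflow,
// the delta is taken with unsigned subtraction, which stays correct across a
// single wrap.
void RunTaskWithAllocationTally(void (*task)()) {
  const uint32 before = tcmalloc::ThreadCache::GetBytesAllocatedOnCurrentThread();
  task();
  const uint32 after = tcmalloc::ThreadCache::GetBytesAllocatedOnCurrentThread();
  const uint32 bytes_for_task = after - before;  // Modulo-2^32 difference.
  RecordTaskAllocation(bytes_for_task);          // Hypothetical reporting hook.
}
```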
| 99 static void InitModule(); | 110 static void InitModule(); |
| 100 static void InitTSD(); | 111 static void InitTSD(); |
| 101 static ThreadCache* GetThreadHeap(); | 112 static ThreadCache* GetThreadHeap(); |
| 102 static ThreadCache* GetCache(); | 113 static ThreadCache* GetCache(); |
| 103 static ThreadCache* GetCacheIfPresent(); | 114 static ThreadCache* GetCacheIfPresent(); |
| 104 static ThreadCache* CreateCacheIfNecessary(); | 115 static ThreadCache* CreateCacheIfNecessary(); |
| 105 static void BecomeIdle(); | 116 static void BecomeIdle(); |
| 106 | 117 |
| 107 // Return the number of thread heaps in use. | 118 // Return the number of thread heaps in use. |
| 108 static inline int HeapsInUse(); | 119 static inline int HeapsInUse(); |
| (...skipping 175 matching lines...) |
| 284 // Represents overall_thread_cache_size_ minus the sum of max_size_ | 295 // Represents overall_thread_cache_size_ minus the sum of max_size_ |
| 285 // across all ThreadCaches. Protected by Static::pageheap_lock. | 296 // across all ThreadCaches. Protected by Static::pageheap_lock. |
| 286 static ssize_t unclaimed_cache_space_; | 297 static ssize_t unclaimed_cache_space_; |
| 287 | 298 |
| 288 // This class is laid out with the most frequently used fields | 299 // This class is laid out with the most frequently used fields |
| 289 // first so that hot elements are placed on the same cache line. | 300 // first so that hot elements are placed on the same cache line. |
| 290 | 301 |
| 291 size_t size_; // Combined size of data | 302 size_t size_; // Combined size of data |
| 292 size_t max_size_; // size_ > max_size_ --> Scavenge() | 303 size_t max_size_; // size_ > max_size_ --> Scavenge() |
| 293 | 304 |
| 305 // The following is the tally of bytes allocated on a thread as a response to |
| 306 // any flavor of malloc() call. The aggregated amount includes all padding to |
| 307 // the smallest class that can hold the request, or to the nearest whole page |
| 308 // when a large allocation is made without using a class. This sum is |
| 309 // currently used for Chromium profiling, where tallies are kept of the amount |
| 310 // of memory allocated during the running of each task on each thread. |
| 311 uint32 total_bytes_allocated_; // Total, modulo 2^32. |
| 312 |
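As the comment describes, the tally records the rounded-up size (the smallest size class that fits the request, or whole pages for large requests), not the raw request. A rough sketch of an assumed call site is shown below; the actual call sites are in the allocation path and are not part of this diff, and `AllocateAndTally` is purely illustrative.

```cpp
// Assumed shape of the tallying call site: the byte count added is the size
// class's size, so padding up to the class is included in the total.
void* AllocateAndTally(tcmalloc::ThreadCache* heap, size_t requested_bytes) {
  const size_t cl = Static::sizemap()->SizeClass(requested_bytes);        // e.g. 10 bytes -> 16-byte class
  const size_t allocated_bytes = Static::sizemap()->ByteSizeForClass(cl); // class size, not the request
  heap->AddToByteAllocatedTotal(allocated_bytes);  // Tally includes the padding.
  return heap->Allocate(allocated_bytes, cl);
}
```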
| 294 // We sample allocations, biased by the size of the allocation | 313 // We sample allocations, biased by the size of the allocation |
| 295 Sampler sampler_; // A sampler | 314 Sampler sampler_; // A sampler |
| 296 | 315 |
| 297 FreeList list_[kNumClasses]; // Array indexed by size-class | 316 FreeList list_[kNumClasses]; // Array indexed by size-class |
| 298 | 317 |
| 299 pthread_t tid_; // Which thread owns it | 318 pthread_t tid_; // Which thread owns it |
| 300 bool in_setspecific_; // In call to pthread_setspecific? | 319 bool in_setspecific_; // In call to pthread_setspecific? |
| 301 | 320 |
| 302 // Allocate a new heap. REQUIRES: Static::pageheap_lock is held. | 321 // Allocate a new heap. REQUIRES: Static::pageheap_lock is held. |
| 303 static ThreadCache* NewHeap(pthread_t tid); | 322 static ThreadCache* NewHeap(pthread_t tid); |
| (...skipping 16 matching lines...) |
| 320 extern PageHeapAllocator<ThreadCache> threadcache_allocator; | 339 extern PageHeapAllocator<ThreadCache> threadcache_allocator; |
| 321 | 340 |
| 322 inline int ThreadCache::HeapsInUse() { | 341 inline int ThreadCache::HeapsInUse() { |
| 323 return threadcache_allocator.inuse(); | 342 return threadcache_allocator.inuse(); |
| 324 } | 343 } |
| 325 | 344 |
| 326 inline bool ThreadCache::SampleAllocation(size_t k) { | 345 inline bool ThreadCache::SampleAllocation(size_t k) { |
| 327 return sampler_.SampleAllocation(k); | 346 return sampler_.SampleAllocation(k); |
| 328 } | 347 } |
| 329 | 348 |
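For context, `SampleAllocation` is a pure decision function: it records the `k` bytes against the sampler and tells the caller whether this particular allocation should take the sampled (profiled) path. A hedged sketch of how a caller might consume it follows; `DoSampledAllocation` is assumed here and the real call sites live outside this header.

```cpp
// Illustrative allocation path: sampled allocations take a slower path that
// can additionally record a stack trace for heap profiling.
void* AllocateWithSampling(tcmalloc::ThreadCache* heap, size_t size, size_t cl) {
  if (heap->SampleAllocation(size)) {
    return DoSampledAllocation(size);  // Hypothetical slow path for sampled requests.
  }
  return heap->Allocate(Static::sizemap()->ByteSizeForClass(cl), cl);
}
```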
| 349 inline uint32 ThreadCache::GetTotalBytesAllocated() const { |
| 350 return total_bytes_allocated_; |
| 351 } |
| 352 |
| 330 inline void* ThreadCache::Allocate(size_t size, size_t cl) { | 353 inline void* ThreadCache::Allocate(size_t size, size_t cl) { |
| 331 ASSERT(size <= kMaxSize); | 354 ASSERT(size <= kMaxSize); |
| 332 ASSERT(size == Static::sizemap()->ByteSizeForClass(cl)); | 355 ASSERT(size == Static::sizemap()->ByteSizeForClass(cl)); |
| 333 | 356 |
| 334 FreeList* list = &list_[cl]; | 357 FreeList* list = &list_[cl]; |
| 335 if (list->empty()) { | 358 if (list->empty()) { |
| 336 return FetchFromCentralCache(cl, size); | 359 return FetchFromCentralCache(cl, size); |
| 337 } | 360 } |
| 338 size_ -= size; | 361 size_ -= size; |
| 339 return list->Pop(); | 362 return list->Pop(); |
| (...skipping 43 matching lines...) |
| 383 // because we may be in the thread destruction code and may have | 406 // because we may be in the thread destruction code and may have |
| 384 // already cleaned up the cache for this thread. | 407 // already cleaned up the cache for this thread. |
| 385 inline ThreadCache* ThreadCache::GetCacheIfPresent() { | 408 inline ThreadCache* ThreadCache::GetCacheIfPresent() { |
| 386 if (!tsd_inited_) return NULL; | 409 if (!tsd_inited_) return NULL; |
| 387 return GetThreadHeap(); | 410 return GetThreadHeap(); |
| 388 } | 411 } |
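Because `GetCacheIfPresent()` can return NULL during thread destruction, callers on the deallocation path must tolerate a missing per-thread cache. A minimal sketch of that pattern, assuming a `Deallocate(ptr, cl)` member declared in the elided portion of this header and a hypothetical `ReleaseToCentralCache` fallback:

```cpp
// Illustrative caller (not part of this change): fall back to a central
// free-list path when the thread-local cache has already been torn down.
void ReleaseToCache(void* ptr, size_t cl) {
  tcmalloc::ThreadCache* heap = tcmalloc::ThreadCache::GetCacheIfPresent();
  if (heap != NULL) {
    heap->Deallocate(ptr, cl);       // Fast path: return to the thread-local free list.
  } else {
    ReleaseToCentralCache(ptr, cl);  // Hypothetical slow path to the central free lists.
  }
}
```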
| 389 | 412 |
| 390 } // namespace tcmalloc | 413 } // namespace tcmalloc |
| 391 | 414 |
| 392 #endif // TCMALLOC_THREAD_CACHE_H_ | 415 #endif // TCMALLOC_THREAD_CACHE_H_ |