| OLD | NEW |
| 1 // Copyright (c) 2008, Google Inc. | 1 // Copyright (c) 2008, Google Inc. |
| 2 // All rights reserved. | 2 // All rights reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // * Redistributions of source code must retain the above copyright | 8 // * Redistributions of source code must retain the above copyright |
| 9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. |
| 10 // * Redistributions in binary form must reproduce the above | 10 // * Redistributions in binary form must reproduce the above |
| (...skipping 76 matching lines...) |
| 87 | 87 |
| 88 // Total byte size in cache | 88 // Total byte size in cache |
| 89 size_t Size() const { return size_; } | 89 size_t Size() const { return size_; } |
| 90 | 90 |
| 91 // Allocate an object of the given size and class. The size given | 91 // Allocate an object of the given size and class. The size given |
| 92 // must be the same as the size of the class in the size map. | 92 // must be the same as the size of the class in the size map. |
| 93 void* Allocate(size_t size, size_t cl); | 93 void* Allocate(size_t size, size_t cl); |
| 94 void Deallocate(void* ptr, size_t size_class); | 94 void Deallocate(void* ptr, size_t size_class); |
| 95 | 95 |
| 96 void Scavenge(); | 96 void Scavenge(); |
| 97 void Print(TCMalloc_Printer* out) const; | |
| 98 | 97 |
| 99 int GetSamplePeriod(); | 98 int GetSamplePeriod(); |
| 100 | 99 |
| 101 // Record allocation of "k" bytes. Return true iff allocation | 100 // Record allocation of "k" bytes. Return true iff allocation |
| 102 // should be sampled | 101 // should be sampled |
| 103 bool SampleAllocation(size_t k); | 102 bool SampleAllocation(size_t k); |
| 104 | 103 |
| 105 static void InitModule(); | 104 static void InitModule(); |
| 106 static void InitTSD(); | 105 static void InitTSD(); |
| 107 static ThreadCache* GetThreadHeap(); | 106 static ThreadCache* GetThreadHeap(); |
| 108 static ThreadCache* GetCache(); | 107 static ThreadCache* GetCache(); |
| 109 static ThreadCache* GetCacheIfPresent(); | 108 static ThreadCache* GetCacheIfPresent(); |
| 110 static ThreadCache* CreateCacheIfNecessary(); | 109 static ThreadCache* CreateCacheIfNecessary(); |
| 111 static void BecomeIdle(); | 110 static void BecomeIdle(); |
| 112 | 111 |
| 113 // Return the number of thread heaps in use. | 112 // Return the number of thread heaps in use. |
| 114 static inline int HeapsInUse(); | 113 static inline int HeapsInUse(); |
| 115 | 114 |
| 116 // Writes to total_bytes the total number of bytes used by all thread heaps. | 115 // Writes to total_bytes the total number of bytes used by all thread heaps. |
| 117 // class_count must be an array of size kNumClasses. Writes the number of | 116 // class_count must be an array of size kNumClasses. Writes the number of |
| 118 // items on the corresponding freelist. class_count may be NULL. | 117 // items on the corresponding freelist. class_count may be NULL. |
| 119 // The storage of both parameters must be zero initialized. | 118 // The storage of both parameters must be zero initialized. |
| 120 // REQUIRES: Static::pageheap_lock is held. | 119 // REQUIRES: Static::pageheap_lock is held. |
| 121 static void GetThreadStats(uint64_t* total_bytes, uint64_t* class_count); | 120 static void GetThreadStats(uint64_t* total_bytes, uint64_t* class_count); |
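A usage sketch of the contract above; `kNumClasses` and the pageheap lock are tcmalloc internals visible elsewhere in this header, and the zero-initialization requirement is the easy part to forget:

```cpp
uint64_t total_bytes = 0;                   // must start zeroed
uint64_t class_count[kNumClasses] = { 0 };  // must start zeroed
// REQUIRES: caller holds Static::pageheap_lock.
ThreadCache::GetThreadStats(&total_bytes, class_count);
// class_count[cl] now holds the free-object count for size class cl
// summed over all thread heaps; total_bytes the bytes they occupy.
```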
| 122 | 121 |
| 123 // Write debugging statistics to 'out'. | |
| 124 // REQUIRES: Static::pageheap_lock is held. | |
| 125 static void PrintThreads(TCMalloc_Printer* out); | |
| 126 | |
| 127 // Sets the total thread cache size to new_size, recomputing the | 122 // Sets the total thread cache size to new_size, recomputing the |
| 128 // individual thread cache sizes as necessary. | 123 // individual thread cache sizes as necessary. |
| 129 // REQUIRES: Static::pageheap lock is held. | 124 // REQUIRES: Static::pageheap lock is held. |
| 130 static void set_overall_thread_cache_size(size_t new_size); | 125 static void set_overall_thread_cache_size(size_t new_size); |
| 131 static size_t overall_thread_cache_size() { | 126 static size_t overall_thread_cache_size() { |
| 132 return overall_thread_cache_size_; | 127 return overall_thread_cache_size_; |
| 133 } | 128 } |
| 134 | 129 |
| 135 private: | 130 private: |
| 136 class FreeList { | 131 class FreeList { |
| (...skipping 65 matching lines...) |
| 202 length_++; | 197 length_++; |
| 203 } | 198 } |
| 204 | 199 |
| 205 void* Pop() { | 200 void* Pop() { |
| 206 ASSERT(list_ != NULL); | 201 ASSERT(list_ != NULL); |
| 207 length_--; | 202 length_--; |
| 208 if (length_ < lowater_) lowater_ = length_; | 203 if (length_ < lowater_) lowater_ = length_; |
| 209 return SLL_Pop(&list_); | 204 return SLL_Pop(&list_); |
| 210 } | 205 } |
| 211 | 206 |
| 207 void* Next() { |
| 208 return SLL_Next(&list_); |
| 209 } |
| 210 |
| 212 void PushRange(int N, void *start, void *end) { | 211 void PushRange(int N, void *start, void *end) { |
| 213 SLL_PushRange(&list_, start, end); | 212 SLL_PushRange(&list_, start, end); |
| 214 length_ += N; | 213 length_ += N; |
| 215 } | 214 } |
| 216 | 215 |
| 217 void PopRange(int N, void **start, void **end) { | 216 void PopRange(int N, void **start, void **end) { |
| 218 SLL_PopRange(&list_, N, start, end); | 217 SLL_PopRange(&list_, N, start, end); |
| 219 ASSERT(length_ >= N); | 218 ASSERT(length_ >= N); |
| 220 length_ -= N; | 219 length_ -= N; |
| 221 if (length_ < lowater_) lowater_ = length_; | 220 if (length_ < lowater_) lowater_ = length_; |
| (...skipping 120 matching lines...) |
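For context on the new `FreeList::Next()`, here is a minimal sketch of the `SLL_*` helpers the FreeList wraps, assuming (as tcmalloc's `linked_list.h` does) that the "next" pointer lives in the first word of each free object, so the list costs no memory beyond a head pointer. Note that `SLL_Next(&list_)` treats the head pointer itself as a node, so `Next()` returns the current head element:

```cpp
#include <cassert>
#include <cstddef>

// Next pointer is stored in the first word of each free object.
inline void* SLL_Next(void* t) { return *(reinterpret_cast<void**>(t)); }
inline void SLL_SetNext(void* t, void* n) { *(reinterpret_cast<void**>(t)) = n; }

inline void SLL_Push(void** list, void* element) {
  SLL_SetNext(element, *list);
  *list = element;
}

inline void* SLL_Pop(void** list) {
  void* result = *list;
  *list = SLL_Next(result);
  return result;
}

int main() {
  void* storage[2];   // two fake one-word "free objects"
  void* head = NULL;
  SLL_Push(&head, &storage[0]);
  SLL_Push(&head, &storage[1]);
  assert(SLL_Next(&head) == &storage[1]);  // what FreeList::Next() sees
  assert(SLL_Pop(&head) == &storage[1]);   // LIFO: last pushed, first popped
  assert(SLL_Pop(&head) == &storage[0]);
  assert(head == NULL);
}
```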
| 342 return FetchFromCentralCache(cl, size); | 341 return FetchFromCentralCache(cl, size); |
| 343 } | 342 } |
| 344 size_ -= size; | 343 size_ -= size; |
| 345 return list->Pop(); | 344 return list->Pop(); |
| 346 } | 345 } |
| 347 | 346 |
| 348 inline void ThreadCache::Deallocate(void* ptr, size_t cl) { | 347 inline void ThreadCache::Deallocate(void* ptr, size_t cl) { |
| 349 FreeList* list = &list_[cl]; | 348 FreeList* list = &list_[cl]; |
| 350 size_ += Static::sizemap()->ByteSizeForClass(cl); | 349 size_ += Static::sizemap()->ByteSizeForClass(cl); |
| 351 ssize_t size_headroom = max_size_ - size_ - 1; | 350 ssize_t size_headroom = max_size_ - size_ - 1; |
| 351 |
| 352 // This catches back-to-back frees of allocs in the same size |
| 353 // class. A more comprehensive (and expensive) test would be to walk |
| 354 // the entire freelist. But this might be enough to find some bugs. |
| 355 ASSERT(ptr != list->Next()); |
| 356 |
| 352 list->Push(ptr); | 357 list->Push(ptr); |
| 353 ssize_t list_headroom = | 358 ssize_t list_headroom = |
| 354 static_cast<ssize_t>(list->max_length()) - list->length(); | 359 static_cast<ssize_t>(list->max_length()) - list->length(); |
| 355 | 360 |
| 356 // There are two relatively uncommon things that require further work. | 361 // There are two relatively uncommon things that require further work. |
| 357 // In the common case we're done, and in that case we need a single branch | 362 // In the common case we're done, and in that case we need a single branch |
| 358 // because of the bitwise-or trick that follows. | 363 // because of the bitwise-or trick that follows. |
| 359 if ((list_headroom | size_headroom) < 0) { | 364 if ((list_headroom | size_headroom) < 0) { |
| 360 if (list_headroom < 0) { | 365 if (list_headroom < 0) { |
| 361 ListTooLong(list, cl); | 366 ListTooLong(list, cl); |
| (...skipping 27 matching lines...) |
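Two details in `Deallocate()` above are easy to miss. First, the new `ASSERT(ptr != list->Next())` catches the most common double-free pattern cheaply: after a free, the freed object sits at the head of its size class's list, so a back-to-back free of the same pointer compares equal to the head. Second, `(list_headroom | size_headroom) < 0` folds two signed comparisons into one branch, since the OR of two signed values has its sign bit set exactly when at least one operand is negative. A minimal sketch, using `std::ptrdiff_t` as a portable stand-in for the `ssize_t` bookkeeping:

```cpp
#include <cassert>
#include <cstddef>

// Sign-bit trick: one branch tests "is either value negative?".
bool either_negative(std::ptrdiff_t a, std::ptrdiff_t b) {
  return (a | b) < 0;
}

int main() {
  assert(!either_negative(5, 7));   // common case: one untaken branch
  assert(either_negative(-1, 7));   // list past max_length()
  assert(either_negative(5, -3));   // cache past max_size_

  // Back-to-back double-free detection: after a push, the freed object
  // is the head of the list, i.e. what FreeList::Next() returns.
  void* slot;               // one fake free object (first word = next)
  void* head = NULL;
  void* ptr = &slot;
  assert(ptr != head);                    // first free: check passes
  *reinterpret_cast<void**>(ptr) = head;  // SLL_SetNext
  head = ptr;                             // push: ptr is now the head
  assert(ptr == head);  // second free: ASSERT(ptr != list->Next()) fires
}
```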
| 389 // because we may be in the thread destruction code and may have | 394 // because we may be in the thread destruction code and may have |
| 390 // already cleaned up the cache for this thread. | 395 // already cleaned up the cache for this thread. |
| 391 inline ThreadCache* ThreadCache::GetCacheIfPresent() { | 396 inline ThreadCache* ThreadCache::GetCacheIfPresent() { |
| 392 if (!tsd_inited_) return NULL; | 397 if (!tsd_inited_) return NULL; |
| 393 return GetThreadHeap(); | 398 return GetThreadHeap(); |
| 394 } | 399 } |
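This is why a variant that can return NULL exists: deallocation can run while the calling thread is being destroyed, after its cache has been reclaimed, and forcing re-creation there would leak. A hypothetical caller sketch (`ReleaseToCentralCache` is a stand-in name, not the real API):

```cpp
void ReleaseObject(void* ptr, size_t cl) {
  tcmalloc::ThreadCache* heap = tcmalloc::ThreadCache::GetCacheIfPresent();
  if (heap != NULL) {
    heap->Deallocate(ptr, cl);       // normal fast path
  } else {
    ReleaseToCentralCache(ptr, cl);  // hypothetical fallback to shared lists
  }
}
```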
| 395 | 400 |
| 396 } // namespace tcmalloc | 401 } // namespace tcmalloc |
| 397 | 402 |
| 398 #endif // TCMALLOC_THREAD_CACHE_H_ | 403 #endif // TCMALLOC_THREAD_CACHE_H_ |