OLD | NEW |
1 // Copyright (c) 2008, Google Inc. | 1 // Copyright (c) 2008, Google Inc. |
2 // All rights reserved. | 2 // All rights reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
6 // met: | 6 // met: |
7 // | 7 // |
8 // * Redistributions of source code must retain the above copyright | 8 // * Redistributions of source code must retain the above copyright |
9 // notice, this list of conditions and the following disclaimer. | 9 // notice, this list of conditions and the following disclaimer. |
10 // * Redistributions in binary form must reproduce the above | 10 // * Redistributions in binary form must reproduce the above |
(...skipping 70 matching lines...)
81 | 81 |
82 // Total byte size in cache | 82 // Total byte size in cache |
83 size_t Size() const { return size_; } | 83 size_t Size() const { return size_; } |
84 | 84 |
85 // Allocate an object of the given size and class. The size given | 85 // Allocate an object of the given size and class. The size given |
86 // must be the same as the size of the class in the size map. | 86 // must be the same as the size of the class in the size map. |
87 void* Allocate(size_t size, size_t cl); | 87 void* Allocate(size_t size, size_t cl); |
88 void Deallocate(void* ptr, size_t size_class); | 88 void Deallocate(void* ptr, size_t size_class); |
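A quick sketch, not part of the patch: the comment above requires that the size passed to Allocate() be the byte size recorded for the class in the size map. Assuming SizeClass() is the sizemap lookup used elsewhere in tcmalloc (the exact lookup name is an assumption; ByteSizeForClass() is the accessor already used in Deallocate() below):

    // Resolve the size class, then fetch that class's byte size; Allocate()
    // and Deallocate() must be given the matching (size, class) pair.
    const size_t cl         = Static::sizemap()->SizeClass(requested_bytes);  // assumed lookup
    const size_t class_size = Static::sizemap()->ByteSizeForClass(cl);
    void* ptr = heap->Allocate(class_size, cl);
    // ... use ptr ...
    heap->Deallocate(ptr, cl);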
89 | 89 |
90 void Scavenge(); | 90 void Scavenge(); |
91 void Print(TCMalloc_Printer* out) const; | |
92 | 91 |
93 int GetSamplePeriod(); | 92 int GetSamplePeriod(); |
94 | 93 |
95 // Record allocation of "k" bytes. Return true iff allocation | 94 // Record allocation of "k" bytes. Return true iff allocation |
96 // should be sampled | 95 // should be sampled |
97 bool SampleAllocation(size_t k); | 96 bool SampleAllocation(size_t k); |
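A hedged sketch of the intended calling pattern for SampleAllocation(); DoSampledAllocation() is a hypothetical slow-path helper named only for illustration:

    void* AllocateMaybeSampled(ThreadCache* heap, size_t class_size, size_t cl) {
      if (heap->SampleAllocation(class_size)) {
        // Sampler fired for these bytes: take the (hypothetical) slow path
        // that records the allocation for heap profiling.
        return DoSampledAllocation(class_size);
      }
      // Common case: serve from the per-thread freelist.
      return heap->Allocate(class_size, cl);
    }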
98 | 97 |
99 static void InitModule(); | 98 static void InitModule(); |
100 static void InitTSD(); | 99 static void InitTSD(); |
101 static ThreadCache* GetThreadHeap(); | 100 static ThreadCache* GetThreadHeap(); |
102 static ThreadCache* GetCache(); | 101 static ThreadCache* GetCache(); |
103 static ThreadCache* GetCacheIfPresent(); | 102 static ThreadCache* GetCacheIfPresent(); |
104 static ThreadCache* CreateCacheIfNecessary(); | 103 static ThreadCache* CreateCacheIfNecessary(); |
105 static void BecomeIdle(); | 104 static void BecomeIdle(); |
106 | 105 |
107 // Return the number of thread heaps in use. | 106 // Return the number of thread heaps in use. |
108 static inline int HeapsInUse(); | 107 static inline int HeapsInUse(); |
109 | 108 |
110 // Writes to total_bytes the total number of bytes used by all thread heaps. | 109 // Writes to total_bytes the total number of bytes used by all thread heaps. |
111 // class_count must be an array of size kNumClasses. Writes the number of | 110 // class_count must be an array of size kNumClasses. Writes the number of |
112 // items on the corresponding freelist. class_count may be NULL. | 111 // items on the corresponding freelist. class_count may be NULL. |
113 // The storage of both parameters must be zero initialized. | 112 // The storage of both parameters must be zero initialized. |
114 // REQUIRES: Static::pageheap_lock is held. | 113 // REQUIRES: Static::pageheap_lock is held. |
115 static void GetThreadStats(uint64_t* total_bytes, uint64_t* class_count); | 114 static void GetThreadStats(uint64_t* total_bytes, uint64_t* class_count); |
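A minimal usage sketch for GetThreadStats(), following the requirements stated above (zero-initialized storage, pageheap_lock held); the SpinLockHolder/Static::pageheap_lock() spelling is assumed from the rest of tcmalloc:

    uint64_t total_bytes = 0;                    // must start at zero
    uint64_t class_count[kNumClasses] = { 0 };   // must start at zero; NULL is also allowed
    {
      SpinLockHolder h(Static::pageheap_lock()); // REQUIRES: pageheap_lock held
      ThreadCache::GetThreadStats(&total_bytes, class_count);
    }
    // total_bytes: bytes held across all thread heaps.
    // class_count[cl]: free objects of size class 'cl' summed over all heaps.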
116 | 115 |
117 // Write debugging statistics to 'out'. | |
118 // REQUIRES: Static::pageheap_lock is held. | |
119 static void PrintThreads(TCMalloc_Printer* out); | |
120 | |
121 // Sets the total thread cache size to new_size, recomputing the | 116 // Sets the total thread cache size to new_size, recomputing the |
122 // individual thread cache sizes as necessary. | 117 // individual thread cache sizes as necessary. |
123 // REQUIRES: Static::pageheap_lock is held. | 118 // REQUIRES: Static::pageheap_lock is held. |
124 static void set_overall_thread_cache_size(size_t new_size); | 119 static void set_overall_thread_cache_size(size_t new_size); |
125 static size_t overall_thread_cache_size() { | 120 static size_t overall_thread_cache_size() { |
126 return overall_thread_cache_size_; | 121 return overall_thread_cache_size_; |
127 } | 122 } |
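A sketch of how the overall budget is typically adjusted, with the locking requirement from the comment above (again assuming the SpinLockHolder/Static::pageheap_lock() spelling from the rest of tcmalloc):

    {
      SpinLockHolder h(Static::pageheap_lock());
      // Shrink the total budget to 16 MiB; individual thread cache limits
      // are recomputed inside the setter, per the comment above.
      ThreadCache::set_overall_thread_cache_size(16 << 20);
    }
    size_t budget = ThreadCache::overall_thread_cache_size();  // plain inline read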
128 | 123 |
129 private: | 124 private: |
130 class FreeList { | 125 class FreeList { |
(...skipping 65 matching lines...)
196 length_++; | 191 length_++; |
197 } | 192 } |
198 | 193 |
199 void* Pop() { | 194 void* Pop() { |
200 ASSERT(list_ != NULL); | 195 ASSERT(list_ != NULL); |
201 length_--; | 196 length_--; |
202 if (length_ < lowater_) lowater_ = length_; | 197 if (length_ < lowater_) lowater_ = length_; |
203 return FL_Pop(&list_); | 198 return FL_Pop(&list_); |
204 } | 199 } |
205 | 200 |
| 201 void* Next() { |
| 202 return SLL_Next(&list_); |
| 203 } |
| 204 |
206 void PushRange(int N, void *start, void *end) { | 205 void PushRange(int N, void *start, void *end) { |
207 FL_PushRange(&list_, start, end); | 206 FL_PushRange(&list_, start, end); |
208 length_ += N; | 207 length_ += N; |
209 } | 208 } |
210 | 209 |
211 void PopRange(int N, void **start, void **end) { | 210 void PopRange(int N, void **start, void **end) { |
212 FL_PopRange(&list_, N, start, end); | 211 FL_PopRange(&list_, N, start, end); |
213 ASSERT(length_ >= N); | 212 ASSERT(length_ >= N); |
214 length_ -= N; | 213 length_ -= N; |
215 if (length_ < lowater_) lowater_ = length_; | 214 if (length_ < lowater_) lowater_ = length_; |
(...skipping 120 matching lines...)
336 return FetchFromCentralCache(cl, size); | 335 return FetchFromCentralCache(cl, size); |
337 } | 336 } |
338 size_ -= size; | 337 size_ -= size; |
339 return list->Pop(); | 338 return list->Pop(); |
340 } | 339 } |
341 | 340 |
342 inline void ThreadCache::Deallocate(void* ptr, size_t cl) { | 341 inline void ThreadCache::Deallocate(void* ptr, size_t cl) { |
343 FreeList* list = &list_[cl]; | 342 FreeList* list = &list_[cl]; |
344 size_ += Static::sizemap()->ByteSizeForClass(cl); | 343 size_ += Static::sizemap()->ByteSizeForClass(cl); |
345 ssize_t size_headroom = max_size_ - size_ - 1; | 344 ssize_t size_headroom = max_size_ - size_ - 1; |
| 345 |
| 346 // This catches back-to-back frees of allocs in the same size |
| 347 // class. A more comprehensive (and expensive) test would be to walk |
| 348 // the entire freelist. But this might be enough to find some bugs. |
| 349 ASSERT(ptr != list->Next()); |
| 350 |
346 list->Push(ptr); | 351 list->Push(ptr); |
347 ssize_t list_headroom = | 352 ssize_t list_headroom = |
348 static_cast<ssize_t>(list->max_length()) - list->length(); | 353 static_cast<ssize_t>(list->max_length()) - list->length(); |
349 | 354 |
350 // There are two relatively uncommon things that require further work. | 355 // There are two relatively uncommon things that require further work. |
351 // In the common case we're done, and in that case we need a single branch | 356 // In the common case we're done, and in that case we need a single branch |
352 // because of the bitwise-or trick that follows. | 357 // because of the bitwise-or trick that follows. |
353 if ((list_headroom | size_headroom) < 0) { | 358 if ((list_headroom | size_headroom) < 0) { |
354 if (list_headroom < 0) { | 359 if (list_headroom < 0) { |
355 ListTooLong(list, cl); | 360 ListTooLong(list, cl); |
(...skipping 27 matching lines...)
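Two notes on the Deallocate() fast path above, not part of the patch. The new ASSERT(ptr != list->Next()) compares the pointer being freed with the current head of the per-class freelist (which is what Next() returns), so an immediate double free of the same object trips the assert in debug builds:

    void* p = heap->Allocate(class_size, cl);
    heap->Deallocate(p, cl);
    heap->Deallocate(p, cl);   // p is now the list head: ptr == list->Next(), ASSERT fires

The bitwise-or trick: for two ssize_t values the sign bit of (a | b) is set exactly when either operand is negative, so one comparison covers both "freelist over max_length" and "cache over max_size_":

    // list_headroom = 3, size_headroom = -1:  (3 | -1) == -1 < 0   -> slow path
    // list_headroom = 3, size_headroom =  5:  (3 |  5) ==  7 >= 0  -> single fast-path branch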
383 // because we may be in the thread destruction code and may have | 388 // because we may be in the thread destruction code and may have |
384 // already cleaned up the cache for this thread. | 389 // already cleaned up the cache for this thread. |
385 inline ThreadCache* ThreadCache::GetCacheIfPresent() { | 390 inline ThreadCache* ThreadCache::GetCacheIfPresent() { |
386 if (!tsd_inited_) return NULL; | 391 if (!tsd_inited_) return NULL; |
387 return GetThreadHeap(); | 392 return GetThreadHeap(); |
388 } | 393 } |
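The NULL return matters on the free path during thread shutdown; a hedged sketch of the expected calling pattern (the fallback helper name is hypothetical):

    ThreadCache* heap = ThreadCache::GetCacheIfPresent();
    if (heap != NULL) {
      heap->Deallocate(ptr, cl);      // common case: per-thread fast path
    } else {
      FreeViaCentralCache(ptr, cl);   // hypothetical fallback once the cache is gone
    }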
389 | 394 |
390 } // namespace tcmalloc | 395 } // namespace tcmalloc |
391 | 396 |
392 #endif // TCMALLOC_THREAD_CACHE_H_ | 397 #endif // TCMALLOC_THREAD_CACHE_H_ |