OLD | NEW |
(Empty) | |
| 1 // Copyright 2017 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #ifndef BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ |
| 6 #define BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ |
| 7 |
| 8 #include <memory> |
| 9 #include <unordered_map> |
| 10 #include <vector> |
| 11 |
| 12 #include "base/atomicops.h" |
| 13 #include "base/base_export.h" |
| 14 #include "base/macros.h" |
| 15 #include "base/synchronization/lock.h" |
| 16 #include "base/trace_event/heap_profiler_allocation_register.h" |
| 17 |
| 18 namespace base { |
| 19 namespace trace_event { |
| 20 |
| 21 class TraceEventMemoryOverhead; |
| 22 |
// This container holds allocations, and context for each allocation [in the
// form of a back trace].
// Entries are sharded across multiple independently-locked AllocationRegister
// instances (see RegisterAndLock below) to reduce lock contention.
// This container is thread-safe.
class BASE_EXPORT ShardedAllocationRegister {
 public:
  // Aggregated per-context allocation statistics, keyed by the context
  // captured at allocation time.
  using MetricsMap = std::unordered_map<AllocationContext, AllocationMetrics>;

  // Aggregate totals returned by UpdateAndReturnsMetrics().
  struct OutputMetrics {
    // Total size of allocated objects.
    size_t size;
    // Total count of allocated objects.
    size_t count;
  };

  ShardedAllocationRegister();

  // This class must be enabled before calling Insert() or Remove(). Once the
  // class is enabled, it's okay if Insert() or Remove() is called [due to
  // races] after the class is disabled.
  void SetEnabled();
  void SetDisabled();
  // Lock-free fast-path check of |enabled_|; uses an acquire load so state
  // published before enabling is visible. NOTE(review): the matching release
  // store is presumably in SetEnabled()/SetDisabled() in the .cc file —
  // confirm there.
  bool is_enabled() const { return !!base::subtle::Acquire_Load(&enabled_); }

  ~ShardedAllocationRegister();

  // Inserts allocation details into the container. If the address was present
  // already, its details are updated. |address| must not be null.
  //
  // Returns true if an insert occurred. Inserts may fail because the table
  // is full.
  bool Insert(const void* address,
              size_t size,
              const AllocationContext& context);

  // Removes the address from the container if it is present. It is ok to call
  // this with a null pointer.
  void Remove(const void* address);

  // Estimates memory overhead including |sizeof(AllocationRegister)|.
  void EstimateTraceMemoryOverhead(TraceEventMemoryOverhead* overhead) const;

  // Updates |map| with all allocated objects and their statistics.
  // Returns aggregate statistics.
  OutputMetrics UpdateAndReturnsMetrics(MetricsMap& map) const;

 private:
  // One shard: an AllocationRegister together with the lock that guards it.
  struct RegisterAndLock {
    RegisterAndLock();
    ~RegisterAndLock();
    AllocationRegister allocation_register;
    Lock lock;
  };
  // Heap-allocated array of shards; size is determined at construction in
  // the .cc file (not visible from this header).
  std::unique_ptr<RegisterAndLock[]> allocation_registers_;

  // This member needs to be checked on every allocation and deallocation [fast
  // path] when heap profiling is enabled. Using a lock here causes significant
  // contention.
  base::subtle::Atomic32 enabled_;

  DISALLOW_COPY_AND_ASSIGN(ShardedAllocationRegister);
};
| 84 |
| 85 } // namespace trace_event |
| 86 } // namespace base |
| 87 |
| 88 #endif // BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ |
OLD | NEW |