OLD | NEW |
1 // Copyright 2017 The Chromium Authors. All rights reserved. | 1 // Copyright 2017 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ | 5 #ifndef BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ |
6 #define BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ | 6 #define BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ |
7 | 7 |
8 #include <memory> | 8 #include <memory> |
9 #include <unordered_map> | 9 #include <unordered_map> |
10 #include <vector> | 10 #include <vector> |
11 | 11 |
12 #include "base/atomicops.h" | 12 #include "base/atomicops.h" |
13 #include "base/base_export.h" | 13 #include "base/base_export.h" |
| 14 #include "base/callback.h" |
14 #include "base/macros.h" | 15 #include "base/macros.h" |
15 #include "base/synchronization/lock.h" | 16 #include "base/synchronization/lock.h" |
16 #include "base/trace_event/heap_profiler_allocation_register.h" | 17 #include "base/trace_event/heap_profiler_allocation_register.h" |
17 | 18 |
18 namespace base { | 19 namespace base { |
19 namespace trace_event { | 20 namespace trace_event { |
20 | 21 |
21 class TraceEventMemoryOverhead; | 22 class TraceEventMemoryOverhead; |
22 | 23 |
23 // This container holds allocations and the context for each allocation [in | 24 // This container holds allocations and the context for each allocation [in |
24 // the form of a backtrace]. | 25 // the form of a backtrace]. |
25 // This container is thread-safe. | 26 // This container is thread-safe. |
26 class BASE_EXPORT ShardedAllocationRegister { | 27 class BASE_EXPORT ShardedAllocationRegister { |
27 public: | 28 public: |
28 using MetricsMap = std::unordered_map<AllocationContext, AllocationMetrics>; | |
29 | |
30 struct OutputMetrics { | |
31 // Total size of allocated objects. | |
32 size_t size; | |
33 // Total count of allocated objects. | |
34 size_t count; | |
35 }; | |
36 | |
37 ShardedAllocationRegister(); | 29 ShardedAllocationRegister(); |
38 | 30 |
39 // This class must be enabled before calling Insert() or Remove(). Once the | 31 // This class must be enabled before calling Insert() or Remove(). Once the |
40 // class is enabled, it's okay if Insert() or Remove() is called [due to | 32 // class is enabled, it's okay if Insert() or Remove() is called [due to |
41 // races] after the class is disabled. | 33 // races] after the class is disabled. |
42 void SetEnabled(); | 34 void SetEnabled(); |
43 void SetDisabled(); | 35 void SetDisabled(); |
44 bool is_enabled() const { return !!base::subtle::Acquire_Load(&enabled_); } | 36 bool is_enabled() const { return !!base::subtle::Acquire_Load(&enabled_); } |
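The acquire-load in is_enabled() is what keeps the allocation hot path lock-free. A minimal sketch of how an allocator hook might gate the register, assuming hypothetical names (OnAllocation, OnFree, g_register) that are not part of this header:

#include "base/trace_event/sharded_allocation_register.h"

using base::trace_event::AllocationContext;
using base::trace_event::ShardedAllocationRegister;

// Hypothetical process-wide register; the real wiring is elsewhere.
ShardedAllocationRegister* g_register = nullptr;

// Hypothetical hook invoked on every allocation.
void OnAllocation(const void* address,
                  size_t size,
                  const AllocationContext& context) {
  // Lock-free fast path: a single acquire-load when profiling is off.
  if (!g_register->is_enabled())
    return;
  // Per the contract above, an Insert() that races with SetDisabled() is
  // harmless, so no lock is taken around the enabled check.
  g_register->Insert(address, size, context);
}

// Hypothetical hook invoked on every free. Remove() accepts null.
void OnFree(const void* address) {
  if (g_register->is_enabled())
    g_register->Remove(address);
}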
45 | 37 |
46 ~ShardedAllocationRegister(); | 38 ~ShardedAllocationRegister(); |
47 | 39 |
48 // Inserts allocation details into the container. If the address was already | 40 // Inserts allocation details into the container. If the address was already |
49 // present, its details are updated. |address| must not be null. | 41 // present, its details are updated. |address| must not be null. |
50 // | 42 // |
51 // Returns true if an insert occurred. Inserts may fail because the table | 43 // Returns true if an insert occurred. Inserts may fail because the table |
52 // is full. | 44 // is full. |
53 bool Insert(const void* address, | 45 bool Insert(const void* address, |
54 size_t size, | 46 size_t size, |
55 const AllocationContext& context); | 47 const AllocationContext& context); |
56 | 48 |
57 // Removes the address from the container if it is present. It is okay to | 49 // Removes the address from the container if it is present. It is okay to |
58 // call this with a null pointer. | 50 // call this with a null pointer. |
59 void Remove(const void* address); | 51 void Remove(const void* address); |
60 | 52 |
61 // Estimates memory overhead including |sizeof(AllocationRegister)|. | 53 // Estimates memory overhead including |sizeof(AllocationRegister)|. |
62 void EstimateTraceMemoryOverhead(TraceEventMemoryOverhead* overhead) const; | 54 void EstimateTraceMemoryOverhead(TraceEventMemoryOverhead* overhead) const; |
63 | 55 |
64 // Updates |map| with all allocated objects and their statistics. | 56 using AllocationVisitor = |
65 // Returns aggregate statistics. | 57 base::RepeatingCallback<void(const AllocationRegister::Allocation&)>; |
66 OutputMetrics UpdateAndReturnsMetrics(MetricsMap& map) const; | 58 |
| 59 void VisitAllocations(const AllocationVisitor& visitor) const; |
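Callers of the removed UpdateAndReturnsMetrics() can rebuild the same per-context aggregation on top of VisitAllocations(). A sketch, assuming AllocationRegister::Allocation exposes |size| and |context| fields and AllocationMetrics keeps |size| and |count| members (both declared in heap_profiler_allocation_register.h), and using base::BindRepeating from base/bind.h:

#include <unordered_map>

#include "base/bind.h"
#include "base/trace_event/sharded_allocation_register.h"

using base::trace_event::AllocationContext;
using base::trace_event::AllocationMetrics;
using base::trace_event::AllocationRegister;

using MetricsMap = std::unordered_map<AllocationContext, AllocationMetrics>;

// Folds one allocation into the per-context map and the running totals,
// mirroring what the removed OutputMetrics struct reported.
void AccumulateAllocation(MetricsMap* map,
                          size_t* total_size,
                          size_t* total_count,
                          const AllocationRegister::Allocation& alloc) {
  AllocationMetrics& metrics = (*map)[alloc.context];
  metrics.size += alloc.size;
  metrics.count++;
  *total_size += alloc.size;
  (*total_count)++;
}

void CollectMetrics(const base::trace_event::ShardedAllocationRegister& reg) {
  MetricsMap map;
  size_t total_size = 0;
  size_t total_count = 0;
  reg.VisitAllocations(base::BindRepeating(
      &AccumulateAllocation, base::Unretained(&map),
      base::Unretained(&total_size), base::Unretained(&total_count)));
  // |map|, |total_size|, and |total_count| now hold what
  // UpdateAndReturnsMetrics() used to return.
}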
67 | 60 |
68 private: | 61 private: |
69 struct RegisterAndLock { | 62 struct RegisterAndLock { |
70 RegisterAndLock(); | 63 RegisterAndLock(); |
71 ~RegisterAndLock(); | 64 ~RegisterAndLock(); |
72 AllocationRegister allocation_register; | 65 AllocationRegister allocation_register; |
73 Lock lock; | 66 Lock lock; |
74 }; | 67 }; |
75 std::unique_ptr<RegisterAndLock[]> allocation_registers_; | 68 std::unique_ptr<RegisterAndLock[]> allocation_registers_; |
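How an address maps onto one of these RegisterAndLock shards is decided in sharded_allocation_register.cc, not here. The sketch below shows the general per-shard pattern with an invented shard count and hash; it is not the actual implementation:

#include <functional>

#include "base/trace_event/sharded_allocation_register.h"

namespace base {
namespace trace_event {

// Illustrative only: the real shard count and hash live in the .cc file.
constexpr size_t kHypotheticalShardCount = 16;

bool ShardedAllocationRegister::Insert(const void* address,
                                       size_t size,
                                       const AllocationContext& context) {
  // Hash the address down to one shard so concurrent Insert()/Remove()
  // calls on different addresses rarely contend on the same lock.
  size_t index = std::hash<const void*>()(address) % kHypotheticalShardCount;
  RegisterAndLock& shard = allocation_registers_[index];
  AutoLock scoped_lock(shard.lock);
  return shard.allocation_register.Insert(address, size, context);
}

}  // namespace trace_event
}  // namespace base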
76 | 69 |
77 // This member needs to be checked on every allocation and deallocation [fast | 70 // This member needs to be checked on every allocation and deallocation [fast |
78 // path] when heap profiling is enabled. Using a lock here causes significant | 71 // path] when heap profiling is enabled. Using a lock here causes significant |
79 // contention. | 72 // contention. |
80 base::subtle::Atomic32 enabled_; | 73 base::subtle::Atomic32 enabled_; |
81 | 74 |
82 DISALLOW_COPY_AND_ASSIGN(ShardedAllocationRegister); | 75 DISALLOW_COPY_AND_ASSIGN(ShardedAllocationRegister); |
83 }; | 76 }; |
84 | 77 |
85 } // namespace trace_event | 78 } // namespace trace_event |
86 } // namespace base | 79 } // namespace base |
87 | 80 |
88 #endif // BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ | 81 #endif // BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_ |