Chromium Code Reviews

Index: base/trace_event/sharded_allocation_register.h
diff --git a/base/trace_event/sharded_allocation_register.h b/base/trace_event/sharded_allocation_register.h
new file mode 100644
index 0000000000000000000000000000000000000000..71150cc6100916b31209c7170733d7f3ac320772
--- /dev/null
+++ b/base/trace_event/sharded_allocation_register.h
@@ -0,0 +1,87 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_
+#define BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_
+
+#include <memory>
+#include <unordered_map>
+#include <vector>
+
+#include "base/atomicops.h"
+#include "base/base_export.h"
+#include "base/macros.h"
+#include "base/synchronization/lock.h"
+#include "base/trace_event/heap_profiler_allocation_register.h"
+
+namespace base {
+namespace trace_event {
+
+class TraceEventMemoryOverhead;
+
+// This container holds allocations, and context for each allocation [in the
+// form of a back trace].
+// This container is thread-safe.
+class BASE_EXPORT ShardedAllocationRegister {
+ public:
+  ShardedAllocationRegister();
+
+  // Must be initialized before the container can be used.
+  void Initialize();
+  bool IsInitialized() const;
+
+  void SetEnabled(bool enabled);
+  bool IsEnabled() const;
|
Primiano Tucci (use gerrit), 2017/05/22 16:37:51:
this could be an in-line is_enabled() const { ret…

erikchen, 2017/05/22 17:10:43:
Done.
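
For context, a minimal sketch of the inline accessor the reviewer is suggesting, assuming |enabled_| stays the base::subtle::Atomic32 declared further down in this header; the acquire ordering is an illustrative choice, not something this patch specifies:

    // Hedged sketch only; mirrors the reviewer's suggestion, not this patch.
    bool is_enabled() const { return subtle::Acquire_Load(&enabled_) != 0; }

Keeping the check header-inlined avoids an out-of-line call on every allocation hook, which is the fast path the comment near |enabled_| describes.
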
+
+  ~ShardedAllocationRegister();
+
+  // Inserts allocation details into the container. If the address was present
+  // already, its details are updated. |address| must not be null.
+  //
+  // Returns true if an insert occurred. Inserts may fail because the table
+  // is full.
+  bool Insert(const void* address,
+              size_t size,
+              const AllocationContext& context);
+
+  // Removes the address from the container if it is present. It is ok to call
+  // this with a null pointer.
+  void Remove(const void* address);
+
+  // Estimates memory overhead including |sizeof(AllocationRegister)|.
+  void EstimateTraceMemoryOverhead(TraceEventMemoryOverhead* overhead) const;
+
+  using MetricsMap = std::unordered_map<AllocationContext, AllocationMetrics>;

Primiano Tucci (use gerrit), 2017/05/22 16:37:51:
nit: structs and type declarations should go on th…

erikchen, 2017/05/22 17:10:43:
Done.

+  struct OutputMetrics {
+    // Total size of allocated objects.
+    size_t size;
+    // Total count of allocated objects.
+    size_t count;
+  };
+
+  // Updates |map| with all allocated objects and their statistics.
+  // Returns aggregate statistics.
+  OutputMetrics UpdateAndReturnsMetrics(MetricsMap& map) const;
+
+ private:
+  struct RegisterAndLock {
+    RegisterAndLock();
+    ~RegisterAndLock();
+    AllocationRegister allocation_register;
+    Lock lock;
+  };
+  std::vector<std::unique_ptr<RegisterAndLock>> allocation_registers_;
+
+  // This member needs to be checked on every allocation and deallocation [fast
+  // path] when heap profiling is enabled. Using a lock here would cause
+  // significant contention.
+  base::subtle::Atomic32 enabled_;
+
+  DISALLOW_COPY_AND_ASSIGN(ShardedAllocationRegister);
+};
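
For readers outside the Chromium tree, a minimal standalone sketch of the sharding idea behind the private section above: each shard pairs its own table with its own lock, and the allocation address selects the shard, so two threads only contend when their addresses land in the same shard. Everything below (kNumShards, the hash, the std types) is illustrative and is not what this patch implements:

    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <mutex>
    #include <unordered_map>

    constexpr size_t kNumShards = 16;  // Illustrative shard count.

    struct Shard {
      std::mutex lock;
      std::unordered_map<const void*, size_t> allocations;  // address -> size
    };

    std::array<Shard, kNumShards> g_shards;

    Shard& ShardFor(const void* address) {
      // Shift away alignment bits so allocations spread across shards.
      const uintptr_t bits = reinterpret_cast<uintptr_t>(address) >> 4;
      return g_shards[bits % kNumShards];
    }

    void RecordAlloc(const void* address, size_t size) {
      Shard& shard = ShardFor(address);
      std::lock_guard<std::mutex> hold(shard.lock);  // Blocks only this shard.
      shard.allocations[address] = size;
    }

    void RecordFree(const void* address) {
      Shard& shard = ShardFor(address);
      std::lock_guard<std::mutex> hold(shard.lock);
      shard.allocations.erase(address);
    }
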
+
+}  // namespace trace_event
+}  // namespace base
+
+#endif  // BASE_TRACE_EVENT_SHARDED_ALLOCATION_REGISTER_H_
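
Equally hedged, a sketch of how a memory-dump path might consume the metrics API declared in this header; DumpHeapStatistics and |reg| are hypothetical names, and the snippet assumes a register that has already been Initialize()d and populated inside the Chromium tree:

    // Hypothetical caller; not part of this patch.
    void DumpHeapStatistics(const ShardedAllocationRegister& reg) {
      ShardedAllocationRegister::MetricsMap metrics_by_context;
      ShardedAllocationRegister::OutputMetrics totals =
          reg.UpdateAndReturnsMetrics(metrics_by_context);
      // |metrics_by_context| now maps each AllocationContext to its aggregated
      // AllocationMetrics; |totals.size| and |totals.count| summarize the heap.
      for (const auto& entry : metrics_by_context) {
        // ... emit one heap-dump row per AllocationContext here ...
      }
    }
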