Index: base/trace_event/heap_profiler_allocation_register.h |
diff --git a/base/trace_event/heap_profiler_allocation_register.h b/base/trace_event/heap_profiler_allocation_register.h |
index 6d63923f4c164e7a51a2eaf6686d94a87de1f4a9..c3c4648094b89244f74046eae467634c2b7daf00 100644 |
--- a/base/trace_event/heap_profiler_allocation_register.h |
+++ b/base/trace_event/heap_profiler_allocation_register.h |
@@ -28,6 +28,10 @@ namespace internal { |
// Allocates a region of virtual address space of |size| rounded up to the |
// system page size. The memory is zeroed by the system. A guard page is |
// added after the end. |
+// |
+// TODO(awong): Remove the guarded memory. This isn't currently being used and |
[Inline review thread attached to the line above — not part of the patch:]
  Primiano Tucci (use gerrit), 2017/04/04 09:50:23:
    You say "this isn't currently used" but that is wh… [text truncated in extraction]
  awong, 2017/04/04 18:28:30:
    I'll remove it. The code just has too many concep… [text truncated in extraction]
[End of review thread; the added TODO comment continues below.]
+// this code is complex looking enough that this costs more than its bug-bashing |
+// weight. |
void* AllocateGuardedVirtualMemory(size_t size); |
// Frees a region of virtual address space allocated by a call to |
@@ -52,7 +56,7 @@ class FixedHashMap { |
// Capacity controls how many items this hash map can hold, and largely |
// affects memory footprint. |
- FixedHashMap(size_t capacity) |
+ explicit FixedHashMap(size_t capacity) |
: num_cells_(capacity), |
num_inserts_dropped_(0), |
cells_(static_cast<Cell*>( |
@@ -252,6 +256,8 @@ class TraceEventMemoryOverhead; |
// freed. Internally it has two hashtables: one for Backtraces and one for |
// actual allocations. Sizes of both hashtables are fixed, and this class |
// allocates (mmaps) only in its constructor. |
+// |
+// When either hash table hits max size, new inserts are dropped. |
class BASE_EXPORT AllocationRegister { |
public: |
// Details about an allocation. |
@@ -358,6 +364,11 @@ class BASE_EXPORT AllocationRegister { |
AllocationInfo, |
AddressHasher>; |
+ // This method should be called once to initialize |
+ // |out_of_storage_backtrace_index_|. The first call will create a unique |
+ // sentinel entry. The return value of multiple calls is undefined. |
+ BacktraceMap::KVIndex CreateBacktraceSentinel(); |
+ |
BacktraceMap::KVIndex InsertBacktrace(const Backtrace& backtrace); |
void RemoveBacktrace(BacktraceMap::KVIndex index); |
@@ -366,8 +377,8 @@ class BASE_EXPORT AllocationRegister { |
AllocationMap allocations_; |
BacktraceMap backtraces_; |
- // Sentinel used when we run out of backtraces_ storage. |
- BacktraceMap::KVIndex out_of_storage_backtrace_index_; |
+ // Sentinel used when the |backtraces_| table is full. |
+ const BacktraceMap::KVIndex out_of_storage_backtrace_index_; |
DISALLOW_COPY_AND_ASSIGN(AllocationRegister); |
}; |