Index: content/common/discardable_shared_memory_heap.cc |
diff --git a/content/common/discardable_shared_memory_heap.cc b/content/common/discardable_shared_memory_heap.cc |
index 36ee632255329506570831c90bc23287cad00580..e0ec29a81b9b8f7c24589caf2368f4c86a0d0aab 100644 |
--- a/content/common/discardable_shared_memory_heap.cc |
+++ b/content/common/discardable_shared_memory_heap.cc |
@@ -6,11 +6,16 @@ |
#include <algorithm> |
+#include "base/atomic_sequence_num.h" |
reveman
2015/04/27 15:08:58
don't need this anymore
ssid
2015/04/27 15:21:55
Done.
|
#include "base/memory/discardable_shared_memory.h" |
+#include "base/strings/stringprintf.h" |
namespace content { |
namespace { |
+const char kMemoryAllocatorHeapNamePrefix[] = "segment_%zu"; |
reveman
2015/04/27 15:08:58
nit: maybe have the prefix be "segment" and the pr
ssid
2015/04/27 15:21:56
Done.
|
+const char kMemoryAllocatorName[] = "discardable"; |
+ |
bool IsPowerOfTwo(size_t x) { |
return (x & (x - 1)) == 0; |
} |
@@ -35,10 +40,12 @@ DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment( |
DiscardableSharedMemoryHeap* heap, |
scoped_ptr<base::DiscardableSharedMemory> shared_memory, |
size_t size, |
+ DiscardableSharedMemoryId id, |
reveman
2015/04/27 15:08:58
int32_t to avoid circular dep
ssid
2015/04/27 15:21:55
Done.
|
const base::Closure& deleted_callback) |
: heap_(heap), |
shared_memory_(shared_memory.Pass()), |
size_(size), |
+ id_(id), |
deleted_callback_(deleted_callback) { |
} |
@@ -55,6 +62,11 @@ bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const { |
return heap_->IsMemoryResident(shared_memory_.get()); |
} |
+void DiscardableSharedMemoryHeap::ScopedMemorySegment::DumpInto( |
+ base::trace_event::ProcessMemoryDump* pmd) const { |
+ heap_->DumpInto(shared_memory_.get(), size_, id_, pmd); |
+} |
+ |
DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size) |
: block_size_(block_size), num_blocks_(0), num_free_blocks_(0) { |
DCHECK_NE(block_size_, 0u); |
@@ -75,6 +87,7 @@ DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() { |
scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow( |
scoped_ptr<base::DiscardableSharedMemory> shared_memory, |
size_t size, |
+ DiscardableSharedMemoryId id, |
reveman
2015/04/27 15:08:58
int32_t to avoid circular dep
ssid
2015/04/27 15:21:56
Done.
|
const base::Closure& deleted_callback) { |
// Memory must be aligned to block size. |
DCHECK_EQ( |
@@ -93,8 +106,8 @@ scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow( |
num_blocks_ += span->length_; |
// Start tracking if segment is resident by adding it to |memory_segments_|. |
- memory_segments_.push_back(new ScopedMemorySegment(this, shared_memory.Pass(), |
- size, deleted_callback)); |
+ memory_segments_.push_back(new ScopedMemorySegment( |
+ this, shared_memory.Pass(), size, id, deleted_callback)); |
return span.Pass(); |
} |
@@ -212,6 +225,15 @@ size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const { |
return num_free_blocks_ * block_size_; |
} |
+bool DiscardableSharedMemoryHeap::DumpInto( |
+ base::trace_event::ProcessMemoryDump* pmd) { |
+ std::for_each(memory_segments_.begin(), memory_segments_.end(), |
+ [pmd](const ScopedMemorySegment* segment) { |
+ segment->DumpInto(pmd); |
+ }); |
+ return true; |
+} |
+ |
void DiscardableSharedMemoryHeap::InsertIntoFreeList( |
scoped_ptr<DiscardableSharedMemoryHeap::Span> span) { |
DCHECK(!IsInFreeList(span.get())); |
@@ -316,4 +338,35 @@ void DiscardableSharedMemoryHeap::ReleaseMemory( |
} |
} |
+void DiscardableSharedMemoryHeap::DumpInto( |
+ const base::DiscardableSharedMemory* shared_memory, |
+ size_t size, |
+ DiscardableSharedMemoryId id, |
+ base::trace_event::ProcessMemoryDump* pmd) { |
+ std::string heap_name = |
+ base::StringPrintf(kMemoryAllocatorHeapNamePrefix, id); |
+ base::trace_event::MemoryAllocatorDump* dump = |
+ pmd->CreateAllocatorDump(kMemoryAllocatorName, heap_name.c_str()); |
+ |
+ size_t allocated_objects_count = 0; |
+ size_t allocated_objects_size_in_bytes = 0; |
+ size_t offset = |
+ reinterpret_cast<size_t>(shared_memory->memory()) / block_size_; |
+ size_t end = offset + size / block_size_; |
+ while (offset < end) { |
+ Span* span = spans_[offset]; |
+ if (!IsInFreeList(span)) { |
+ allocated_objects_count++; |
+ allocated_objects_size_in_bytes += span->length_; |
+ } |
+ offset += span->length_; |
+ } |
+ |
+ dump->set_physical_size_in_bytes(static_cast<uint64>(size)); |
+ dump->set_allocated_objects_count( |
+ static_cast<uint64>(allocated_objects_count)); |
+ dump->set_allocated_objects_size_in_bytes( |
+ static_cast<uint64>(allocated_objects_size_in_bytes)); |
+} |
+ |
} // namespace content |