Chromium Code Reviews

Unified Diff: content/common/discardable_shared_memory_heap.cc

Issue 1100073004: Adding discardable memory dump provider. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fixed comments. Created 5 years, 7 months ago
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/common/discardable_shared_memory_heap.h"

 #include <algorithm>

-#include "base/atomic_sequence_num.h"
    reveman 2015/04/27 15:08:58: don't need this anymore
    ssid 2015/04/27 15:21:55: Done.
 #include "base/memory/discardable_shared_memory.h"
+#include "base/strings/stringprintf.h"

 namespace content {
 namespace {

+const char kMemoryAllocatorHeapNamePrefix[] = "segment_%zu";
    reveman 2015/04/27 15:08:58: nit: maybe have the prefix be "segment" and the pr…
    ssid 2015/04/27 15:21:56: Done.
+const char kMemoryAllocatorName[] = "discardable";
+
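The nit above is truncated in this view; one plausible reading (an assumption on the editor's part, not necessarily what a later patch set did) is to keep the constant as the bare prefix and apply the printf-style format at the dump site instead of baking "%zu" into the constant:

  // Sketch of that reading only.
  const char kMemoryAllocatorHeapNamePrefix[] = "segment";

  // ...later, when building the per-segment dump name:
  std::string heap_name =
      base::StringPrintf("%s_%zu", kMemoryAllocatorHeapNamePrefix, id);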
 bool IsPowerOfTwo(size_t x) {
   return (x & (x - 1)) == 0;
 }

 bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
   return span->previous() || span->next();
 }

 }  // namespace

 DiscardableSharedMemoryHeap::Span::Span(
     base::DiscardableSharedMemory* shared_memory,
     size_t start,
     size_t length)
     : shared_memory_(shared_memory), start_(start), length_(length) {
 }

 DiscardableSharedMemoryHeap::Span::~Span() {
 }

 DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
     DiscardableSharedMemoryHeap* heap,
     scoped_ptr<base::DiscardableSharedMemory> shared_memory,
     size_t size,
+    DiscardableSharedMemoryId id,
    reveman 2015/04/27 15:08:58: int32_t to avoid circular dep
    ssid 2015/04/27 15:21:55: Done.
     const base::Closure& deleted_callback)
     : heap_(heap),
       shared_memory_(shared_memory.Pass()),
       size_(size),
+      id_(id),
       deleted_callback_(deleted_callback) {
 }
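This thread, and the identical one on Grow() further down, asks for the new parameter to be a plain int32_t rather than the DiscardableSharedMemoryId typedef, so that discardable_shared_memory_heap.h does not have to include the header defining the typedef (breaking a circular dependency). A minimal sketch of how the two affected declarations might read in the header after that change; the class shape is abbreviated and the int32_t spelling is the reviewer's suggestion, not what this patch set contains:

  // Sketch only, abbreviated; not the code under review.
  class DiscardableSharedMemoryHeap {
   public:
    // ...
    scoped_ptr<Span> Grow(scoped_ptr<base::DiscardableSharedMemory> shared_memory,
                          size_t size,
                          int32_t id,  // plain int32_t; no typedef header needed
                          const base::Closure& deleted_callback);

   private:
    class ScopedMemorySegment {
     public:
      ScopedMemorySegment(DiscardableSharedMemoryHeap* heap,
                          scoped_ptr<base::DiscardableSharedMemory> shared_memory,
                          size_t size,
                          int32_t id,
                          const base::Closure& deleted_callback);
      // ...
    };
    // ...
  };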

 DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
   heap_->ReleaseMemory(shared_memory_.get(), size_);
   deleted_callback_.Run();
 }

 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
   return heap_->IsMemoryUsed(shared_memory_.get(), size_);
 }

 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
   return heap_->IsMemoryResident(shared_memory_.get());
 }

+void DiscardableSharedMemoryHeap::ScopedMemorySegment::DumpInto(
+    base::trace_event::ProcessMemoryDump* pmd) const {
+  heap_->DumpInto(shared_memory_.get(), size_, id_, pmd);
+}
+
 DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
     : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
   DCHECK_NE(block_size_, 0u);
   DCHECK(IsPowerOfTwo(block_size_));
 }

 DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
   memory_segments_.clear();
   DCHECK_EQ(num_blocks_, 0u);
   DCHECK_EQ(num_free_blocks_, 0u);
   DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                           [](const base::LinkedList<Span>& free_spans) {
                             return !free_spans.empty();
                           }),
             0);
 }

 scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow(
     scoped_ptr<base::DiscardableSharedMemory> shared_memory,
     size_t size,
+    DiscardableSharedMemoryId id,
    reveman 2015/04/27 15:08:58: int32_t to avoid circular dep
    ssid 2015/04/27 15:21:56: Done.
     const base::Closure& deleted_callback) {
   // Memory must be aligned to block size.
   DCHECK_EQ(
       reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
       0u);
   DCHECK_EQ(size & (block_size_ - 1), 0u);

   scoped_ptr<Span> span(
       new Span(shared_memory.get(),
                reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
                size / block_size_));
   DCHECK(spans_.find(span->start_) == spans_.end());
   DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
   RegisterSpan(span.get());

   num_blocks_ += span->length_;

   // Start tracking if segment is resident by adding it to |memory_segments_|.
-  memory_segments_.push_back(new ScopedMemorySegment(this, shared_memory.Pass(),
-                                                     size, deleted_callback));
+  memory_segments_.push_back(new ScopedMemorySegment(
+      this, shared_memory.Pass(), size, id, deleted_callback));

   return span.Pass();
 }

 void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) {
   DCHECK(span->shared_memory_);

   // First add length of |span| to |num_free_blocks_|.
   num_free_blocks_ += span->length_;

(...skipping 97 matching lines...)
 }

 size_t DiscardableSharedMemoryHeap::GetSize() const {
   return num_blocks_ * block_size_;
 }

 size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
   return num_free_blocks_ * block_size_;
 }

+bool DiscardableSharedMemoryHeap::DumpInto(
+    base::trace_event::ProcessMemoryDump* pmd) {
+  std::for_each(memory_segments_.begin(), memory_segments_.end(),
+                [pmd](const ScopedMemorySegment* segment) {
+                  segment->DumpInto(pmd);
+                });
+  return true;
+}
+
 void DiscardableSharedMemoryHeap::InsertIntoFreeList(
     scoped_ptr<DiscardableSharedMemoryHeap::Span> span) {
   DCHECK(!IsInFreeList(span.get()));
   size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
   free_spans_[index].Append(span.release());
 }

 scoped_ptr<DiscardableSharedMemoryHeap::Span>
 DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
   DCHECK(IsInFreeList(span));
(...skipping 84 matching lines...)

     // If |span| is in the free list, remove it and update |num_free_blocks_|.
     if (IsInFreeList(span)) {
       DCHECK_GE(num_free_blocks_, span->length_);
       num_free_blocks_ -= span->length_;
       RemoveFromFreeList(span);
     }
   }
 }

+void DiscardableSharedMemoryHeap::DumpInto(
+    const base::DiscardableSharedMemory* shared_memory,
+    size_t size,
+    DiscardableSharedMemoryId id,
+    base::trace_event::ProcessMemoryDump* pmd) {
+  std::string heap_name =
+      base::StringPrintf(kMemoryAllocatorHeapNamePrefix, id);
+  base::trace_event::MemoryAllocatorDump* dump =
+      pmd->CreateAllocatorDump(kMemoryAllocatorName, heap_name.c_str());
+
+  size_t allocated_objects_count = 0;
+  size_t allocated_objects_size_in_bytes = 0;
+  size_t offset =
+      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
+  size_t end = offset + size / block_size_;
+  while (offset < end) {
+    Span* span = spans_[offset];
+    if (!IsInFreeList(span)) {
+      allocated_objects_count++;
+      allocated_objects_size_in_bytes += span->length_;
+    }
+    offset += span->length_;
+  }
+
+  dump->set_physical_size_in_bytes(static_cast<uint64>(size));
+  dump->set_allocated_objects_count(
+      static_cast<uint64>(allocated_objects_count));
+  dump->set_allocated_objects_size_in_bytes(
+      static_cast<uint64>(allocated_objects_size_in_bytes));
+}
+
 }  // namespace content
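The CL title says this change is in service of a discardable memory dump provider, and the per-heap DumpInto() added above is the hook such a provider would call. As an illustration only, with a hypothetical provider class name and assuming the trace_event provider interface of this era exposes a DumpInto(ProcessMemoryDump*) hook (the naming here suggests so, but the diff does not show it), the wiring might look roughly like:

  // Hypothetical sketch, not part of this CL: a dump provider that forwards to
  // the heap, emitting one allocator dump per live segment (named from
  // kMemoryAllocatorName plus the "segment_%zu" heap name above).
  #include "base/trace_event/memory_dump_provider.h"
  #include "base/trace_event/process_memory_dump.h"
  #include "content/common/discardable_shared_memory_heap.h"

  class DiscardableMemoryDumpProvider
      : public base::trace_event::MemoryDumpProvider {
   public:
    explicit DiscardableMemoryDumpProvider(
        content::DiscardableSharedMemoryHeap* heap)
        : heap_(heap) {}

    // Called on each dump request; the method name and signature are assumed
    // from the DumpInto() convention used in this patch.
    bool DumpInto(base::trace_event::ProcessMemoryDump* pmd) override {
      return heap_->DumpInto(pmd);
    }

   private:
    content::DiscardableSharedMemoryHeap* heap_;  // Not owned.
  };

Registration with the dump manager and thread-affinity details are omitted from the sketch.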