OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/discardable_shared_memory_heap.h" | 5 #include "content/common/discardable_shared_memory_heap.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 | 8 |
9 #include "base/memory/discardable_shared_memory.h" | 9 #include "base/memory/discardable_shared_memory.h" |
| 10 #include "base/strings/stringprintf.h" |
10 | 11 |
11 namespace content { | 12 namespace content { |
12 namespace { | 13 namespace { |
13 | 14 |
| 15 const char kMemoryAllocatorHeapNamePrefix[] = "segment"; |
| 16 const char kMemoryAllocatorName[] = "discardable"; |
| 17 |
14 bool IsPowerOfTwo(size_t x) { | 18 bool IsPowerOfTwo(size_t x) { |
15 return (x & (x - 1)) == 0; | 19 return (x & (x - 1)) == 0; |
16 } | 20 } |
17 | 21 |
18 bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) { | 22 bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) { |
19 return span->previous() || span->next(); | 23 return span->previous() || span->next(); |
20 } | 24 } |
21 | 25 |
22 } // namespace | 26 } // namespace |
23 | 27 |
24 DiscardableSharedMemoryHeap::Span::Span( | 28 DiscardableSharedMemoryHeap::Span::Span( |
25 base::DiscardableSharedMemory* shared_memory, | 29 base::DiscardableSharedMemory* shared_memory, |
26 size_t start, | 30 size_t start, |
27 size_t length) | 31 size_t length) |
28 : shared_memory_(shared_memory), start_(start), length_(length) { | 32 : shared_memory_(shared_memory), start_(start), length_(length) { |
29 } | 33 } |
30 | 34 |
31 DiscardableSharedMemoryHeap::Span::~Span() { | 35 DiscardableSharedMemoryHeap::Span::~Span() { |
32 } | 36 } |
33 | 37 |
34 DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment( | 38 DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment( |
35 DiscardableSharedMemoryHeap* heap, | 39 DiscardableSharedMemoryHeap* heap, |
36 scoped_ptr<base::DiscardableSharedMemory> shared_memory, | 40 scoped_ptr<base::DiscardableSharedMemory> shared_memory, |
37 size_t size, | 41 size_t size, |
| 42 int32_t id, |
38 const base::Closure& deleted_callback) | 43 const base::Closure& deleted_callback) |
39 : heap_(heap), | 44 : heap_(heap), |
40 shared_memory_(shared_memory.Pass()), | 45 shared_memory_(shared_memory.Pass()), |
41 size_(size), | 46 size_(size), |
| 47 id_(id), |
42 deleted_callback_(deleted_callback) { | 48 deleted_callback_(deleted_callback) { |
43 } | 49 } |
44 | 50 |
45 DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() { | 51 DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() { |
46 heap_->ReleaseMemory(shared_memory_.get(), size_); | 52 heap_->ReleaseMemory(shared_memory_.get(), size_); |
47 deleted_callback_.Run(); | 53 deleted_callback_.Run(); |
48 } | 54 } |
49 | 55 |
50 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const { | 56 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const { |
51 return heap_->IsMemoryUsed(shared_memory_.get(), size_); | 57 return heap_->IsMemoryUsed(shared_memory_.get(), size_); |
52 } | 58 } |
53 | 59 |
54 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const { | 60 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const { |
55 return heap_->IsMemoryResident(shared_memory_.get()); | 61 return heap_->IsMemoryResident(shared_memory_.get()); |
56 } | 62 } |
57 | 63 |
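| // Delegates dumping of this segment to the heap, which knows how the |
| // segment is divided into spans. |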
| 64 void DiscardableSharedMemoryHeap::ScopedMemorySegment::OnMemoryDump( |
| 65 base::trace_event::ProcessMemoryDump* pmd) const { |
| 66 heap_->OnMemoryDump(shared_memory_.get(), size_, id_, pmd); |
| 67 } |
| 68 |
58 DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size) | 69 DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size) |
59 : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) { | 70 : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) { |
60 DCHECK_NE(block_size_, 0u); | 71 DCHECK_NE(block_size_, 0u); |
61 DCHECK(IsPowerOfTwo(block_size_)); | 72 DCHECK(IsPowerOfTwo(block_size_)); |
62 } | 73 } |
63 | 74 |
64 DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() { | 75 DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() { |
65 memory_segments_.clear(); | 76 memory_segments_.clear(); |
66 DCHECK_EQ(num_blocks_, 0u); | 77 DCHECK_EQ(num_blocks_, 0u); |
67 DCHECK_EQ(num_free_blocks_, 0u); | 78 DCHECK_EQ(num_free_blocks_, 0u); |
68 DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_), | 79 DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_), |
69 [](const base::LinkedList<Span>& free_spans) { | 80 [](const base::LinkedList<Span>& free_spans) { |
70 return !free_spans.empty(); | 81 return !free_spans.empty(); |
71 }), | 82 }), |
72 0); | 83 0); |
73 } | 84 } |
74 | 85 |
75 scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow( | 86 scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow( |
76 scoped_ptr<base::DiscardableSharedMemory> shared_memory, | 87 scoped_ptr<base::DiscardableSharedMemory> shared_memory, |
77 size_t size, | 88 size_t size, |
| 89 int32_t id, |
78 const base::Closure& deleted_callback) { | 90 const base::Closure& deleted_callback) { |
79 // Memory must be aligned to block size. | 91 // Memory must be aligned to block size. |
80 DCHECK_EQ( | 92 DCHECK_EQ( |
81 reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1), | 93 reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1), |
82 0u); | 94 0u); |
83 DCHECK_EQ(size & (block_size_ - 1), 0u); | 95 DCHECK_EQ(size & (block_size_ - 1), 0u); |
84 | 96 |
85 scoped_ptr<Span> span( | 97 scoped_ptr<Span> span( |
86 new Span(shared_memory.get(), | 98 new Span(shared_memory.get(), |
87 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_, | 99 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_, |
88 size / block_size_)); | 100 size / block_size_)); |
89 DCHECK(spans_.find(span->start_) == spans_.end()); | 101 DCHECK(spans_.find(span->start_) == spans_.end()); |
90 DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end()); | 102 DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end()); |
91 RegisterSpan(span.get()); | 103 RegisterSpan(span.get()); |
92 | 104 |
93 num_blocks_ += span->length_; | 105 num_blocks_ += span->length_; |
94 | 106 |
95 // Start tracking if segment is resident by adding it to |memory_segments_|. | 107 // Start tracking if segment is resident by adding it to |memory_segments_|. |
96 memory_segments_.push_back(new ScopedMemorySegment(this, shared_memory.Pass(), | 108 memory_segments_.push_back(new ScopedMemorySegment( |
97 size, deleted_callback)); | 109 this, shared_memory.Pass(), size, id, deleted_callback)); |
98 | 110 |
99 return span.Pass(); | 111 return span.Pass(); |
100 } | 112 } |
101 | 113 |
102 void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) { | 114 void DiscardableSharedMemoryHeap::MergeIntoFreeLists(scoped_ptr<Span> span) { |
103 DCHECK(span->shared_memory_); | 115 DCHECK(span->shared_memory_); |
104 | 116 |
105 // First add length of |span| to |num_free_blocks_|. | 117 // First add length of |span| to |num_free_blocks_|. |
106 num_free_blocks_ += span->length_; | 118 num_free_blocks_ += span->length_; |
107 | 119 |
(...skipping 97 matching lines...)
205 } | 217 } |
206 | 218 |
207 size_t DiscardableSharedMemoryHeap::GetSize() const { | 219 size_t DiscardableSharedMemoryHeap::GetSize() const { |
208 return num_blocks_ * block_size_; | 220 return num_blocks_ * block_size_; |
209 } | 221 } |
210 | 222 |
211 size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const { | 223 size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const { |
212 return num_free_blocks_ * block_size_; | 224 return num_free_blocks_ * block_size_; |
213 } | 225 } |
214 | 226 |
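| // Dumps statistics for each segment in |memory_segments_| into |pmd|. |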
| 227 bool DiscardableSharedMemoryHeap::OnMemoryDump( |
| 228 base::trace_event::ProcessMemoryDump* pmd) { |
| 229 std::for_each( |
| 230 memory_segments_.begin(), memory_segments_.end(), |
| 231 [pmd](const ScopedMemorySegment* segment) { |
| 232 segment->OnMemoryDump(pmd); |
| 233 }); |
| 234 return true; |
| 235 } |
| 236 |
215 void DiscardableSharedMemoryHeap::InsertIntoFreeList( | 237 void DiscardableSharedMemoryHeap::InsertIntoFreeList( |
216 scoped_ptr<DiscardableSharedMemoryHeap::Span> span) { | 238 scoped_ptr<DiscardableSharedMemoryHeap::Span> span) { |
217 DCHECK(!IsInFreeList(span.get())); | 239 DCHECK(!IsInFreeList(span.get())); |
218 size_t index = std::min(span->length_, arraysize(free_spans_)) - 1; | 240 size_t index = std::min(span->length_, arraysize(free_spans_)) - 1; |
219 free_spans_[index].Append(span.release()); | 241 free_spans_[index].Append(span.release()); |
220 } | 242 } |
221 | 243 |
222 scoped_ptr<DiscardableSharedMemoryHeap::Span> | 244 scoped_ptr<DiscardableSharedMemoryHeap::Span> |
223 DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) { | 245 DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) { |
224 DCHECK(IsInFreeList(span)); | 246 DCHECK(IsInFreeList(span)); |
(...skipping 84 matching lines...)
309 | 331 |
310 // If |span| is in the free list, remove it and update |num_free_blocks_|. | 332 // If |span| is in the free list, remove it and update |num_free_blocks_|. |
311 if (IsInFreeList(span)) { | 333 if (IsInFreeList(span)) { |
312 DCHECK_GE(num_free_blocks_, span->length_); | 334 DCHECK_GE(num_free_blocks_, span->length_); |
313 num_free_blocks_ -= span->length_; | 335 num_free_blocks_ -= span->length_; |
314 RemoveFromFreeList(span); | 336 RemoveFromFreeList(span); |
315 } | 337 } |
316 } | 338 } |
317 } | 339 } |
318 | 340 |
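| // Creates one allocator dump per segment, named "discardable/segment_<id>", |
| // and records the segment's size and allocation statistics. |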
| 341 void DiscardableSharedMemoryHeap::OnMemoryDump( |
| 342 const base::DiscardableSharedMemory* shared_memory, |
| 343 size_t size, |
| 344 int32_t id, |
| 345 base::trace_event::ProcessMemoryDump* pmd) { |
| 346 std::string heap_name = base::StringPrintf( |
| 347 "%s/%s_%d", kMemoryAllocatorName, kMemoryAllocatorHeapNamePrefix, id); |
| 348 base::trace_event::MemoryAllocatorDump* dump = |
| 349 pmd->CreateAllocatorDump(heap_name); |
| 350 DCHECK(dump); |
| 351 |
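| // Walk the spans covering this segment and tally the ones that are |
| // currently allocated (not on any free list). |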
| 352 size_t allocated_objects_count = 0; |
| 353 size_t allocated_objects_size_in_bytes = 0; |
| 354 size_t offset = |
| 355 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_; |
| 356 size_t end = offset + size / block_size_; |
| 357 while (offset < end) { |
| 358 Span* span = spans_[offset]; |
| 359 if (!IsInFreeList(span)) { |
| 360 allocated_objects_count++; |
| 361 allocated_objects_size_in_bytes += span->length_ * block_size_; |
| 362 } |
| 363 offset += span->length_; |
| 364 } |
| 365 |
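| // Report the segment's total size, the number of allocated objects, and |
| // the bytes those objects occupy. |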
| 366 dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameOuterSize, |
| 367 base::trace_event::MemoryAllocatorDump::kUnitsBytes, |
| 368 static_cast<uint64_t>(size)); |
| 369 dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectsCount, |
| 370 base::trace_event::MemoryAllocatorDump::kUnitsObjects, |
| 371 static_cast<uint64_t>(allocated_objects_count)); |
| 372 dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameInnerSize, |
| 373 base::trace_event::MemoryAllocatorDump::kUnitsBytes, |
| 374 static_cast<uint64_t>(allocated_objects_size_in_bytes)); |
| 375 } |
| 376 |
319 } // namespace content | 377 } // namespace content |
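
For reference, here is a minimal standalone sketch (not part of this CL) of the per-segment accounting that OnMemoryDump performs above. FakeSpan, kBlockSize, and the hard-coded span layout are hypothetical stand-ins for Span, block_size_, and |spans_|; the point is only to show how offsets and lengths are tracked in block units and converted back to bytes when reported.

#include <cstddef>
#include <iostream>
#include <map>

namespace {

// Assumed block size; a power of two, like |block_size_| in the heap.
constexpr size_t kBlockSize = 4096;

// Hypothetical stand-in for DiscardableSharedMemoryHeap::Span: a run of
// blocks that is either allocated or sitting on a free list.
struct FakeSpan {
  size_t start;       // Offset of the span within the segment, in blocks.
  size_t length;      // Length of the span, in blocks.
  bool in_free_list;  // True if the span is currently free.
};

}  // namespace

int main() {
  // A 16-block segment split into three spans: 4 allocated, 8 free,
  // 4 allocated. Keyed by block offset, like |spans_|.
  const std::map<size_t, FakeSpan> spans = {
      {0, {0, 4, false}}, {4, {4, 8, true}}, {12, {12, 4, false}}};
  const size_t segment_blocks = 16;

  size_t allocated_objects_count = 0;
  size_t allocated_objects_size_in_bytes = 0;

  // Walk the segment span by span, as the loop in OnMemoryDump does:
  // offsets advance in block units, sizes convert back to bytes.
  size_t offset = 0;
  while (offset < segment_blocks) {
    const FakeSpan& span = spans.at(offset);
    if (!span.in_free_list) {
      ++allocated_objects_count;
      allocated_objects_size_in_bytes += span.length * kBlockSize;
    }
    offset += span.length;
  }

  // Prints: outer size 65536, 2 allocated objects, inner size 32768.
  std::cout << "outer size: " << segment_blocks * kBlockSize << " bytes\n"
            << "allocated objects: " << allocated_objects_count << "\n"
            << "inner size: " << allocated_objects_size_in_bytes << " bytes\n";
  return 0;
}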