OLD | NEW |
| (Empty) |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/child/child_discardable_shared_memory_manager.h" | |
6 | |
7 #include <inttypes.h> | |
8 | |
9 #include <algorithm> | |
10 #include <utility> | |
11 | |
12 #include "base/atomic_sequence_num.h" | |
13 #include "base/bind.h" | |
14 #include "base/debug/crash_logging.h" | |
15 #include "base/macros.h" | |
16 #include "base/memory/discardable_memory.h" | |
17 #include "base/memory/discardable_shared_memory.h" | |
18 #include "base/memory/ptr_util.h" | |
19 #include "base/metrics/histogram_macros.h" | |
20 #include "base/process/memory.h" | |
21 #include "base/process/process_metrics.h" | |
22 #include "base/strings/string_number_conversions.h" | |
23 #include "base/strings/stringprintf.h" | |
24 #include "base/threading/thread_task_runner_handle.h" | |
25 #include "base/trace_event/memory_dump_manager.h" | |
26 #include "base/trace_event/trace_event.h" | |
27 #include "content/common/child_process_messages.h" | |
28 | |
29 namespace content { | |
30 namespace { | |
31 | |
// Default allocation size (4 MiB). New discardable shared memory segments are
// requested from the browser in chunks of at least this size; smaller
// allocations are carved out of a chunk and the remainder goes to free lists.
const size_t kAllocationSize = 4 * 1024 * 1024;

// Global atomic to generate unique discardable shared memory IDs. The IDs are
// used to identify segments in messages to the browser process.
base::StaticAtomicSequenceNumber g_next_discardable_shared_memory_id;
37 | |
38 class DiscardableMemoryImpl : public base::DiscardableMemory { | |
39 public: | |
40 DiscardableMemoryImpl(ChildDiscardableSharedMemoryManager* manager, | |
41 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) | |
42 : manager_(manager), span_(std::move(span)), is_locked_(true) {} | |
43 | |
44 ~DiscardableMemoryImpl() override { | |
45 if (is_locked_) | |
46 manager_->UnlockSpan(span_.get()); | |
47 | |
48 manager_->ReleaseSpan(std::move(span_)); | |
49 } | |
50 | |
51 // Overridden from base::DiscardableMemory: | |
52 bool Lock() override { | |
53 DCHECK(!is_locked_); | |
54 | |
55 if (!manager_->LockSpan(span_.get())) | |
56 return false; | |
57 | |
58 is_locked_ = true; | |
59 return true; | |
60 } | |
61 void Unlock() override { | |
62 DCHECK(is_locked_); | |
63 | |
64 manager_->UnlockSpan(span_.get()); | |
65 is_locked_ = false; | |
66 } | |
67 void* data() const override { | |
68 DCHECK(is_locked_); | |
69 return reinterpret_cast<void*>(span_->start() * base::GetPageSize()); | |
70 } | |
71 | |
72 base::trace_event::MemoryAllocatorDump* CreateMemoryAllocatorDump( | |
73 const char* name, | |
74 base::trace_event::ProcessMemoryDump* pmd) const override { | |
75 return manager_->CreateMemoryAllocatorDump(span_.get(), name, pmd); | |
76 } | |
77 | |
78 private: | |
79 ChildDiscardableSharedMemoryManager* const manager_; | |
80 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span_; | |
81 bool is_locked_; | |
82 | |
83 DISALLOW_COPY_AND_ASSIGN(DiscardableMemoryImpl); | |
84 }; | |
85 | |
// Notifies the browser process that the discardable shared memory segment
// identified by |id| is no longer referenced by this child process. Bound as
// a deletion callback when the heap grows (see AllocateLockedDiscardableMemory).
void SendDeletedDiscardableSharedMemoryMessage(
    scoped_refptr<ThreadSafeSender> sender,
    DiscardableSharedMemoryId id) {
  sender->Send(new ChildProcessHostMsg_DeletedDiscardableSharedMemory(id));
}
91 | |
92 } // namespace | |
93 | |
ChildDiscardableSharedMemoryManager::ChildDiscardableSharedMemoryManager(
    ThreadSafeSender* sender)
    : heap_(base::GetPageSize()), sender_(sender) {
  // Register as a memory-infra dump provider so heap usage appears in
  // tracing memory dumps; dumps are serviced on the current thread's task
  // runner.
  base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider(
      this, "ChildDiscardableSharedMemoryManager",
      base::ThreadTaskRunnerHandle::Get());
}
101 | |
ChildDiscardableSharedMemoryManager::~ChildDiscardableSharedMemoryManager() {
  base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider(
      this);
  // TODO(reveman): Determine if this DCHECK can be enabled. crbug.com/430533
  // DCHECK_EQ(heap_.GetSize(), heap_.GetSizeOfFreeLists());
  // If the heap still holds memory, zero out the crash keys so reports don't
  // show stale usage for a destroyed manager.
  if (heap_.GetSize())
    MemoryUsageChanged(0, 0);
}
110 | |
// Allocates |size| bytes of discardable memory, returned in the locked state.
// First tries to reuse a span from the heap's free lists; if every candidate
// has been purged (and thus fails to re-lock), asks the browser process for a
// new shared memory segment of at least kAllocationSize bytes, carves off the
// requested pages and returns the remainder to the free lists.
std::unique_ptr<base::DiscardableMemory>
ChildDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory(
    size_t size) {
  base::AutoLock lock(lock_);

  DCHECK_NE(size, 0u);

  UMA_HISTOGRAM_CUSTOM_COUNTS("Memory.DiscardableAllocationSize",
                              size / 1024,  // In KB
                              1,
                              4 * 1024 * 1024,  // 4 GB
                              50);

  // Round up to multiple of page size.
  size_t pages =
      std::max((size + base::GetPageSize() - 1) / base::GetPageSize(),
               static_cast<size_t>(1));

  // Default allocation size in pages.
  size_t allocation_pages = kAllocationSize / base::GetPageSize();

  size_t slack = 0;
  // When searching the free lists, allow a slack between required size and
  // free span size that is less or equal to kAllocationSize. This is to
  // avoid segments larger then kAllocationSize unless they are a perfect
  // fit. The result is that large allocations can be reused without reducing
  // the ability to discard memory.
  if (pages < allocation_pages)
    slack = allocation_pages - pages;

  size_t heap_size_prior_to_releasing_purged_memory = heap_.GetSize();
  for (;;) {
    // Search free lists for suitable span.
    std::unique_ptr<DiscardableSharedMemoryHeap::Span> free_span =
        heap_.SearchFreeLists(pages, slack);
    if (!free_span.get())
      break;

    // Attempt to lock |free_span|. Delete span and search free lists again
    // if locking failed.
    if (free_span->shared_memory()->Lock(
            free_span->start() * base::GetPageSize() -
                reinterpret_cast<size_t>(free_span->shared_memory()->memory()),
            free_span->length() * base::GetPageSize()) ==
        base::DiscardableSharedMemory::FAILED) {
      DCHECK(!free_span->shared_memory()->IsMemoryResident());
      // We have to release purged memory before |free_span| can be destroyed.
      heap_.ReleasePurgedMemory();
      DCHECK(!free_span->shared_memory());
      continue;
    }

    free_span->set_is_locked(true);

    // Memory usage is guaranteed to have changed after having removed
    // at least one span from the free lists.
    MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());

    return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(free_span));
  }

  // Release purged memory to free up the address space before we attempt to
  // allocate more memory.
  heap_.ReleasePurgedMemory();

  // Make sure crash keys are up to date in case allocation fails.
  if (heap_.GetSize() != heap_size_prior_to_releasing_purged_memory)
    MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());

  // Grow by at least kAllocationSize so small allocations share one segment.
  size_t pages_to_allocate =
      std::max(kAllocationSize / base::GetPageSize(), pages);
  size_t allocation_size_in_bytes = pages_to_allocate * base::GetPageSize();

  DiscardableSharedMemoryId new_id =
      g_next_discardable_shared_memory_id.GetNext();

  // Ask parent process to allocate a new discardable shared memory segment.
  std::unique_ptr<base::DiscardableSharedMemory> shared_memory(
      AllocateLockedDiscardableSharedMemory(allocation_size_in_bytes, new_id));

  // Create span for allocated memory.
  std::unique_ptr<DiscardableSharedMemoryHeap::Span> new_span(heap_.Grow(
      std::move(shared_memory), allocation_size_in_bytes, new_id,
      base::Bind(&SendDeletedDiscardableSharedMemoryMessage, sender_, new_id)));
  new_span->set_is_locked(true);

  // Unlock and insert any left over memory into free lists.
  if (pages < pages_to_allocate) {
    std::unique_ptr<DiscardableSharedMemoryHeap::Span> leftover =
        heap_.Split(new_span.get(), pages);
    leftover->shared_memory()->Unlock(
        leftover->start() * base::GetPageSize() -
            reinterpret_cast<size_t>(leftover->shared_memory()->memory()),
        leftover->length() * base::GetPageSize());
    leftover->set_is_locked(false);
    heap_.MergeIntoFreeLists(std::move(leftover));
  }

  MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists());

  return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(new_span));
}
213 | |
214 bool ChildDiscardableSharedMemoryManager::OnMemoryDump( | |
215 const base::trace_event::MemoryDumpArgs& args, | |
216 base::trace_event::ProcessMemoryDump* pmd) { | |
217 base::AutoLock lock(lock_); | |
218 if (args.level_of_detail == | |
219 base::trace_event::MemoryDumpLevelOfDetail::BACKGROUND) { | |
220 base::trace_event::MemoryAllocatorDump* total_dump = | |
221 pmd->CreateAllocatorDump( | |
222 base::StringPrintf("discardable/child_0x%" PRIXPTR, | |
223 reinterpret_cast<uintptr_t>(this))); | |
224 const size_t total_size = heap_.GetSize(); | |
225 const size_t freelist_size = heap_.GetSizeOfFreeLists(); | |
226 total_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize, | |
227 base::trace_event::MemoryAllocatorDump::kUnitsBytes, | |
228 total_size - freelist_size); | |
229 total_dump->AddScalar("freelist_size", | |
230 base::trace_event::MemoryAllocatorDump::kUnitsBytes, | |
231 freelist_size); | |
232 return true; | |
233 } | |
234 | |
235 return heap_.OnMemoryDump(pmd); | |
236 } | |
237 | |
238 ChildDiscardableSharedMemoryManager::Statistics | |
239 ChildDiscardableSharedMemoryManager::GetStatistics() const { | |
240 base::AutoLock lock(lock_); | |
241 Statistics stats; | |
242 stats.total_size = heap_.GetSize(); | |
243 stats.freelist_size = heap_.GetSizeOfFreeLists(); | |
244 return stats; | |
245 } | |
246 | |
247 void ChildDiscardableSharedMemoryManager::ReleaseFreeMemory() { | |
248 base::AutoLock lock(lock_); | |
249 | |
250 size_t heap_size_prior_to_releasing_memory = heap_.GetSize(); | |
251 | |
252 // Release both purged and free memory. | |
253 heap_.ReleasePurgedMemory(); | |
254 heap_.ReleaseFreeMemory(); | |
255 | |
256 if (heap_.GetSize() != heap_size_prior_to_releasing_memory) | |
257 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); | |
258 } | |
259 | |
260 bool ChildDiscardableSharedMemoryManager::LockSpan( | |
261 DiscardableSharedMemoryHeap::Span* span) { | |
262 base::AutoLock lock(lock_); | |
263 | |
264 if (!span->shared_memory()) | |
265 return false; | |
266 | |
267 size_t offset = span->start() * base::GetPageSize() - | |
268 reinterpret_cast<size_t>(span->shared_memory()->memory()); | |
269 size_t length = span->length() * base::GetPageSize(); | |
270 | |
271 switch (span->shared_memory()->Lock(offset, length)) { | |
272 case base::DiscardableSharedMemory::SUCCESS: | |
273 span->set_is_locked(true); | |
274 return true; | |
275 case base::DiscardableSharedMemory::PURGED: | |
276 span->shared_memory()->Unlock(offset, length); | |
277 span->set_is_locked(false); | |
278 return false; | |
279 case base::DiscardableSharedMemory::FAILED: | |
280 return false; | |
281 } | |
282 | |
283 NOTREACHED(); | |
284 return false; | |
285 } | |
286 | |
287 void ChildDiscardableSharedMemoryManager::UnlockSpan( | |
288 DiscardableSharedMemoryHeap::Span* span) { | |
289 base::AutoLock lock(lock_); | |
290 | |
291 DCHECK(span->shared_memory()); | |
292 size_t offset = span->start() * base::GetPageSize() - | |
293 reinterpret_cast<size_t>(span->shared_memory()->memory()); | |
294 size_t length = span->length() * base::GetPageSize(); | |
295 | |
296 span->set_is_locked(false); | |
297 return span->shared_memory()->Unlock(offset, length); | |
298 } | |
299 | |
300 void ChildDiscardableSharedMemoryManager::ReleaseSpan( | |
301 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) { | |
302 base::AutoLock lock(lock_); | |
303 | |
304 // Delete span instead of merging it into free lists if memory is gone. | |
305 if (!span->shared_memory()) | |
306 return; | |
307 | |
308 heap_.MergeIntoFreeLists(std::move(span)); | |
309 | |
310 // Bytes of free memory changed. | |
311 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); | |
312 } | |
313 | |
// Creates a memory-infra allocator dump for |span| under |name|, forwarding
// to the heap while holding the manager lock.
base::trace_event::MemoryAllocatorDump*
ChildDiscardableSharedMemoryManager::CreateMemoryAllocatorDump(
    DiscardableSharedMemoryHeap::Span* span,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  base::AutoLock lock(lock_);
  return heap_.CreateMemoryAllocatorDump(span, name, pmd);
}
322 | |
// Synchronously asks the browser process for a new locked discardable shared
// memory segment of |size| bytes identified by |id|, then maps it into this
// process. If mapping fails (e.g. address space or commit exhaustion), the
// process is terminated with an out-of-memory signature.
std::unique_ptr<base::DiscardableSharedMemory>
ChildDiscardableSharedMemoryManager::AllocateLockedDiscardableSharedMemory(
    size_t size,
    DiscardableSharedMemoryId id) {
  TRACE_EVENT2("renderer",
               "ChildDiscardableSharedMemoryManager::"
               "AllocateLockedDiscardableSharedMemory",
               "size", size, "id", id);

  base::SharedMemoryHandle handle = base::SharedMemory::NULLHandle();
  // Blocking sync IPC to the browser; |handle| is filled in by the reply.
  sender_->Send(
      new ChildProcessHostMsg_SyncAllocateLockedDiscardableSharedMemory(
          size, id, &handle));
  std::unique_ptr<base::DiscardableSharedMemory> memory(
      new base::DiscardableSharedMemory(handle));
  if (!memory->Map(size))
    base::TerminateBecauseOutOfMemory(size);
  return memory;
}
342 | |
343 void ChildDiscardableSharedMemoryManager::MemoryUsageChanged( | |
344 size_t new_bytes_total, | |
345 size_t new_bytes_free) const { | |
346 static const char kDiscardableMemoryAllocatedKey[] = | |
347 "discardable-memory-allocated"; | |
348 base::debug::SetCrashKeyValue(kDiscardableMemoryAllocatedKey, | |
349 base::Uint64ToString(new_bytes_total)); | |
350 | |
351 static const char kDiscardableMemoryFreeKey[] = "discardable-memory-free"; | |
352 base::debug::SetCrashKeyValue(kDiscardableMemoryFreeKey, | |
353 base::Uint64ToString(new_bytes_free)); | |
354 } | |
355 | |
356 } // namespace content | |
OLD | NEW |