| OLD | NEW |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "components/discardable_memory/client/client_discardable_shared_memory_
manager.h" | 5 #include "components/discardable_memory/client/client_discardable_shared_memory_
manager.h" |
| 6 | 6 |
| 7 #include <inttypes.h> | 7 #include <inttypes.h> |
| 8 | 8 |
| 9 #include <algorithm> | 9 #include <algorithm> |
| 10 #include <utility> | 10 #include <utility> |
| 11 | 11 |
| 12 #include "base/atomic_sequence_num.h" | 12 #include "base/atomic_sequence_num.h" |
| 13 #include "base/bind.h" | 13 #include "base/bind.h" |
| 14 #include "base/debug/crash_logging.h" | 14 #include "base/debug/crash_logging.h" |
| 15 #include "base/macros.h" | 15 #include "base/macros.h" |
| 16 #include "base/memory/discardable_memory.h" | 16 #include "base/memory/discardable_memory.h" |
| 17 #include "base/memory/discardable_shared_memory.h" | 17 #include "base/memory/discardable_shared_memory.h" |
| 18 #include "base/memory/ptr_util.h" | 18 #include "base/memory/ptr_util.h" |
| 19 #include "base/metrics/histogram_macros.h" | 19 #include "base/metrics/histogram_macros.h" |
| 20 #include "base/process/memory.h" | 20 #include "base/process/memory.h" |
| 21 #include "base/process/process_metrics.h" | 21 #include "base/process/process_metrics.h" |
| 22 #include "base/strings/string_number_conversions.h" | 22 #include "base/strings/string_number_conversions.h" |
| 23 #include "base/strings/stringprintf.h" | 23 #include "base/strings/stringprintf.h" |
| 24 #include "base/synchronization/waitable_event.h" | |
| 25 #include "base/threading/thread_task_runner_handle.h" | 24 #include "base/threading/thread_task_runner_handle.h" |
| 26 #include "base/trace_event/memory_dump_manager.h" | 25 #include "base/trace_event/memory_dump_manager.h" |
| 27 #include "base/trace_event/trace_event.h" | 26 #include "base/trace_event/trace_event.h" |
| 28 #include "mojo/public/cpp/system/platform_handle.h" | |
| 29 | 27 |
| 30 namespace discardable_memory { | 28 namespace discardable_memory { |
| 31 namespace { | 29 namespace { |
| 32 | 30 |
| 33 // Default allocation size. | 31 // Default allocation size. |
| 34 const size_t kAllocationSize = 4 * 1024 * 1024; | 32 const size_t kAllocationSize = 4 * 1024 * 1024; |
| 35 | 33 |
| 36 // Global atomic to generate unique discardable shared memory IDs. | 34 // Global atomic to generate unique discardable shared memory IDs. |
| 37 base::StaticAtomicSequenceNumber g_next_discardable_shared_memory_id; | 35 base::StaticAtomicSequenceNumber g_next_discardable_shared_memory_id; |
| 38 | 36 |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 77 } | 75 } |
| 78 | 76 |
| 79 private: | 77 private: |
| 80 ClientDiscardableSharedMemoryManager* const manager_; | 78 ClientDiscardableSharedMemoryManager* const manager_; |
| 81 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span_; | 79 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span_; |
| 82 bool is_locked_; | 80 bool is_locked_; |
| 83 | 81 |
| 84 DISALLOW_COPY_AND_ASSIGN(DiscardableMemoryImpl); | 82 DISALLOW_COPY_AND_ASSIGN(DiscardableMemoryImpl); |
| 85 }; | 83 }; |
| 86 | 84 |
| 87 void InitManagerMojoOnIO(mojom::DiscardableSharedMemoryManagerPtr* manager_mojo, | 85 void SendDeletedDiscardableSharedMemoryMessage( |
| 88 mojom::DiscardableSharedMemoryManagerPtrInfo info) { | 86 ClientDiscardableSharedMemoryManager::Delegate* delegate, |
| 89 manager_mojo->Bind(std::move(info)); | 87 DiscardableSharedMemoryId id) { |
| 90 } | 88 delegate->DeletedDiscardableSharedMemory(id); |
| 91 | |
| 92 void DeletedDiscardableSharedMemoryOnIO( | |
| 93 mojom::DiscardableSharedMemoryManagerPtr* manager_mojo, | |
| 94 int32_t id) { | |
| 95 (*manager_mojo)->DeletedDiscardableSharedMemory(id); | |
| 96 } | 89 } |
| 97 | 90 |
| 98 } // namespace | 91 } // namespace |
| 99 | 92 |
| 100 ClientDiscardableSharedMemoryManager::ClientDiscardableSharedMemoryManager( | 93 ClientDiscardableSharedMemoryManager::ClientDiscardableSharedMemoryManager( |
| 101 mojom::DiscardableSharedMemoryManagerPtr manager, | 94 Delegate* delegate) |
| 102 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner) | 95 : heap_(base::GetPageSize()), delegate_(delegate) { |
| 103 : io_task_runner_(std::move(io_task_runner)), | |
| 104 manager_mojo_(new mojom::DiscardableSharedMemoryManagerPtr), | |
| 105 heap_(new DiscardableSharedMemoryHeap(base::GetPageSize())) { | |
| 106 base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider( | 96 base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider( |
| 107 this, "ClientDiscardableSharedMemoryManager", | 97 this, "ClientDiscardableSharedMemoryManager", |
| 108 base::ThreadTaskRunnerHandle::Get()); | 98 base::ThreadTaskRunnerHandle::Get()); |
| 109 mojom::DiscardableSharedMemoryManagerPtrInfo info = manager.PassInterface(); | |
| 110 io_task_runner_->PostTask( | |
| 111 FROM_HERE, base::Bind(&InitManagerMojoOnIO, manager_mojo_.get(), | |
| 112 base::Passed(&info))); | |
| 113 } | 99 } |
| 114 | 100 |
| 115 ClientDiscardableSharedMemoryManager::~ClientDiscardableSharedMemoryManager() { | 101 ClientDiscardableSharedMemoryManager::~ClientDiscardableSharedMemoryManager() { |
| 116 base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider( | 102 base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider( |
| 117 this); | 103 this); |
| 118 // TODO(reveman): Determine if this DCHECK can be enabled. crbug.com/430533 | 104 // TODO(reveman): Determine if this DCHECK can be enabled. crbug.com/430533 |
| 119 // DCHECK_EQ(heap_->GetSize(), heap_->GetSizeOfFreeLists()); | 105 // DCHECK_EQ(heap_.GetSize(), heap_.GetSizeOfFreeLists()); |
| 120 if (heap_->GetSize()) | 106 if (heap_.GetSize()) |
| 121 MemoryUsageChanged(0, 0); | 107 MemoryUsageChanged(0, 0); |
| 122 | |
| 123 // Releasing the |heap_| before posting a task for deleting |manager_mojo_|. | |
| 124 // It is because releasing |heap_| will invoke DeletedDiscardableSharedMemory | |
| 125 // which needs |manager_mojo_|. | |
| 126 heap_.reset(); | |
| 127 | |
| 128 // Delete the |manager_mojo_| on IO thread, so any pending tasks on IO thread | |
| 129 // will be executed before the |manager_mojo_| is deleted. | |
| 130 bool posted = io_task_runner_->DeleteSoon(FROM_HERE, manager_mojo_.release()); | |
| 131 DCHECK(posted); | |
| 132 } | 108 } |
| 133 | 109 |
| 134 std::unique_ptr<base::DiscardableMemory> | 110 std::unique_ptr<base::DiscardableMemory> |
| 135 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory( | 111 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory( |
| 136 size_t size) { | 112 size_t size) { |
| 137 base::AutoLock lock(lock_); | 113 base::AutoLock lock(lock_); |
| 138 | 114 |
| 139 DCHECK_NE(size, 0u); | 115 DCHECK_NE(size, 0u); |
| 140 | 116 |
| 141 auto size_in_kb = static_cast<base::HistogramBase::Sample>(size / 1024); | 117 auto size_in_kb = static_cast<base::HistogramBase::Sample>(size / 1024); |
| (...skipping 13 matching lines...) Expand all Loading... |
| 155 | 131 |
| 156 size_t slack = 0; | 132 size_t slack = 0; |
| 157 // When searching the free lists, allow a slack between required size and | 133 // When searching the free lists, allow a slack between required size and |
| 158 // free span size that is less or equal to kAllocationSize. This is to | 134 // free span size that is less or equal to kAllocationSize. This is to |
| 159 // avoid segments larger than kAllocationSize unless they are a perfect | 135 // avoid segments larger than kAllocationSize unless they are a perfect |
| 160 // fit. The result is that large allocations can be reused without reducing | 136 // fit. The result is that large allocations can be reused without reducing |
| 161 // the ability to discard memory. | 137 // the ability to discard memory. |
| 162 if (pages < allocation_pages) | 138 if (pages < allocation_pages) |
| 163 slack = allocation_pages - pages; | 139 slack = allocation_pages - pages; |
| 164 | 140 |
| 165 size_t heap_size_prior_to_releasing_purged_memory = heap_->GetSize(); | 141 size_t heap_size_prior_to_releasing_purged_memory = heap_.GetSize(); |
| 166 for (;;) { | 142 for (;;) { |
| 167 // Search free lists for suitable span. | 143 // Search free lists for suitable span. |
| 168 std::unique_ptr<DiscardableSharedMemoryHeap::Span> free_span = | 144 std::unique_ptr<DiscardableSharedMemoryHeap::Span> free_span = |
| 169 heap_->SearchFreeLists(pages, slack); | 145 heap_.SearchFreeLists(pages, slack); |
| 170 if (!free_span.get()) | 146 if (!free_span.get()) |
| 171 break; | 147 break; |
| 172 | 148 |
| 173 // Attempt to lock |free_span|. Delete span and search free lists again | 149 // Attempt to lock |free_span|. Delete span and search free lists again |
| 174 // if locking failed. | 150 // if locking failed. |
| 175 if (free_span->shared_memory()->Lock( | 151 if (free_span->shared_memory()->Lock( |
| 176 free_span->start() * base::GetPageSize() - | 152 free_span->start() * base::GetPageSize() - |
| 177 reinterpret_cast<size_t>(free_span->shared_memory()->memory()), | 153 reinterpret_cast<size_t>(free_span->shared_memory()->memory()), |
| 178 free_span->length() * base::GetPageSize()) == | 154 free_span->length() * base::GetPageSize()) == |
| 179 base::DiscardableSharedMemory::FAILED) { | 155 base::DiscardableSharedMemory::FAILED) { |
| 180 DCHECK(!free_span->shared_memory()->IsMemoryResident()); | 156 DCHECK(!free_span->shared_memory()->IsMemoryResident()); |
| 181 // We have to release purged memory before |free_span| can be destroyed. | 157 // We have to release purged memory before |free_span| can be destroyed. |
| 182 heap_->ReleasePurgedMemory(); | 158 heap_.ReleasePurgedMemory(); |
| 183 DCHECK(!free_span->shared_memory()); | 159 DCHECK(!free_span->shared_memory()); |
| 184 continue; | 160 continue; |
| 185 } | 161 } |
| 186 | 162 |
| 187 free_span->set_is_locked(true); | 163 free_span->set_is_locked(true); |
| 188 | 164 |
| 189 // Memory usage is guaranteed to have changed after having removed | 165 // Memory usage is guaranteed to have changed after having removed |
| 190 // at least one span from the free lists. | 166 // at least one span from the free lists. |
| 191 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists()); | 167 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); |
| 192 | 168 |
| 193 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(free_span)); | 169 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(free_span)); |
| 194 } | 170 } |
| 195 | 171 |
| 196 // Release purged memory to free up the address space before we attempt to | 172 // Release purged memory to free up the address space before we attempt to |
| 197 // allocate more memory. | 173 // allocate more memory. |
| 198 heap_->ReleasePurgedMemory(); | 174 heap_.ReleasePurgedMemory(); |
| 199 | 175 |
| 200 // Make sure crash keys are up to date in case allocation fails. | 176 // Make sure crash keys are up to date in case allocation fails. |
| 201 if (heap_->GetSize() != heap_size_prior_to_releasing_purged_memory) | 177 if (heap_.GetSize() != heap_size_prior_to_releasing_purged_memory) |
| 202 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists()); | 178 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); |
| 203 | 179 |
| 204 size_t pages_to_allocate = | 180 size_t pages_to_allocate = |
| 205 std::max(kAllocationSize / base::GetPageSize(), pages); | 181 std::max(kAllocationSize / base::GetPageSize(), pages); |
| 206 size_t allocation_size_in_bytes = pages_to_allocate * base::GetPageSize(); | 182 size_t allocation_size_in_bytes = pages_to_allocate * base::GetPageSize(); |
| 207 | 183 |
| 208 int32_t new_id = g_next_discardable_shared_memory_id.GetNext(); | 184 DiscardableSharedMemoryId new_id = |
| 185 g_next_discardable_shared_memory_id.GetNext(); |
| 209 | 186 |
| 210 // Ask parent process to allocate a new discardable shared memory segment. | 187 // Ask parent process to allocate a new discardable shared memory segment. |
| 211 std::unique_ptr<base::DiscardableSharedMemory> shared_memory = | 188 std::unique_ptr<base::DiscardableSharedMemory> shared_memory( |
| 212 AllocateLockedDiscardableSharedMemory(allocation_size_in_bytes, new_id); | 189 AllocateLockedDiscardableSharedMemory(allocation_size_in_bytes, new_id)); |
| 213 | 190 |
| 214 // Create span for allocated memory. | 191 // Create span for allocated memory. |
| 215 // Spans are managed by |heap_| (the member of | 192 std::unique_ptr<DiscardableSharedMemoryHeap::Span> new_span( |
| 216 // the ClientDiscardableSharedMemoryManager), so it is safe to use | 193 heap_.Grow(std::move(shared_memory), allocation_size_in_bytes, new_id, |
| 217 // base::Unretained(this) here. | 194 base::Bind(&SendDeletedDiscardableSharedMemoryMessage, |
| 218 std::unique_ptr<DiscardableSharedMemoryHeap::Span> new_span(heap_->Grow( | 195 delegate_, new_id))); |
| 219 std::move(shared_memory), allocation_size_in_bytes, new_id, | |
| 220 base::Bind( | |
| 221 &ClientDiscardableSharedMemoryManager::DeletedDiscardableSharedMemory, | |
| 222 base::Unretained(this), new_id))); | |
| 223 new_span->set_is_locked(true); | 196 new_span->set_is_locked(true); |
| 224 | 197 |
| 225 // Unlock and insert any left over memory into free lists. | 198 // Unlock and insert any left over memory into free lists. |
| 226 if (pages < pages_to_allocate) { | 199 if (pages < pages_to_allocate) { |
| 227 std::unique_ptr<DiscardableSharedMemoryHeap::Span> leftover = | 200 std::unique_ptr<DiscardableSharedMemoryHeap::Span> leftover = |
| 228 heap_->Split(new_span.get(), pages); | 201 heap_.Split(new_span.get(), pages); |
| 229 leftover->shared_memory()->Unlock( | 202 leftover->shared_memory()->Unlock( |
| 230 leftover->start() * base::GetPageSize() - | 203 leftover->start() * base::GetPageSize() - |
| 231 reinterpret_cast<size_t>(leftover->shared_memory()->memory()), | 204 reinterpret_cast<size_t>(leftover->shared_memory()->memory()), |
| 232 leftover->length() * base::GetPageSize()); | 205 leftover->length() * base::GetPageSize()); |
| 233 leftover->set_is_locked(false); | 206 leftover->set_is_locked(false); |
| 234 heap_->MergeIntoFreeLists(std::move(leftover)); | 207 heap_.MergeIntoFreeLists(std::move(leftover)); |
| 235 } | 208 } |
| 236 | 209 |
| 237 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists()); | 210 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); |
| 238 | 211 |
| 239 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(new_span)); | 212 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(new_span)); |
| 240 } | 213 } |
| 241 | 214 |
| 242 bool ClientDiscardableSharedMemoryManager::OnMemoryDump( | 215 bool ClientDiscardableSharedMemoryManager::OnMemoryDump( |
| 243 const base::trace_event::MemoryDumpArgs& args, | 216 const base::trace_event::MemoryDumpArgs& args, |
| 244 base::trace_event::ProcessMemoryDump* pmd) { | 217 base::trace_event::ProcessMemoryDump* pmd) { |
| 245 base::AutoLock lock(lock_); | 218 base::AutoLock lock(lock_); |
| 246 if (args.level_of_detail == | 219 if (args.level_of_detail == |
| 247 base::trace_event::MemoryDumpLevelOfDetail::BACKGROUND) { | 220 base::trace_event::MemoryDumpLevelOfDetail::BACKGROUND) { |
| 248 base::trace_event::MemoryAllocatorDump* total_dump = | 221 base::trace_event::MemoryAllocatorDump* total_dump = |
| 249 pmd->CreateAllocatorDump( | 222 pmd->CreateAllocatorDump( |
| 250 base::StringPrintf("discardable/child_0x%" PRIXPTR, | 223 base::StringPrintf("discardable/child_0x%" PRIXPTR, |
| 251 reinterpret_cast<uintptr_t>(this))); | 224 reinterpret_cast<uintptr_t>(this))); |
| 252 const size_t total_size = heap_->GetSize(); | 225 const size_t total_size = heap_.GetSize(); |
| 253 const size_t freelist_size = heap_->GetSizeOfFreeLists(); | 226 const size_t freelist_size = heap_.GetSizeOfFreeLists(); |
| 254 total_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize, | 227 total_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize, |
| 255 base::trace_event::MemoryAllocatorDump::kUnitsBytes, | 228 base::trace_event::MemoryAllocatorDump::kUnitsBytes, |
| 256 total_size - freelist_size); | 229 total_size - freelist_size); |
| 257 total_dump->AddScalar("freelist_size", | 230 total_dump->AddScalar("freelist_size", |
| 258 base::trace_event::MemoryAllocatorDump::kUnitsBytes, | 231 base::trace_event::MemoryAllocatorDump::kUnitsBytes, |
| 259 freelist_size); | 232 freelist_size); |
| 260 return true; | 233 return true; |
| 261 } | 234 } |
| 262 | 235 |
| 263 return heap_->OnMemoryDump(pmd); | 236 return heap_.OnMemoryDump(pmd); |
| 264 } | 237 } |
| 265 | 238 |
| 266 ClientDiscardableSharedMemoryManager::Statistics | 239 ClientDiscardableSharedMemoryManager::Statistics |
| 267 ClientDiscardableSharedMemoryManager::GetStatistics() const { | 240 ClientDiscardableSharedMemoryManager::GetStatistics() const { |
| 268 base::AutoLock lock(lock_); | 241 base::AutoLock lock(lock_); |
| 269 Statistics stats; | 242 Statistics stats; |
| 270 stats.total_size = heap_->GetSize(); | 243 stats.total_size = heap_.GetSize(); |
| 271 stats.freelist_size = heap_->GetSizeOfFreeLists(); | 244 stats.freelist_size = heap_.GetSizeOfFreeLists(); |
| 272 return stats; | 245 return stats; |
| 273 } | 246 } |
| 274 | 247 |
| 275 void ClientDiscardableSharedMemoryManager::ReleaseFreeMemory() { | 248 void ClientDiscardableSharedMemoryManager::ReleaseFreeMemory() { |
| 276 base::AutoLock lock(lock_); | 249 base::AutoLock lock(lock_); |
| 277 | 250 |
| 278 size_t heap_size_prior_to_releasing_memory = heap_->GetSize(); | 251 size_t heap_size_prior_to_releasing_memory = heap_.GetSize(); |
| 279 | 252 |
| 280 // Release both purged and free memory. | 253 // Release both purged and free memory. |
| 281 heap_->ReleasePurgedMemory(); | 254 heap_.ReleasePurgedMemory(); |
| 282 heap_->ReleaseFreeMemory(); | 255 heap_.ReleaseFreeMemory(); |
| 283 | 256 |
| 284 if (heap_->GetSize() != heap_size_prior_to_releasing_memory) | 257 if (heap_.GetSize() != heap_size_prior_to_releasing_memory) |
| 285 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists()); | 258 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); |
| 286 } | 259 } |
| 287 | 260 |
| 288 bool ClientDiscardableSharedMemoryManager::LockSpan( | 261 bool ClientDiscardableSharedMemoryManager::LockSpan( |
| 289 DiscardableSharedMemoryHeap::Span* span) { | 262 DiscardableSharedMemoryHeap::Span* span) { |
| 290 base::AutoLock lock(lock_); | 263 base::AutoLock lock(lock_); |
| 291 | 264 |
| 292 if (!span->shared_memory()) | 265 if (!span->shared_memory()) |
| 293 return false; | 266 return false; |
| 294 | 267 |
| 295 size_t offset = span->start() * base::GetPageSize() - | 268 size_t offset = span->start() * base::GetPageSize() - |
| (...skipping 30 matching lines...) Expand all Loading... |
| 326 } | 299 } |
| 327 | 300 |
| 328 void ClientDiscardableSharedMemoryManager::ReleaseSpan( | 301 void ClientDiscardableSharedMemoryManager::ReleaseSpan( |
| 329 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) { | 302 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) { |
| 330 base::AutoLock lock(lock_); | 303 base::AutoLock lock(lock_); |
| 331 | 304 |
| 332 // Delete span instead of merging it into free lists if memory is gone. | 305 // Delete span instead of merging it into free lists if memory is gone. |
| 333 if (!span->shared_memory()) | 306 if (!span->shared_memory()) |
| 334 return; | 307 return; |
| 335 | 308 |
| 336 heap_->MergeIntoFreeLists(std::move(span)); | 309 heap_.MergeIntoFreeLists(std::move(span)); |
| 337 | 310 |
| 338 // Bytes of free memory changed. | 311 // Bytes of free memory changed. |
| 339 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists()); | 312 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); |
| 340 } | 313 } |
| 341 | 314 |
| 342 base::trace_event::MemoryAllocatorDump* | 315 base::trace_event::MemoryAllocatorDump* |
| 343 ClientDiscardableSharedMemoryManager::CreateMemoryAllocatorDump( | 316 ClientDiscardableSharedMemoryManager::CreateMemoryAllocatorDump( |
| 344 DiscardableSharedMemoryHeap::Span* span, | 317 DiscardableSharedMemoryHeap::Span* span, |
| 345 const char* name, | 318 const char* name, |
| 346 base::trace_event::ProcessMemoryDump* pmd) const { | 319 base::trace_event::ProcessMemoryDump* pmd) const { |
| 347 base::AutoLock lock(lock_); | 320 base::AutoLock lock(lock_); |
| 348 return heap_->CreateMemoryAllocatorDump(span, name, pmd); | 321 return heap_.CreateMemoryAllocatorDump(span, name, pmd); |
| 349 } | 322 } |
| 350 | 323 |
| 351 std::unique_ptr<base::DiscardableSharedMemory> | 324 std::unique_ptr<base::DiscardableSharedMemory> |
| 352 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableSharedMemory( | 325 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableSharedMemory( |
| 353 size_t size, | 326 size_t size, |
| 354 int32_t id) { | 327 DiscardableSharedMemoryId id) { |
| 355 TRACE_EVENT2("renderer", | 328 TRACE_EVENT2("renderer", |
| 356 "ClientDiscardableSharedMemoryManager::" | 329 "ClientDiscardableSharedMemoryManager::" |
| 357 "AllocateLockedDiscardableSharedMemory", | 330 "AllocateLockedDiscardableSharedMemory", |
| 358 "size", size, "id", id); | 331 "size", size, "id", id); |
| 359 std::unique_ptr<base::DiscardableSharedMemory> memory; | 332 |
| 360 base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL, | 333 base::SharedMemoryHandle handle = base::SharedMemory::NULLHandle(); |
| 361 base::WaitableEvent::InitialState::NOT_SIGNALED); | 334 delegate_->AllocateLockedDiscardableSharedMemory(size, id, &handle); |
| 362 base::ScopedClosureRunner event_signal_runner( | 335 std::unique_ptr<base::DiscardableSharedMemory> memory( |
| 363 base::Bind(&base::WaitableEvent::Signal, base::Unretained(&event))); | 336 new base::DiscardableSharedMemory(handle)); |
| 364 io_task_runner_->PostTask( | 337 if (!memory->Map(size)) |
| 365 FROM_HERE, base::Bind(&ClientDiscardableSharedMemoryManager::AllocateOnIO, | 338 base::TerminateBecauseOutOfMemory(size); |
| 366 base::Unretained(this), size, id, &memory, | |
| 367 base::Passed(&event_signal_runner))); | |
| 368 // Waiting until IPC has finished on the IO thread. | |
| 369 event.Wait(); | |
| 370 return memory; | 339 return memory; |
| 371 } | 340 } |
| 372 | 341 |
| 373 void ClientDiscardableSharedMemoryManager::AllocateOnIO( | |
| 374 size_t size, | |
| 375 int32_t id, | |
| 376 std::unique_ptr<base::DiscardableSharedMemory>* memory, | |
| 377 base::ScopedClosureRunner closure_runner) { | |
| 378 (*manager_mojo_) | |
| 379 ->AllocateLockedDiscardableSharedMemory( | |
| 380 static_cast<uint32_t>(size), id, | |
| 381 base::Bind( | |
| 382 &ClientDiscardableSharedMemoryManager::AllocateCompletedOnIO, | |
| 383 base::Unretained(this), memory, base::Passed(&closure_runner))); | |
| 384 } | |
| 385 | |
| 386 void ClientDiscardableSharedMemoryManager::AllocateCompletedOnIO( | |
| 387 std::unique_ptr<base::DiscardableSharedMemory>* memory, | |
| 388 base::ScopedClosureRunner closure_runner, | |
| 389 mojo::ScopedSharedBufferHandle mojo_handle) { | |
| 390 if (!mojo_handle.is_valid()) | |
| 391 return; | |
| 392 base::SharedMemoryHandle handle = base::SharedMemory::NULLHandle(); | |
| 393 size_t memory_size = 0; | |
| 394 bool read_only = false; | |
| 395 auto result = mojo::UnwrapSharedMemoryHandle(std::move(mojo_handle), &handle, | |
| 396 &memory_size, &read_only); | |
| 397 if (result != MOJO_RESULT_OK) | |
| 398 return; | |
| 399 auto discardable_shared_memory = | |
| 400 base::MakeUnique<base::DiscardableSharedMemory>(handle); | |
| 401 if (!discardable_shared_memory->Map(memory_size)) | |
| 402 base::TerminateBecauseOutOfMemory(memory_size); | |
| 403 *memory = std::move(discardable_shared_memory); | |
| 404 } | |
| 405 | |
| 406 void ClientDiscardableSharedMemoryManager::DeletedDiscardableSharedMemory( | |
| 407 int32_t id) { | |
| 408 io_task_runner_->PostTask( | |
| 409 FROM_HERE, | |
| 410 base::Bind(&DeletedDiscardableSharedMemoryOnIO, manager_mojo_.get(), id)); | |
| 411 } | |
| 412 | |
| 413 void ClientDiscardableSharedMemoryManager::MemoryUsageChanged( | 342 void ClientDiscardableSharedMemoryManager::MemoryUsageChanged( |
| 414 size_t new_bytes_total, | 343 size_t new_bytes_total, |
| 415 size_t new_bytes_free) const { | 344 size_t new_bytes_free) const { |
| 416 static const char kDiscardableMemoryAllocatedKey[] = | 345 static const char kDiscardableMemoryAllocatedKey[] = |
| 417 "discardable-memory-allocated"; | 346 "discardable-memory-allocated"; |
| 418 base::debug::SetCrashKeyValue(kDiscardableMemoryAllocatedKey, | 347 base::debug::SetCrashKeyValue(kDiscardableMemoryAllocatedKey, |
| 419 base::Uint64ToString(new_bytes_total)); | 348 base::Uint64ToString(new_bytes_total)); |
| 420 | 349 |
| 421 static const char kDiscardableMemoryFreeKey[] = "discardable-memory-free"; | 350 static const char kDiscardableMemoryFreeKey[] = "discardable-memory-free"; |
| 422 base::debug::SetCrashKeyValue(kDiscardableMemoryFreeKey, | 351 base::debug::SetCrashKeyValue(kDiscardableMemoryFreeKey, |
| 423 base::Uint64ToString(new_bytes_free)); | 352 base::Uint64ToString(new_bytes_free)); |
| 424 } | 353 } |
| 425 | 354 |
| 426 } // namespace discardable_memory | 355 } // namespace discardable_memory |
| OLD | NEW |