Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(98)

Side by Side Diff: components/discardable_memory/client/client_discardable_shared_memory_manager.cc

Issue 2485623002: discardable_memory: Using mojo IPC to replace Chrome IPC (Closed)
Patch Set: Fix build bots. Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "components/discardable_memory/client/client_discardable_shared_memory_ manager.h" 5 #include "components/discardable_memory/client/client_discardable_shared_memory_ manager.h"
6 6
7 #include <inttypes.h> 7 #include <inttypes.h>
8 8
9 #include <algorithm> 9 #include <algorithm>
10 #include <utility> 10 #include <utility>
11 11
12 #include "base/atomic_sequence_num.h" 12 #include "base/atomic_sequence_num.h"
13 #include "base/bind.h" 13 #include "base/bind.h"
14 #include "base/callback_helpers.h"
14 #include "base/debug/crash_logging.h" 15 #include "base/debug/crash_logging.h"
15 #include "base/macros.h" 16 #include "base/macros.h"
16 #include "base/memory/discardable_memory.h" 17 #include "base/memory/discardable_memory.h"
17 #include "base/memory/discardable_shared_memory.h" 18 #include "base/memory/discardable_shared_memory.h"
18 #include "base/memory/ptr_util.h" 19 #include "base/memory/ptr_util.h"
19 #include "base/metrics/histogram_macros.h" 20 #include "base/metrics/histogram_macros.h"
20 #include "base/process/memory.h" 21 #include "base/process/memory.h"
21 #include "base/process/process_metrics.h" 22 #include "base/process/process_metrics.h"
22 #include "base/strings/string_number_conversions.h" 23 #include "base/strings/string_number_conversions.h"
23 #include "base/strings/stringprintf.h" 24 #include "base/strings/stringprintf.h"
25 #include "base/synchronization/waitable_event.h"
24 #include "base/threading/thread_task_runner_handle.h" 26 #include "base/threading/thread_task_runner_handle.h"
25 #include "base/trace_event/memory_dump_manager.h" 27 #include "base/trace_event/memory_dump_manager.h"
26 #include "base/trace_event/trace_event.h" 28 #include "base/trace_event/trace_event.h"
29 #include "mojo/public/cpp/system/platform_handle.h"
27 30
28 namespace discardable_memory { 31 namespace discardable_memory {
29 namespace { 32 namespace {
30 33
31 // Default allocation size. 34 // Default allocation size.
32 const size_t kAllocationSize = 4 * 1024 * 1024; 35 const size_t kAllocationSize = 4 * 1024 * 1024;
33 36
34 // Global atomic to generate unique discardable shared memory IDs. 37 // Global atomic to generate unique discardable shared memory IDs.
35 base::StaticAtomicSequenceNumber g_next_discardable_shared_memory_id; 38 base::StaticAtomicSequenceNumber g_next_discardable_shared_memory_id;
36 39
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
75 } 78 }
76 79
77 private: 80 private:
78 ClientDiscardableSharedMemoryManager* const manager_; 81 ClientDiscardableSharedMemoryManager* const manager_;
79 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span_; 82 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span_;
80 bool is_locked_; 83 bool is_locked_;
81 84
82 DISALLOW_COPY_AND_ASSIGN(DiscardableMemoryImpl); 85 DISALLOW_COPY_AND_ASSIGN(DiscardableMemoryImpl);
83 }; 86 };
84 87
85 void SendDeletedDiscardableSharedMemoryMessage( 88 void OnManagerMojoConnectionError(
86 ClientDiscardableSharedMemoryManager::Delegate* delegate, 89 mojom::DiscardableSharedMemoryManagerPtr* manager_mojo) {
87 DiscardableSharedMemoryId id) { 90 manager_mojo->reset();
dcheng 2016/11/25 00:07:08 Why do we do this?
Peng 2016/11/25 16:41:52 When the mojo connection is broken for some reason
dcheng 2016/11/28 18:29:14 Any pending tasks will just become no-ops when the
Peng 2016/11/29 15:20:12 If calling a broken or unbound interface does caus
88 delegate->DeletedDiscardableSharedMemory(id); 91 }
92
93 void InitManagerMojoOnIO(mojom::DiscardableSharedMemoryManagerPtr* manager_mojo,
94 mojom::DiscardableSharedMemoryManagerPtrInfo info) {
95 manager_mojo->Bind(std::move(info));
96 manager_mojo->set_connection_error_handler(
97 base::Bind(&OnManagerMojoConnectionError, manager_mojo));
98 }
99
100 void DeletedDiscardableSharedMemoryOnIO(
101 mojom::DiscardableSharedMemoryManagerPtr* manager_mojo,
102 int32_t id) {
103 if (!manager_mojo->is_bound())
104 return;
dcheng 2016/11/25 00:07:08 Should this be a NOTREACHED()?
Peng 2016/11/25 16:41:52 I think it can be reached. For example: manager_mojo has already been reset by the connection error handler.
105 (*manager_mojo)->DeletedDiscardableSharedMemory(id);
89 } 106 }
90 107
91 } // namespace 108 } // namespace
92 109
93 ClientDiscardableSharedMemoryManager::ClientDiscardableSharedMemoryManager( 110 ClientDiscardableSharedMemoryManager::ClientDiscardableSharedMemoryManager(
94 Delegate* delegate) 111 mojom::DiscardableSharedMemoryManagerPtrInfo info,
95 : heap_(base::GetPageSize()), delegate_(delegate) { 112 base::SingleThreadTaskRunner* io_task_runner)
113 : io_task_runner_(io_task_runner),
114 manager_mojo_(new mojom::DiscardableSharedMemoryManagerPtr),
115 heap_(new DiscardableSharedMemoryHeap(base::GetPageSize())) {
96 base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider( 116 base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider(
97 this, "ClientDiscardableSharedMemoryManager", 117 this, "ClientDiscardableSharedMemoryManager",
98 base::ThreadTaskRunnerHandle::Get()); 118 base::ThreadTaskRunnerHandle::Get());
119 io_task_runner_->PostTask(
120 FROM_HERE, base::Bind(&InitManagerMojoOnIO, manager_mojo_.get(),
121 base::Passed(&info)));
99 } 122 }
100 123
101 ClientDiscardableSharedMemoryManager::~ClientDiscardableSharedMemoryManager() { 124 ClientDiscardableSharedMemoryManager::~ClientDiscardableSharedMemoryManager() {
102 base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider( 125 base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider(
103 this); 126 this);
104 // TODO(reveman): Determine if this DCHECK can be enabled. crbug.com/430533 127 // TODO(reveman): Determine if this DCHECK can be enabled. crbug.com/430533
105 // DCHECK_EQ(heap_.GetSize(), heap_.GetSizeOfFreeLists()); 128 // DCHECK_EQ(heap_->GetSize(), heap_->GetSizeOfFreeLists());
106 if (heap_.GetSize()) 129 if (heap_->GetSize())
107 MemoryUsageChanged(0, 0); 130 MemoryUsageChanged(0, 0);
131
132 // Releasing the |heap_| before posting a task for deleting |manager_mojo_|.
133 // It is because releasing |heap_| will invoke DeletedDiscardableSharedMemory
134 // which needs |manager_mojo_|.
135 heap_.reset();
136
137 // Delete the |manager_mojo_| on IO thread, so any pending tasks on IO thread
138 // will be executed before the |manager_mojo_| is deleted.
139 bool posted = io_task_runner_->DeleteSoon(FROM_HERE, manager_mojo_.release());
140 DCHECK(posted);
108 } 141 }
109 142
110 std::unique_ptr<base::DiscardableMemory> 143 std::unique_ptr<base::DiscardableMemory>
111 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory( 144 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory(
112 size_t size) { 145 size_t size) {
113 base::AutoLock lock(lock_); 146 base::AutoLock lock(lock_);
114 147
115 DCHECK_NE(size, 0u); 148 DCHECK_NE(size, 0u);
116 149
117 auto size_in_kb = static_cast<base::HistogramBase::Sample>(size / 1024); 150 auto size_in_kb = static_cast<base::HistogramBase::Sample>(size / 1024);
(...skipping 13 matching lines...) Expand all
131 164
132 size_t slack = 0; 165 size_t slack = 0;
 133 // When searching the free lists, allow a slack between required size and 166 // free span size that is less than or equal to kAllocationSize. This is to
 134 // free span size that is less than or equal to kAllocationSize. This is to 167 // avoid segments larger than kAllocationSize unless they are a perfect
 135 // avoid segments larger than kAllocationSize unless they are a perfect 168 // fit. The result is that large allocations can be reused without reducing
136 // fit. The result is that large allocations can be reused without reducing 169 // fit. The result is that large allocations can be reused without reducing
137 // the ability to discard memory. 170 // the ability to discard memory.
138 if (pages < allocation_pages) 171 if (pages < allocation_pages)
139 slack = allocation_pages - pages; 172 slack = allocation_pages - pages;
140 173
141 size_t heap_size_prior_to_releasing_purged_memory = heap_.GetSize(); 174 size_t heap_size_prior_to_releasing_purged_memory = heap_->GetSize();
142 for (;;) { 175 for (;;) {
143 // Search free lists for suitable span. 176 // Search free lists for suitable span.
144 std::unique_ptr<DiscardableSharedMemoryHeap::Span> free_span = 177 std::unique_ptr<DiscardableSharedMemoryHeap::Span> free_span =
145 heap_.SearchFreeLists(pages, slack); 178 heap_->SearchFreeLists(pages, slack);
146 if (!free_span.get()) 179 if (!free_span.get())
147 break; 180 break;
148 181
149 // Attempt to lock |free_span|. Delete span and search free lists again 182 // Attempt to lock |free_span|. Delete span and search free lists again
150 // if locking failed. 183 // if locking failed.
151 if (free_span->shared_memory()->Lock( 184 if (free_span->shared_memory()->Lock(
152 free_span->start() * base::GetPageSize() - 185 free_span->start() * base::GetPageSize() -
153 reinterpret_cast<size_t>(free_span->shared_memory()->memory()), 186 reinterpret_cast<size_t>(free_span->shared_memory()->memory()),
154 free_span->length() * base::GetPageSize()) == 187 free_span->length() * base::GetPageSize()) ==
155 base::DiscardableSharedMemory::FAILED) { 188 base::DiscardableSharedMemory::FAILED) {
156 DCHECK(!free_span->shared_memory()->IsMemoryResident()); 189 DCHECK(!free_span->shared_memory()->IsMemoryResident());
157 // We have to release purged memory before |free_span| can be destroyed. 190 // We have to release purged memory before |free_span| can be destroyed.
158 heap_.ReleasePurgedMemory(); 191 heap_->ReleasePurgedMemory();
159 DCHECK(!free_span->shared_memory()); 192 DCHECK(!free_span->shared_memory());
160 continue; 193 continue;
161 } 194 }
162 195
163 free_span->set_is_locked(true); 196 free_span->set_is_locked(true);
164 197
165 // Memory usage is guaranteed to have changed after having removed 198 // Memory usage is guaranteed to have changed after having removed
166 // at least one span from the free lists. 199 // at least one span from the free lists.
167 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); 200 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists());
168 201
169 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(free_span)); 202 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(free_span));
170 } 203 }
171 204
172 // Release purged memory to free up the address space before we attempt to 205 // Release purged memory to free up the address space before we attempt to
173 // allocate more memory. 206 // allocate more memory.
174 heap_.ReleasePurgedMemory(); 207 heap_->ReleasePurgedMemory();
175 208
176 // Make sure crash keys are up to date in case allocation fails. 209 // Make sure crash keys are up to date in case allocation fails.
177 if (heap_.GetSize() != heap_size_prior_to_releasing_purged_memory) 210 if (heap_->GetSize() != heap_size_prior_to_releasing_purged_memory)
178 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); 211 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists());
179 212
180 size_t pages_to_allocate = 213 size_t pages_to_allocate =
181 std::max(kAllocationSize / base::GetPageSize(), pages); 214 std::max(kAllocationSize / base::GetPageSize(), pages);
182 size_t allocation_size_in_bytes = pages_to_allocate * base::GetPageSize(); 215 size_t allocation_size_in_bytes = pages_to_allocate * base::GetPageSize();
183 216
184 DiscardableSharedMemoryId new_id = 217 int32_t new_id = g_next_discardable_shared_memory_id.GetNext();
185 g_next_discardable_shared_memory_id.GetNext();
186 218
187 // Ask parent process to allocate a new discardable shared memory segment. 219 // Ask parent process to allocate a new discardable shared memory segment.
188 std::unique_ptr<base::DiscardableSharedMemory> shared_memory( 220 std::unique_ptr<base::DiscardableSharedMemory> shared_memory =
189 AllocateLockedDiscardableSharedMemory(allocation_size_in_bytes, new_id)); 221 AllocateLockedDiscardableSharedMemory(allocation_size_in_bytes, new_id);
190 222
191 // Create span for allocated memory. 223 // Create span for allocated memory.
192 std::unique_ptr<DiscardableSharedMemoryHeap::Span> new_span( 224 std::unique_ptr<DiscardableSharedMemoryHeap::Span> new_span(heap_->Grow(
193 heap_.Grow(std::move(shared_memory), allocation_size_in_bytes, new_id, 225 std::move(shared_memory), allocation_size_in_bytes, new_id,
194 base::Bind(&SendDeletedDiscardableSharedMemoryMessage, 226 base::Bind(
195 delegate_, new_id))); 227 &ClientDiscardableSharedMemoryManager::DeletedDiscardableSharedMemory,
228 base::Unretained(this), new_id)));
196 new_span->set_is_locked(true); 229 new_span->set_is_locked(true);
197 230
198 // Unlock and insert any left over memory into free lists. 231 // Unlock and insert any left over memory into free lists.
199 if (pages < pages_to_allocate) { 232 if (pages < pages_to_allocate) {
200 std::unique_ptr<DiscardableSharedMemoryHeap::Span> leftover = 233 std::unique_ptr<DiscardableSharedMemoryHeap::Span> leftover =
201 heap_.Split(new_span.get(), pages); 234 heap_->Split(new_span.get(), pages);
202 leftover->shared_memory()->Unlock( 235 leftover->shared_memory()->Unlock(
203 leftover->start() * base::GetPageSize() - 236 leftover->start() * base::GetPageSize() -
204 reinterpret_cast<size_t>(leftover->shared_memory()->memory()), 237 reinterpret_cast<size_t>(leftover->shared_memory()->memory()),
205 leftover->length() * base::GetPageSize()); 238 leftover->length() * base::GetPageSize());
206 leftover->set_is_locked(false); 239 leftover->set_is_locked(false);
207 heap_.MergeIntoFreeLists(std::move(leftover)); 240 heap_->MergeIntoFreeLists(std::move(leftover));
208 } 241 }
209 242
210 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); 243 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists());
211 244
212 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(new_span)); 245 return base::MakeUnique<DiscardableMemoryImpl>(this, std::move(new_span));
213 } 246 }
214 247
215 bool ClientDiscardableSharedMemoryManager::OnMemoryDump( 248 bool ClientDiscardableSharedMemoryManager::OnMemoryDump(
216 const base::trace_event::MemoryDumpArgs& args, 249 const base::trace_event::MemoryDumpArgs& args,
217 base::trace_event::ProcessMemoryDump* pmd) { 250 base::trace_event::ProcessMemoryDump* pmd) {
218 base::AutoLock lock(lock_); 251 base::AutoLock lock(lock_);
219 if (args.level_of_detail == 252 if (args.level_of_detail ==
220 base::trace_event::MemoryDumpLevelOfDetail::BACKGROUND) { 253 base::trace_event::MemoryDumpLevelOfDetail::BACKGROUND) {
221 base::trace_event::MemoryAllocatorDump* total_dump = 254 base::trace_event::MemoryAllocatorDump* total_dump =
222 pmd->CreateAllocatorDump( 255 pmd->CreateAllocatorDump(
223 base::StringPrintf("discardable/child_0x%" PRIXPTR, 256 base::StringPrintf("discardable/child_0x%" PRIXPTR,
224 reinterpret_cast<uintptr_t>(this))); 257 reinterpret_cast<uintptr_t>(this)));
225 const size_t total_size = heap_.GetSize(); 258 const size_t total_size = heap_->GetSize();
226 const size_t freelist_size = heap_.GetSizeOfFreeLists(); 259 const size_t freelist_size = heap_->GetSizeOfFreeLists();
227 total_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize, 260 total_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
228 base::trace_event::MemoryAllocatorDump::kUnitsBytes, 261 base::trace_event::MemoryAllocatorDump::kUnitsBytes,
229 total_size - freelist_size); 262 total_size - freelist_size);
230 total_dump->AddScalar("freelist_size", 263 total_dump->AddScalar("freelist_size",
231 base::trace_event::MemoryAllocatorDump::kUnitsBytes, 264 base::trace_event::MemoryAllocatorDump::kUnitsBytes,
232 freelist_size); 265 freelist_size);
233 return true; 266 return true;
234 } 267 }
235 268
236 return heap_.OnMemoryDump(pmd); 269 return heap_->OnMemoryDump(pmd);
237 } 270 }
238 271
239 ClientDiscardableSharedMemoryManager::Statistics 272 ClientDiscardableSharedMemoryManager::Statistics
240 ClientDiscardableSharedMemoryManager::GetStatistics() const { 273 ClientDiscardableSharedMemoryManager::GetStatistics() const {
241 base::AutoLock lock(lock_); 274 base::AutoLock lock(lock_);
242 Statistics stats; 275 Statistics stats;
243 stats.total_size = heap_.GetSize(); 276 stats.total_size = heap_->GetSize();
244 stats.freelist_size = heap_.GetSizeOfFreeLists(); 277 stats.freelist_size = heap_->GetSizeOfFreeLists();
245 return stats; 278 return stats;
246 } 279 }
247 280
248 void ClientDiscardableSharedMemoryManager::ReleaseFreeMemory() { 281 void ClientDiscardableSharedMemoryManager::ReleaseFreeMemory() {
249 base::AutoLock lock(lock_); 282 base::AutoLock lock(lock_);
250 283
251 size_t heap_size_prior_to_releasing_memory = heap_.GetSize(); 284 size_t heap_size_prior_to_releasing_memory = heap_->GetSize();
252 285
253 // Release both purged and free memory. 286 // Release both purged and free memory.
254 heap_.ReleasePurgedMemory(); 287 heap_->ReleasePurgedMemory();
255 heap_.ReleaseFreeMemory(); 288 heap_->ReleaseFreeMemory();
256 289
257 if (heap_.GetSize() != heap_size_prior_to_releasing_memory) 290 if (heap_->GetSize() != heap_size_prior_to_releasing_memory)
258 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); 291 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists());
259 } 292 }
260 293
261 bool ClientDiscardableSharedMemoryManager::LockSpan( 294 bool ClientDiscardableSharedMemoryManager::LockSpan(
262 DiscardableSharedMemoryHeap::Span* span) { 295 DiscardableSharedMemoryHeap::Span* span) {
263 base::AutoLock lock(lock_); 296 base::AutoLock lock(lock_);
264 297
265 if (!span->shared_memory()) 298 if (!span->shared_memory())
266 return false; 299 return false;
267 300
268 size_t offset = span->start() * base::GetPageSize() - 301 size_t offset = span->start() * base::GetPageSize() -
(...skipping 30 matching lines...) Expand all
299 } 332 }
300 333
301 void ClientDiscardableSharedMemoryManager::ReleaseSpan( 334 void ClientDiscardableSharedMemoryManager::ReleaseSpan(
302 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) { 335 std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) {
303 base::AutoLock lock(lock_); 336 base::AutoLock lock(lock_);
304 337
305 // Delete span instead of merging it into free lists if memory is gone. 338 // Delete span instead of merging it into free lists if memory is gone.
306 if (!span->shared_memory()) 339 if (!span->shared_memory())
307 return; 340 return;
308 341
309 heap_.MergeIntoFreeLists(std::move(span)); 342 heap_->MergeIntoFreeLists(std::move(span));
310 343
311 // Bytes of free memory changed. 344 // Bytes of free memory changed.
312 MemoryUsageChanged(heap_.GetSize(), heap_.GetSizeOfFreeLists()); 345 MemoryUsageChanged(heap_->GetSize(), heap_->GetSizeOfFreeLists());
313 } 346 }
314 347
315 base::trace_event::MemoryAllocatorDump* 348 base::trace_event::MemoryAllocatorDump*
316 ClientDiscardableSharedMemoryManager::CreateMemoryAllocatorDump( 349 ClientDiscardableSharedMemoryManager::CreateMemoryAllocatorDump(
317 DiscardableSharedMemoryHeap::Span* span, 350 DiscardableSharedMemoryHeap::Span* span,
318 const char* name, 351 const char* name,
319 base::trace_event::ProcessMemoryDump* pmd) const { 352 base::trace_event::ProcessMemoryDump* pmd) const {
320 base::AutoLock lock(lock_); 353 base::AutoLock lock(lock_);
321 return heap_.CreateMemoryAllocatorDump(span, name, pmd); 354 return heap_->CreateMemoryAllocatorDump(span, name, pmd);
322 } 355 }
323 356
324 std::unique_ptr<base::DiscardableSharedMemory> 357 std::unique_ptr<base::DiscardableSharedMemory>
325 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableSharedMemory( 358 ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableSharedMemory(
326 size_t size, 359 size_t size,
dcheng 2016/11/25 00:07:08 Let's use uint32_t consistently throughout, if that's possible.
Peng 2016/11/25 16:41:52 This function is overridden from the base class, so its signature cannot be changed here.
327 DiscardableSharedMemoryId id) { 360 int32_t id) {
328 TRACE_EVENT2("renderer", 361 TRACE_EVENT2("renderer",
329 "ClientDiscardableSharedMemoryManager::" 362 "ClientDiscardableSharedMemoryManager::"
330 "AllocateLockedDiscardableSharedMemory", 363 "AllocateLockedDiscardableSharedMemory",
331 "size", size, "id", id); 364 "size", size, "id", id);
332 365 std::unique_ptr<base::DiscardableSharedMemory> memory;
333 base::SharedMemoryHandle handle = base::SharedMemory::NULLHandle(); 366 base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL,
334 delegate_->AllocateLockedDiscardableSharedMemory(size, id, &handle); 367 base::WaitableEvent::InitialState::NOT_SIGNALED);
335 std::unique_ptr<base::DiscardableSharedMemory> memory( 368 io_task_runner_->PostTask(
336 new base::DiscardableSharedMemory(handle)); 369 FROM_HERE, base::Bind(&ClientDiscardableSharedMemoryManager::AllocateOnIO,
337 if (!memory->Map(size)) 370 base::Unretained(this), size, id, &memory, &event));
338 base::TerminateBecauseOutOfMemory(size); 371 // Waiting until IPC has finished on the IO thread.
372 event.Wait();
dcheng 2016/11/25 00:07:08 I wonder... should we just be using a sync mojo IP
Peng 2016/11/25 16:41:52 This function could be called from any thread, but the mojo interface pointer can only be used on the IO thread.
339 return memory; 373 return memory;
340 } 374 }
341 375
376 void ClientDiscardableSharedMemoryManager::AllocateOnIO(
377 size_t size,
378 int32_t id,
379 std::unique_ptr<base::DiscardableSharedMemory>* memory,
380 base::WaitableEvent* event) {
381 if (!manager_mojo_->is_bound())
382 return;
383 (*manager_mojo_)
384 ->AllocateLockedDiscardableSharedMemory(
385 static_cast<uint32_t>(size), id,
386 base::Bind(
387 &ClientDiscardableSharedMemoryManager::AllocateCompletedOnIO,
388 base::Unretained(this), memory, event));
389 }
390
391 void ClientDiscardableSharedMemoryManager::AllocateCompletedOnIO(
392 std::unique_ptr<base::DiscardableSharedMemory>* memory,
393 base::WaitableEvent* event,
394 mojo::ScopedSharedBufferHandle mojo_handle) {
395 base::ScopedClosureRunner event_signal_runner(
396 base::Bind(&base::WaitableEvent::Signal, base::Unretained(event)));
397
398 if (!mojo_handle.is_valid())
399 return;
400 base::SharedMemoryHandle handle = base::SharedMemory::NULLHandle();
401 size_t memory_size = 0;
402 bool read_only = false;
403 auto result = mojo::UnwrapSharedMemoryHandle(std::move(mojo_handle), &handle,
404 &memory_size, &read_only);
405 if (result != MOJO_RESULT_OK)
406 return;
407 auto discardable_shared_memory =
408 base::MakeUnique<base::DiscardableSharedMemory>(handle);
409 if (!discardable_shared_memory->Map(memory_size))
410 base::TerminateBecauseOutOfMemory(memory_size);
411 *memory = std::move(discardable_shared_memory);
412 }
413
414 void ClientDiscardableSharedMemoryManager::DeletedDiscardableSharedMemory(
415 int32_t id) {
416 io_task_runner_->PostTask(
417 FROM_HERE,
418 base::Bind(&DeletedDiscardableSharedMemoryOnIO, manager_mojo_.get(), id));
419 }
420
342 void ClientDiscardableSharedMemoryManager::MemoryUsageChanged( 421 void ClientDiscardableSharedMemoryManager::MemoryUsageChanged(
343 size_t new_bytes_total, 422 size_t new_bytes_total,
344 size_t new_bytes_free) const { 423 size_t new_bytes_free) const {
345 static const char kDiscardableMemoryAllocatedKey[] = 424 static const char kDiscardableMemoryAllocatedKey[] =
346 "discardable-memory-allocated"; 425 "discardable-memory-allocated";
347 base::debug::SetCrashKeyValue(kDiscardableMemoryAllocatedKey, 426 base::debug::SetCrashKeyValue(kDiscardableMemoryAllocatedKey,
348 base::Uint64ToString(new_bytes_total)); 427 base::Uint64ToString(new_bytes_total));
349 428
350 static const char kDiscardableMemoryFreeKey[] = "discardable-memory-free"; 429 static const char kDiscardableMemoryFreeKey[] = "discardable-memory-free";
351 base::debug::SetCrashKeyValue(kDiscardableMemoryFreeKey, 430 base::debug::SetCrashKeyValue(kDiscardableMemoryFreeKey,
352 base::Uint64ToString(new_bytes_free)); 431 base::Uint64ToString(new_bytes_free));
353 } 432 }
354 433
355 } // namespace discardable_memory 434 } // namespace discardable_memory
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698