Chromium Code Reviews

Unified Diff: components/discardable_memory/common/discardable_shared_memory_heap.cc

Issue 2459733002: Move discardable memory to //components from //content (Closed)
Patch Set: Fix build error (created 4 years, 1 month ago)
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include "content/common/discardable_shared_memory_heap.h"
+#include "components/discardable_memory/common/discardable_shared_memory_heap.h"
 
 #include <algorithm>
 #include <utility>
 
 #include "base/format_macros.h"
 #include "base/macros.h"
 #include "base/memory/discardable_shared_memory.h"
 #include "base/memory/ptr_util.h"
 #include "base/strings/stringprintf.h"
 #include "base/trace_event/memory_dump_manager.h"
 
-namespace content {
+namespace discardable_memory {
 namespace {
 
 bool IsPowerOfTwo(size_t x) {
   return (x & (x - 1)) == 0;
 }
 
 bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
   return span->previous() || span->next();
 }
 
 }  // namespace
 
 DiscardableSharedMemoryHeap::Span::Span(
     base::DiscardableSharedMemory* shared_memory,
     size_t start,
     size_t length)
     : shared_memory_(shared_memory),
       start_(start),
       length_(length),
       is_locked_(false) {}
 
-DiscardableSharedMemoryHeap::Span::~Span() {
-}
+DiscardableSharedMemoryHeap::Span::~Span() {}
 
 DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
     DiscardableSharedMemoryHeap* heap,
     std::unique_ptr<base::DiscardableSharedMemory> shared_memory,
     size_t size,
     int32_t id,
     const base::Closure& deleted_callback)
     : heap_(heap),
       shared_memory_(std::move(shared_memory)),
       size_(size),
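
A note on the IsPowerOfTwo helper above: it uses the classic bit trick that clearing the lowest set bit of x via x & (x - 1) yields zero exactly when at most one bit is set. The expression is also true for x == 0, so callers are expected to pass a non-zero size. A minimal standalone sketch of the check (plain C++, independent of Chromium):

#include <cassert>
#include <cstddef>

// Same test as the helper in the anonymous namespace above.
bool IsPowerOfTwo(size_t x) {
  // x & (x - 1) clears the lowest set bit; the result is zero exactly
  // when x has at most one bit set.
  return (x & (x - 1)) == 0;
}

int main() {
  assert(IsPowerOfTwo(1));
  assert(IsPowerOfTwo(4096));
  assert(!IsPowerOfTwo(4097));
  assert(IsPowerOfTwo(0));  // Caveat: 0 also passes, despite not being a power of two.
  return 0;
}
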
(...skipping 196 matching lines...)
 size_t DiscardableSharedMemoryHeap::GetSize() const {
   return num_blocks_ * block_size_;
 }
 
 size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
   return num_free_blocks_ * block_size_;
 }
 
 bool DiscardableSharedMemoryHeap::OnMemoryDump(
     base::trace_event::ProcessMemoryDump* pmd) {
-  std::for_each(
-      memory_segments_.begin(), memory_segments_.end(),
-      [pmd](const ScopedMemorySegment* segment) {
-        segment->OnMemoryDump(pmd);
-      });
+  std::for_each(memory_segments_.begin(), memory_segments_.end(),
+                [pmd](const ScopedMemorySegment* segment) {
+                  segment->OnMemoryDump(pmd);
+                });
   return true;
 }
 
 void DiscardableSharedMemoryHeap::InsertIntoFreeList(
     std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) {
   DCHECK(!IsInFreeList(span.get()));
   size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
   free_spans_[index].Append(span.release());
 }
 
 std::unique_ptr<DiscardableSharedMemoryHeap::Span>
 DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
   DCHECK(IsInFreeList(span));
   span->RemoveFromList();
   return base::WrapUnique(span);
 }
 
 std::unique_ptr<DiscardableSharedMemoryHeap::Span>
 DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
   std::unique_ptr<Span> serving = RemoveFromFreeList(span);
 
-  const int extra = serving->length_ - blocks;
+  const size_t extra = serving->length_ - blocks;
   if (extra) {
     std::unique_ptr<Span> leftover(
         new Span(serving->shared_memory_, serving->start_ + blocks, extra));
     leftover->set_is_locked(false);
     DCHECK(extra == 1 || spans_.find(leftover->start_) == spans_.end());
     RegisterSpan(leftover.get());
 
     // No need to coalesce as the previous span of |leftover| was just split
     // and the next span of |leftover| was not previously coalesced with
     // |span|.
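
Two details in the hunk above are worth spelling out. InsertIntoFreeList buckets a span by std::min(span->length_, arraysize(free_spans_)) - 1, so short spans get an exact-size list and every longer span shares the last list; and the patch changes |extra| in Carve from int to size_t, matching the unsigned operands of the subtraction. A minimal sketch of the bucketing rule, assuming a hypothetical bucket count (the real array size lives in the header, which is not part of this diff):

#include <algorithm>
#include <cstddef>

// Hypothetical stand-in for arraysize(free_spans_); the real value is
// declared in discardable_shared_memory_heap.h.
constexpr size_t kNumFreeSpanLists = 256;

// Mirrors the bucketing in InsertIntoFreeList: spans of 1..kNumFreeSpanLists-1
// blocks each get a dedicated list; all longer spans share the last list.
size_t FreeListIndex(size_t length_in_blocks) {
  return std::min(length_in_blocks, kNumFreeSpanLists) - 1;
}
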
(...skipping 117 matching lines...)
                       base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                       allocated_objects_count);
   obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                       base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                       allocated_objects_size_in_bytes);
   obj_dump->AddScalar("locked_size",
                       base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                       locked_objects_size_in_bytes);
 
   // Emit an ownership edge towards a global allocator dump node. This allows
-  // to avoid double-counting segments when both browser and child process emit
+  // to avoid double-counting segments when both browser and client process emit
   // them. In the special case of single-process-mode, this will be the only
   // dumper active and the single ownership edge will become a no-op in the UI.
   // The global dump is created as a weak dump so that the segment is removed if
   // the browser does not dump it (segment was purged).
   const uint64_t tracing_process_id =
       base::trace_event::MemoryDumpManager::GetInstance()
           ->GetTracingProcessId();
   base::trace_event::MemoryAllocatorDumpGuid shared_segment_guid =
       GetSegmentGUIDForTracing(tracing_process_id, segment_id);
   pmd->CreateWeakSharedGlobalAllocatorDump(shared_segment_guid);
 
   // The size is added to the global dump so that it gets propagated to both the
   // dumps associated.
   pmd->GetSharedGlobalAllocatorDump(shared_segment_guid)
       ->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                   base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                   allocated_objects_size_in_bytes);
 
   // By creating an edge with a higher |importance| (w.r.t. browser-side dumps)
-  // the tracing UI will account the effective size of the segment to the child.
+  // the tracing UI will account the effective size of the segment to the
+  // client.
   const int kImportance = 2;
   pmd->AddOwnershipEdge(segment_dump->guid(), shared_segment_guid, kImportance);
 }
 
 // static
 base::trace_event::MemoryAllocatorDumpGuid
 DiscardableSharedMemoryHeap::GetSegmentGUIDForTracing(
     uint64_t tracing_process_id,
     int32_t segment_id) {
   return base::trace_event::MemoryAllocatorDumpGuid(base::StringPrintf(
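
GetSegmentGUIDForTracing builds the GUID from the tracing process id and the segment id via base::StringPrintf; the actual format string falls in the elided lines below. A hypothetical stand-in showing the idea in plain C++ (the name SegmentGuidString and the format are illustrative, not the real ones):

#include <cinttypes>
#include <cstdint>
#include <cstdio>
#include <string>

// Hypothetical encoding of the two ids: any stable scheme works, as long as
// browser and client derive the same name for the same segment so that their
// dumps meet at one global node.
std::string SegmentGuidString(uint64_t tracing_process_id, int32_t segment_id) {
  char buf[64];
  std::snprintf(buf, sizeof(buf), "discardable/process_%" PRIu64 "/segment_%d",
                tracing_process_id, segment_id);
  return std::string(buf);
}
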
(...skipping 15 matching lines...)
 
   ScopedVector<ScopedMemorySegment>::const_iterator it =
       std::find_if(memory_segments_.begin(), memory_segments_.end(),
                    [span](const ScopedMemorySegment* segment) {
                      return segment->ContainsSpan(span);
                    });
   DCHECK(it != memory_segments_.end());
   return (*it)->CreateMemoryAllocatorDump(span, block_size_, name, pmd);
 }
 
-}  // namespace content
+}  // namespace discardable_memory
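
The ownership-edge comments in the diff describe how double counting is avoided: each process's segment dump points at the same weak global node, and the tracing UI charges the bytes along the edge with the highest |importance| (kImportance = 2 on the client side, versus the browser's default). A toy model of that resolution rule in plain C++ (this is not the real trace_event API):

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Each edge says "this owner claims the shared segment" with a priority.
struct OwnershipEdge {
  std::string owner;
  int importance;
};

int main() {
  const uint64_t segment_size = 4u * 1024 * 1024;
  // Browser-side dump uses a default importance of 0; the client-side dump
  // uses kImportance = 2, as in CreateMemoryAllocatorDump above.
  std::vector<OwnershipEdge> edges = {{"browser", 0}, {"client", 2}};
  const OwnershipEdge* winner = &edges[0];
  for (const OwnershipEdge& e : edges)
    if (e.importance > winner->importance) winner = &e;
  // The UI attributes the segment's effective size to the winning owner.
  std::cout << winner->owner << " is charged " << segment_size << " bytes\n";
  return 0;
}
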
