Chromium Code Reviews

Unified Diff: content/common/discardable_shared_memory_heap.cc

Issue 1374213002: [tracing] Display the locked size of discardable memory segment. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Storing locked state in spans. Created 5 years, 2 months ago
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/common/discardable_shared_memory_heap.h"

 #include <algorithm>

 #include "base/format_macros.h"
 #include "base/memory/discardable_shared_memory.h"
 #include "base/strings/stringprintf.h"
 #include "base/trace_event/memory_dump_manager.h"

 namespace content {
 namespace {

 bool IsPowerOfTwo(size_t x) {
   return (x & (x - 1)) == 0;
 }

 bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
   return span->previous() || span->next();
 }

 }  // namespace

 DiscardableSharedMemoryHeap::Span::Span(
     base::DiscardableSharedMemory* shared_memory,
     size_t start,
-    size_t length)
-    : shared_memory_(shared_memory), start_(start), length_(length) {
-}
+    size_t length,
+    bool is_locked)
+    : shared_memory_(shared_memory),
+      start_(start),
+      length_(length),
+      is_locked_(is_locked) {}

 DiscardableSharedMemoryHeap::Span::~Span() {
 }

+base::DiscardableSharedMemory::LockResult
+DiscardableSharedMemoryHeap::Span::Lock(size_t page_size) {
+  const size_t offset =
+      start_ * page_size - reinterpret_cast<size_t>(shared_memory_->memory());
+  const size_t length = length_ * page_size;
+  base::DiscardableSharedMemory::LockResult result =
+      shared_memory_->Lock(offset, length);
+  is_locked_ = result == base::DiscardableSharedMemory::SUCCESS;
+  return result;
+}
+
+void DiscardableSharedMemoryHeap::Span::Unlock(size_t page_size) {
+  const size_t offset =
+      start_ * page_size - reinterpret_cast<size_t>(shared_memory_->memory());
+  const size_t length = length_ * page_size;
+  shared_memory_->Unlock(offset, length);
+  is_locked_ = false;
+}
+
+bool DiscardableSharedMemoryHeap::Span::IsMemoryResident() const {
+  return shared_memory_->IsMemoryResident();
+}

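Note on the new Lock()/Unlock() pair: |start_| is the span's address divided by the block size, so start_ * page_size recovers the absolute address, and subtracting the segment base yields the byte offset handed to base::DiscardableSharedMemory. A minimal usage sketch follows; TouchSpan is hypothetical, not part of this CL, and assumes |page_size| equals the heap's block size.

void TouchSpan(DiscardableSharedMemoryHeap::Span* span, size_t page_size) {
  // Lock() pins the span's pages. PURGED means the contents were discarded
  // while the span was unlocked and must be reinitialized before use.
  base::DiscardableSharedMemory::LockResult result = span->Lock(page_size);
  if (result == base::DiscardableSharedMemory::FAILED)
    return;  // The address range could not be locked.
  if (result == base::DiscardableSharedMemory::PURGED) {
    // Repopulate the span's memory here before reading it.
  }
  // ... use the span's memory ...
  span->Unlock(page_size);
}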
 DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
     DiscardableSharedMemoryHeap* heap,
     scoped_ptr<base::DiscardableSharedMemory> shared_memory,
     size_t size,
     int32_t id,
     const base::Closure& deleted_callback)
     : heap_(heap),
       shared_memory_(shared_memory.Pass()),
(...skipping 10 matching lines...)
 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
   return heap_->IsMemoryUsed(shared_memory_.get(), size_);
 }

 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
   return heap_->IsMemoryResident(shared_memory_.get());
 }

 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::ContainsSpan(
     Span* span) const {
-  return shared_memory_ == span->shared_memory();
+  return shared_memory_ == span->shared_memory_;
 }

 base::trace_event::MemoryAllocatorDump*
 DiscardableSharedMemoryHeap::ScopedMemorySegment::CreateMemoryAllocatorDump(
     Span* span,
     size_t block_size,
     const char* name,
     base::trace_event::ProcessMemoryDump* pmd) const {
-  DCHECK_EQ(shared_memory_, span->shared_memory());
+  DCHECK_EQ(shared_memory_, span->shared_memory_);
   base::trace_event::MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(name);
   dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                   base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                   static_cast<uint64_t>(span->length() * block_size));

   pmd->AddSuballocation(
       dump->guid(),
       base::StringPrintf("discardable/segment_%d/allocated_objects", id_));
   return dump;
 }
(...skipping 27 matching lines...)
     const base::Closure& deleted_callback) {
   // Memory must be aligned to block size.
   DCHECK_EQ(
       reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
       0u);
   DCHECK_EQ(size & (block_size_ - 1), 0u);

   scoped_ptr<Span> span(
       new Span(shared_memory.get(),
                reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
-               size / block_size_));
+               size / block_size_, true /* is_locked */));
   DCHECK(spans_.find(span->start_) == spans_.end());
   DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
   RegisterSpan(span.get());

   num_blocks_ += span->length_;

   // Start tracking if the segment is resident by adding it to
   // |memory_segments_|.
   memory_segments_.push_back(new ScopedMemorySegment(
       this, shared_memory.Pass(), size, id, deleted_callback));

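A worked example of the alignment checks and block arithmetic above, using hypothetical numbers rather than values from the CL:

#include <cstddef>

// With a 4 KiB block size, a segment mapped at 0x20000000 with size 32768
// passes both DCHECKs (the bitmask test works because the block size is a
// power of two), and the initial span covers blocks [0x20000, 0x20008).
constexpr size_t kBlockSize = 4096;     // block_size_
constexpr size_t kBase = 0x20000000;    // shared_memory->memory()
constexpr size_t kSegmentSize = 32768;  // size
static_assert((kBase & (kBlockSize - 1)) == 0, "base is block-aligned");
static_assert((kSegmentSize & (kBlockSize - 1)) == 0, "size is block-aligned");
constexpr size_t kStart = kBase / kBlockSize;          // 0x20000
constexpr size_t kLength = kSegmentSize / kBlockSize;  // 8 blocks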
(...skipping 32 matching lines...)
   }

   InsertIntoFreeList(span.Pass());
 }

 scoped_ptr<DiscardableSharedMemoryHeap::Span>
 DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
   DCHECK(blocks);
   DCHECK_LT(blocks, span->length_);

-  scoped_ptr<Span> leftover(new Span(
-      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
+  scoped_ptr<Span> leftover(
+      new Span(span->shared_memory_, span->start_ + blocks,
+               span->length_ - blocks, true /* is_locked */));
   DCHECK_IMPLIES(leftover->length_ > 1,
                  spans_.find(leftover->start_) == spans_.end());
   RegisterSpan(leftover.get());
   spans_[span->start_ + blocks - 1] = span;
   span->length_ = blocks;
   return leftover.Pass();
 }

 scoped_ptr<DiscardableSharedMemoryHeap::Span>
 DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
(...skipping 82 matching lines...)
   span->RemoveFromList();
   return make_scoped_ptr(span);
 }

 scoped_ptr<DiscardableSharedMemoryHeap::Span>
 DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
   scoped_ptr<Span> serving = RemoveFromFreeList(span);

   const int extra = serving->length_ - blocks;
   if (extra) {
-    scoped_ptr<Span> leftover(
-        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
+    scoped_ptr<Span> leftover(new Span(serving->shared_memory_,
+                                       serving->start_ + blocks, extra,
+                                       false /* is_locked */));
     DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end());
     RegisterSpan(leftover.get());

     // No need to coalesce as the previous span of |leftover| was just split
     // and the next span of |leftover| was not previously coalesced with
     // |span|.
     InsertIntoFreeList(leftover.Pass());

     serving->length_ = blocks;
     spans_[serving->start_ + blocks - 1] = serving.get();
(...skipping 69 matching lines...)
   }
 }

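The two leftover spans above are deliberately constructed with opposite locked states. A comment-only sketch of the assumed rationale (inferred from the call sites, not stated in the CL):

// Split(): the leftover is split off a span that is still allocated and
//          whose memory is locked, so it starts with is_locked == true.
// Carve(): the leftover goes straight back onto the free list, and
//          free-list memory is unlocked, so it starts with
//          is_locked == false.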
 void DiscardableSharedMemoryHeap::OnMemoryDump(
     const base::DiscardableSharedMemory* shared_memory,
     size_t size,
     int32_t segment_id,
     base::trace_event::ProcessMemoryDump* pmd) {
   size_t allocated_objects_count = 0;
   size_t allocated_objects_size_in_bytes = 0;
+  size_t locked_size_in_bytes = 0;
   size_t offset =
       reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
   size_t end = offset + size / block_size_;
   while (offset < end) {
     Span* span = spans_[offset];
     if (!IsInFreeList(span)) {
       allocated_objects_count++;
-      allocated_objects_size_in_bytes += span->length_ * block_size_;
+      const size_t span_size = span->length_ * block_size_;
+      allocated_objects_size_in_bytes += span_size;
+      locked_size_in_bytes += span->is_locked_ ? span_size : 0;
     }
     offset += span->length_;
   }

   std::string segment_dump_name =
       base::StringPrintf("discardable/segment_%d", segment_id);
   base::trace_event::MemoryAllocatorDump* segment_dump =
       pmd->CreateAllocatorDump(segment_dump_name);
   segment_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                           base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                           static_cast<uint64_t>(size));

   base::trace_event::MemoryAllocatorDump* obj_dump =
       pmd->CreateAllocatorDump(segment_dump_name + "/allocated_objects");
   obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectCount,
                       base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                       static_cast<uint64_t>(allocated_objects_count));
   obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                       base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                       static_cast<uint64_t>(allocated_objects_size_in_bytes));
+  obj_dump->AddScalar("locked_size",
+                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
+                      static_cast<uint64_t>(locked_size_in_bytes));

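A quick sanity check of the accounting loop above, with a hypothetical segment layout:

// Hypothetical layout, block_size_ = 4096: spans of 2, 3, and 1 blocks,
// where the 3-block span sits in the free list and only the 2-block span
// is locked. The walk then produces:
//   allocated_objects_count         == 2         (the 2- and 1-block spans)
//   allocated_objects_size_in_bytes == 3 * 4096  == 12288
//   locked_size_in_bytes            == 2 * 4096  == 8192
// locked_size accumulates with += so that several locked spans in one
// segment sum, rather than the last visited span winning.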
   // Emit an ownership edge towards a global allocator dump node. This allows
   // us to avoid double-counting segments when both the browser and a child
   // process emit them. In the special case of single-process mode, this will
   // be the only dumper active and the single ownership edge will become a
   // no-op in the UI.
   const uint64 tracing_process_id =
       base::trace_event::MemoryDumpManager::GetInstance()
           ->GetTracingProcessId();
   base::trace_event::MemoryAllocatorDumpGuid shared_segment_guid =
       GetSegmentGUIDForTracing(tracing_process_id, segment_id);
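The lines that follow are elided by the review tool. As general context only, a cross-process ownership edge of the kind the comment describes is typically emitted through the base/trace_event API roughly like this (a sketch, not the elided code):

// Sketch only: each process creates or looks up the shared global dump for
// the same cross-process GUID and points its local segment dump at it.
pmd->CreateSharedGlobalAllocatorDump(shared_segment_guid);
pmd->AddOwnershipEdge(segment_dump->guid(), shared_segment_guid);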
(...skipping 29 matching lines...)
   ScopedVector<ScopedMemorySegment>::const_iterator it =
       std::find_if(memory_segments_.begin(), memory_segments_.end(),
                    [span](const ScopedMemorySegment* segment) {
                      return segment->ContainsSpan(span);
                    });
   DCHECK(it != memory_segments_.end());
   return (*it)->CreateMemoryAllocatorDump(span, block_size_, name, pmd);
 }

 }  // namespace content
