Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1675)

Unified Diff: content/common/discardable_shared_memory_heap.cc

Issue 2459733002: Move discardable memory to //components from //content (Closed)
Patch Set: Fix build error Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: content/common/discardable_shared_memory_heap.cc
diff --git a/content/common/discardable_shared_memory_heap.cc b/content/common/discardable_shared_memory_heap.cc
deleted file mode 100644
index b6559af4c103d42b6c631631d76fa9e32847342c..0000000000000000000000000000000000000000
--- a/content/common/discardable_shared_memory_heap.cc
+++ /dev/null
@@ -1,477 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "content/common/discardable_shared_memory_heap.h"
-
-#include <algorithm>
-#include <utility>
-
-#include "base/format_macros.h"
-#include "base/macros.h"
-#include "base/memory/discardable_shared_memory.h"
-#include "base/memory/ptr_util.h"
-#include "base/strings/stringprintf.h"
-#include "base/trace_event/memory_dump_manager.h"
-
-namespace content {
-namespace {
-
// Returns true iff |x| is a non-zero power of two.
// The previous expression ((x & (x - 1)) == 0) also returned true for 0;
// the heap constructor DCHECKs a non-zero block size separately, but the
// explicit check makes this helper correct on its own.
bool IsPowerOfTwo(size_t x) {
  return x != 0 && (x & (x - 1)) == 0;
}
-
-bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
- return span->previous() || span->next();
-}
-
-} // namespace
-
// Constructs a span covering |length| blocks starting at block index
// |start| within |shared_memory|. |start| is a memory address divided by
// the heap block size (see DiscardableSharedMemoryHeap::Grow), not a byte
// offset. Spans start out unlocked.
DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory),
      start_(start),
      length_(length),
      is_locked_(false) {}
-
-DiscardableSharedMemoryHeap::Span::~Span() {
-}
-
// Takes ownership of |shared_memory|. |deleted_callback| is run from the
// destructor, after the heap has released the segment's spans.
DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
    DiscardableSharedMemoryHeap* heap,
    std::unique_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback)
    : heap_(heap),
      shared_memory_(std::move(shared_memory)),
      size_(size),
      id_(id),
      deleted_callback_(deleted_callback) {}
-
DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  // Unregister this segment's spans and update the heap's block counters
  // before notifying the owner via |deleted_callback_|.
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  deleted_callback_.Run();
}
-
// Returns true if any part of this segment is still allocated.
bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}
-
// Returns true if the underlying discardable memory is still resident,
// i.e. has not been purged.
bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
  return heap_->IsMemoryResident(shared_memory_.get());
}
-
-bool DiscardableSharedMemoryHeap::ScopedMemorySegment::ContainsSpan(
- Span* span) const {
- return shared_memory_.get() == span->shared_memory();
-}
-
// Creates an allocator dump named |name| for |span|, sized as
// length-in-blocks * |block_size| bytes, and registers it as a
// suballocation of this segment's "allocated_objects" dump.
base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::ScopedMemorySegment::CreateMemoryAllocatorDump(
    Span* span,
    size_t block_size,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  // |span| must belong to this segment.
  DCHECK_EQ(shared_memory_.get(), span->shared_memory());
  base::trace_event::MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(name);
  dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  static_cast<uint64_t>(span->length() * block_size));

  pmd->AddSuballocation(
      dump->guid(),
      base::StringPrintf("discardable/segment_%d/allocated_objects", id_));
  return dump;
}
-
// Forwards to the heap, which holds the span bookkeeping needed to
// compute per-segment statistics.
void DiscardableSharedMemoryHeap::ScopedMemorySegment::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) const {
  heap_->OnMemoryDump(shared_memory_.get(), size_, id_, pmd);
}
-
// |block_size| is the heap's allocation granularity. It must be a
// non-zero power of two so that alignment checks and block-index
// arithmetic can use bit masks (see Grow()).
DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}
-
DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  // Destroying the segments runs ReleaseMemory() for each one, which
  // unregisters their spans and updates the block counters. Afterwards
  // every counter must be zero and every free list empty.
  memory_segments_.clear();
  DCHECK_EQ(num_blocks_, 0u);
  DCHECK_EQ(num_free_blocks_, 0u);
  DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                          [](const base::LinkedList<Span>& free_spans) {
                            return !free_spans.empty();
                          }),
            0);
}
-
// Adds a new memory segment to the heap and returns a single span
// covering all of it. The heap takes ownership of |shared_memory|;
// |deleted_callback| runs when the segment is eventually released.
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Grow(
    std::unique_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  // Spans are keyed by block index: the memory address divided by
  // |block_size_|. This is well-defined because segments are block-size
  // aligned (checked above).
  std::unique_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  num_blocks_ += span->length_;

  // Start tracking if segment is resident by adding it to |memory_segments_|.
  memory_segments_.push_back(new ScopedMemorySegment(
      this, std::move(shared_memory), size, id, deleted_callback));

  return span;
}
-
// Returns |span| to the free lists, first coalescing it with any adjacent
// free spans so that the free lists hold maximal runs of blocks.
void DiscardableSharedMemoryHeap::MergeIntoFreeLists(
    std::unique_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // First add length of |span| to |num_free_blocks_|.
  num_free_blocks_ += span->length_;

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    std::unique_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
    // Drop |span|'s old start key before extending it backwards. A
    // one-block span shares a single map entry for start and end, which
    // is overwritten below instead.
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    std::unique_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
    // Drop |span|'s old end key before extending it forwards.
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  InsertIntoFreeList(std::move(span));
}
-
// Splits |span| after |blocks| blocks and returns the trailing remainder
// as a new span. |span| keeps the first |blocks| blocks. |blocks| must be
// non-zero and strictly smaller than the span's length.
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  std::unique_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  // A one-block leftover reuses |span|'s old end-key map entry; longer
  // leftovers must not already have a start key registered.
  DCHECK(leftover->length_ == 1 ||
         spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  // Re-key the truncated |span| under its new last block.
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover;
}
-
// Finds and carves a free span of at least |blocks| blocks but no more
// than |blocks| + |slack|, or returns nullptr. |free_spans_| is indexed
// by exact span length, except for the last list which holds all spans
// too long for an exact-length list (the "overflow" list).
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
  DCHECK(blocks);

  size_t length = blocks;
  size_t max_length = blocks + slack;

  // Search array of free lists for a suitable span. The loop walks the
  // exact-length lists only (index < arraysize(free_spans_) - 1).
  while (length - 1 < arraysize(free_spans_) - 1) {
    const base::LinkedList<Span>& free_spans = free_spans_[length - 1];
    if (!free_spans.empty()) {
      // Return the most recently used span located in tail.
      return Carve(free_spans.tail()->value(), blocks);
    }

    // Return early after surpassing |max_length|.
    if (++length > max_length)
      return nullptr;
  }

  const base::LinkedList<Span>& overflow_free_spans =
      free_spans_[arraysize(free_spans_) - 1];

  // Search overflow free list for a suitable span. Starting with the most
  // recently used span located in tail and moving towards head.
  for (base::LinkNode<Span>* node = overflow_free_spans.tail();
       node != overflow_free_spans.end(); node = node->previous()) {
    Span* span = node->value();
    if (span->length_ >= blocks && span->length_ <= max_length)
      return Carve(span, blocks);
  }

  return nullptr;
}
-
-void DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
- // Erase all free segments after rearranging the segments in such a way
- // that used segments precede all free segments.
- memory_segments_.erase(
- std::partition(
- memory_segments_.begin(), memory_segments_.end(),
- [](const ScopedMemorySegment* segment) { return segment->IsUsed(); }),
- memory_segments_.end());
-}
-
-void DiscardableSharedMemoryHeap::ReleasePurgedMemory() {
- // Erase all purged segments after rearranging the segments in such a way
- // that resident segments precede all purged segments.
- memory_segments_.erase(
- std::partition(memory_segments_.begin(), memory_segments_.end(),
- [](const ScopedMemorySegment* segment) {
- return segment->IsResident();
- }),
- memory_segments_.end());
-}
-
// Returns the total size of the heap (all segments) in bytes.
size_t DiscardableSharedMemoryHeap::GetSize() const {
  return num_blocks_ * block_size_;
}
-
// Returns the number of bytes currently sitting in the free lists.
size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
  return num_free_blocks_ * block_size_;
}
-
-bool DiscardableSharedMemoryHeap::OnMemoryDump(
- base::trace_event::ProcessMemoryDump* pmd) {
- std::for_each(
- memory_segments_.begin(), memory_segments_.end(),
- [pmd](const ScopedMemorySegment* segment) {
- segment->OnMemoryDump(pmd);
- });
- return true;
-}
-
// Links |span| into the free list matching its length; spans longer than
// the number of exact-length lists go into the last (overflow) list.
// Appending at the tail keeps the most recently used span there, which is
// where SearchFreeLists starts looking. The list takes ownership.
void DiscardableSharedMemoryHeap::InsertIntoFreeList(
    std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
  free_spans_[index].Append(span.release());
}
-
// Unlinks |span| from its free list and hands ownership back to the
// caller.
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  DCHECK(IsInFreeList(span));
  span->RemoveFromList();
  return base::WrapUnique(span);
}
-
// Removes |span| from its free list, trims it to exactly |blocks| blocks
// and returns it. Leftover blocks, if any, are registered as a new free
// span. Callers guarantee |span| is at least |blocks| blocks long.
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  std::unique_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    std::unique_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    leftover->set_is_locked(false);
    // A one-block leftover reuses |serving|'s old end-key map entry.
    DCHECK(extra == 1 || spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    InsertIntoFreeList(std::move(leftover));

    serving->length_ = blocks;
    // Re-key the truncated span under its new last block.
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  // |serving| is no longer in the free list, remove its length from
  // |num_free_blocks_|.
  DCHECK_GE(num_free_blocks_, serving->length_);
  num_free_blocks_ -= serving->length_;

  return serving;
}
-
// Records |span| in |spans_| under both its first and last block index so
// that neighboring spans can find it during coalescing.
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}
-
// Removes |span|'s entries (first and, for multi-block spans, last block
// index) from |spans_|. The span itself is not freed.
void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}
-
// Returns true if any part of the segment backed by |shared_memory| is
// still allocated. A fully free segment consists of a single free span
// covering all of its blocks.
bool DiscardableSharedMemoryHeap::IsMemoryUsed(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t length = size / block_size_;
  DCHECK(spans_.find(offset) != spans_.end());
  Span* span = spans_[offset];
  DCHECK_LE(span->length_, length);
  // Memory is used if first span is not in free list or shorter than segment.
  return !IsInFreeList(span) || span->length_ != length;
}
-
// Thin wrapper so ScopedMemorySegment can query residency through the
// heap.
bool DiscardableSharedMemoryHeap::IsMemoryResident(
    const base::DiscardableSharedMemory* shared_memory) {
  return shared_memory->IsMemoryResident();
}
-
// Unregisters every span belonging to the segment backed by
// |shared_memory| and updates the block counters. Spans sitting in a free
// list are removed from it (and deleted when the returned unique_ptr goes
// out of scope); spans still owned elsewhere are left alive but with
// |shared_memory_| cleared — see the null check in
// CreateMemoryAllocatorDump.
void DiscardableSharedMemoryHeap::ReleaseMemory(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    DCHECK_GE(num_blocks_, span->length_);
    num_blocks_ -= span->length_;

    // If |span| is in the free list, remove it and update |num_free_blocks_|.
    if (IsInFreeList(span)) {
      DCHECK_GE(num_free_blocks_, span->length_);
      num_free_blocks_ -= span->length_;
      RemoveFromFreeList(span);
    }
  }
}
-
// Emits trace-event allocator dumps for one segment: a per-segment dump
// with allocated and virtual sizes, an "allocated_objects" child dump
// with object counts and locked size, and a weak shared global dump that
// deduplicates the segment across the browser and child processes.
void DiscardableSharedMemoryHeap::OnMemoryDump(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size,
    int32_t segment_id,
    base::trace_event::ProcessMemoryDump* pmd) {
  size_t allocated_objects_count = 0;
  size_t allocated_objects_size_in_blocks = 0;
  size_t locked_objects_size_in_blocks = 0;
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  // Walk the segment span by span, counting spans that are not free.
  while (offset < end) {
    Span* span = spans_[offset];
    if (!IsInFreeList(span)) {
      allocated_objects_size_in_blocks += span->length_;
      locked_objects_size_in_blocks += span->is_locked_ ? span->length_ : 0;
      allocated_objects_count++;
    }
    offset += span->length_;
  }
  size_t allocated_objects_size_in_bytes =
      allocated_objects_size_in_blocks * block_size_;
  size_t locked_objects_size_in_bytes =
      locked_objects_size_in_blocks * block_size_;

  std::string segment_dump_name =
      base::StringPrintf("discardable/segment_%d", segment_id);
  base::trace_event::MemoryAllocatorDump* segment_dump =
      pmd->CreateAllocatorDump(segment_dump_name);
  // The size is added here so that telemetry picks up the size. Usually it is
  // just enough to add it to the global dump.
  segment_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                          base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                          allocated_objects_size_in_bytes);
  segment_dump->AddScalar("virtual_size",
                          base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                          size);

  base::trace_event::MemoryAllocatorDump* obj_dump =
      pmd->CreateAllocatorDump(segment_dump_name + "/allocated_objects");
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectCount,
                      base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                      allocated_objects_count);
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                      allocated_objects_size_in_bytes);
  obj_dump->AddScalar("locked_size",
                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                      locked_objects_size_in_bytes);

  // Emit an ownership edge towards a global allocator dump node. This allows
  // to avoid double-counting segments when both browser and child process emit
  // them. In the special case of single-process-mode, this will be the only
  // dumper active and the single ownership edge will become a no-op in the UI.
  // The global dump is created as a weak dump so that the segment is removed if
  // the browser does not dump it (segment was purged).
  const uint64_t tracing_process_id =
      base::trace_event::MemoryDumpManager::GetInstance()
          ->GetTracingProcessId();
  base::trace_event::MemoryAllocatorDumpGuid shared_segment_guid =
      GetSegmentGUIDForTracing(tracing_process_id, segment_id);
  pmd->CreateWeakSharedGlobalAllocatorDump(shared_segment_guid);

  // The size is added to the global dump so that it gets propagated to both the
  // dumps associated.
  pmd->GetSharedGlobalAllocatorDump(shared_segment_guid)
      ->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  allocated_objects_size_in_bytes);

  // By creating an edge with a higher |importance| (w.r.t. browser-side dumps)
  // the tracing UI will account the effective size of the segment to the child.
  const int kImportance = 2;
  pmd->AddOwnershipEdge(segment_dump->guid(), shared_segment_guid, kImportance);
}
-
// Builds a GUID for |segment_id| from the tracing process id so that
// dumps of the same segment emitted by different processes can be matched
// up in the tracing UI.
// static
base::trace_event::MemoryAllocatorDumpGuid
DiscardableSharedMemoryHeap::GetSegmentGUIDForTracing(
    uint64_t tracing_process_id,
    int32_t segment_id) {
  return base::trace_event::MemoryAllocatorDumpGuid(base::StringPrintf(
      "discardable-x-process/%" PRIx64 "/%d", tracing_process_id, segment_id));
}
-
// Creates an allocator dump named |name| for |span|. A span whose backing
// memory was already released (null shared_memory(), see ReleaseMemory)
// is reported with size 0; otherwise the dump is delegated to the segment
// that contains the span.
base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::CreateMemoryAllocatorDump(
    Span* span,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  if (!span->shared_memory()) {
    base::trace_event::MemoryAllocatorDump* dump =
        pmd->CreateAllocatorDump(name);
    dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                    base::trace_event::MemoryAllocatorDump::kUnitsBytes, 0u);
    return dump;
  }

  ScopedVector<ScopedMemorySegment>::const_iterator it =
      std::find_if(memory_segments_.begin(), memory_segments_.end(),
                   [span](const ScopedMemorySegment* segment) {
                     return segment->ContainsSpan(span);
                   });
  DCHECK(it != memory_segments_.end());
  return (*it)->CreateMemoryAllocatorDump(span, block_size_, name, pmd);
}
-
-} // namespace content
« no previous file with comments | « content/common/discardable_shared_memory_heap.h ('k') | content/common/discardable_shared_memory_heap_perftest.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698