// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/discardable_shared_memory_heap.h"

#include <algorithm>
#include <utility>

#include "base/format_macros.h"
#include "base/macros.h"
#include "base/memory/discardable_shared_memory.h"
#include "base/memory/ptr_util.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/memory_dump_manager.h"

namespace content {
namespace {

bool IsPowerOfTwo(size_t x) {
  return (x & (x - 1)) == 0;
}

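// A span is linked into one of |free_spans_| iff it is free; a LinkNode that
// is not in any list has null previous() and next() pointers.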
bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
  return span->previous() || span->next();
}

}  // namespace

DiscardableSharedMemoryHeap::Span::Span(
    base::DiscardableSharedMemory* shared_memory,
    size_t start,
    size_t length)
    : shared_memory_(shared_memory),
      start_(start),
      length_(length),
      is_locked_(false) {}

DiscardableSharedMemoryHeap::Span::~Span() {
}

DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
    DiscardableSharedMemoryHeap* heap,
    std::unique_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback)
    : heap_(heap),
      shared_memory_(std::move(shared_memory)),
      size_(size),
      id_(id),
      deleted_callback_(deleted_callback) {}

DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  deleted_callback_.Run();
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const {
  return heap_->IsMemoryResident(shared_memory_.get());
}

bool DiscardableSharedMemoryHeap::ScopedMemorySegment::ContainsSpan(
    Span* span) const {
  return shared_memory_.get() == span->shared_memory();
}

base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::ScopedMemorySegment::CreateMemoryAllocatorDump(
    Span* span,
    size_t block_size,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  DCHECK_EQ(shared_memory_.get(), span->shared_memory());
  base::trace_event::MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(name);
  dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  static_cast<uint64_t>(span->length() * block_size));

  pmd->AddSuballocation(
      dump->guid(),
      base::StringPrintf("discardable/segment_%d/allocated_objects", id_));
  return dump;
}

void DiscardableSharedMemoryHeap::ScopedMemorySegment::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) const {
  heap_->OnMemoryDump(shared_memory_.get(), size_, id_, pmd);
}

DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size)
    : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) {
  DCHECK_NE(block_size_, 0u);
  DCHECK(IsPowerOfTwo(block_size_));
}

DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() {
  memory_segments_.clear();
  DCHECK_EQ(num_blocks_, 0u);
  DCHECK_EQ(num_free_blocks_, 0u);
  DCHECK_EQ(std::count_if(free_spans_, free_spans_ + arraysize(free_spans_),
                          [](const base::LinkedList<Span>& free_spans) {
                            return !free_spans.empty();
                          }),
            0);
}

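// Registers a new shared memory segment with the heap and returns a span
// covering all of its blocks. The heap keeps ownership of the segment via
// |memory_segments_| and runs |deleted_callback| when the segment is
// released.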
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Grow(
    std::unique_ptr<base::DiscardableSharedMemory> shared_memory,
    size_t size,
    int32_t id,
    const base::Closure& deleted_callback) {
  // Memory must be aligned to block size.
  DCHECK_EQ(
      reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1),
      0u);
  DCHECK_EQ(size & (block_size_ - 1), 0u);

  std::unique_ptr<Span> span(
      new Span(shared_memory.get(),
               reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
               size / block_size_));
  DCHECK(spans_.find(span->start_) == spans_.end());
  DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
  RegisterSpan(span.get());

  num_blocks_ += span->length_;

  // Start tracking whether the segment is resident by adding it to
  // |memory_segments_|.
  memory_segments_.push_back(new ScopedMemorySegment(
      this, std::move(shared_memory), size, id, deleted_callback));

  return span;
}

void DiscardableSharedMemoryHeap::MergeIntoFreeLists(
    std::unique_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  // First add length of |span| to |num_free_blocks_|.
  num_free_blocks_ += span->length_;

  // Merge with previous span if possible.
  SpanMap::iterator prev_it = spans_.find(span->start_ - 1);
  if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
    std::unique_ptr<Span> prev = RemoveFromFreeList(prev_it->second);
    DCHECK_EQ(prev->start_ + prev->length_, span->start_);
    UnregisterSpan(prev.get());
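    // The key for |span|'s old first block becomes an interior block after
    // the merge and must be dropped. For a single-block span that key also
    // serves as the merged span's last-block key, so it is kept.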
    if (span->length_ > 1)
      spans_.erase(span->start_);
    span->start_ -= prev->length_;
    span->length_ += prev->length_;
    spans_[span->start_] = span.get();
  }

  // Merge with next span if possible.
  SpanMap::iterator next_it = spans_.find(span->start_ + span->length_);
  if (next_it != spans_.end() && IsInFreeList(next_it->second)) {
    std::unique_ptr<Span> next = RemoveFromFreeList(next_it->second);
    DCHECK_EQ(next->start_, span->start_ + span->length_);
    UnregisterSpan(next.get());
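    // Likewise, the key for |span|'s old last block becomes interior unless
    // the span covers a single block, in which case that key doubles as its
    // first-block key and is kept.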
    if (span->length_ > 1)
      spans_.erase(span->start_ + span->length_ - 1);
    span->length_ += next->length_;
    spans_[span->start_ + span->length_ - 1] = span.get();
  }

  InsertIntoFreeList(std::move(span));
}

std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
  DCHECK(blocks);
  DCHECK_LT(blocks, span->length_);

  std::unique_ptr<Span> leftover(new Span(
      span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
  DCHECK(leftover->length_ == 1 ||
         spans_.find(leftover->start_) == spans_.end());
  RegisterSpan(leftover.get());
  spans_[span->start_ + blocks - 1] = span;
  span->length_ = blocks;
  return leftover;
}

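// Best-fit style search: the free list matching |blocks| exactly is tried
// first, then progressively longer lists up to |blocks| + |slack|, and
// finally the overflow list that holds all very long spans.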
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::SearchFreeLists(size_t blocks, size_t slack) {
  DCHECK(blocks);

  size_t length = blocks;
  size_t max_length = blocks + slack;

  // Search array of free lists for a suitable span.
  while (length - 1 < arraysize(free_spans_) - 1) {
    const base::LinkedList<Span>& free_spans = free_spans_[length - 1];
    if (!free_spans.empty()) {
      // Return the most recently used span located in tail.
      return Carve(free_spans.tail()->value(), blocks);
    }

    // Return early after surpassing |max_length|.
    if (++length > max_length)
      return nullptr;
  }

  const base::LinkedList<Span>& overflow_free_spans =
      free_spans_[arraysize(free_spans_) - 1];

  // Search the overflow free list for a suitable span, starting with the most
  // recently used span at the tail and moving towards the head.
  for (base::LinkNode<Span>* node = overflow_free_spans.tail();
       node != overflow_free_spans.end(); node = node->previous()) {
    Span* span = node->value();
    if (span->length_ >= blocks && span->length_ <= max_length)
      return Carve(span, blocks);
  }

  return nullptr;
}

void DiscardableSharedMemoryHeap::ReleaseFreeMemory() {
  // Erase all free segments after rearranging the segments in such a way
  // that used segments precede all free segments.
  memory_segments_.erase(
      std::partition(
          memory_segments_.begin(), memory_segments_.end(),
          [](const ScopedMemorySegment* segment) { return segment->IsUsed(); }),
      memory_segments_.end());
}

void DiscardableSharedMemoryHeap::ReleasePurgedMemory() {
  // Erase all purged segments after rearranging the segments in such a way
  // that resident segments precede all purged segments.
  memory_segments_.erase(
      std::partition(memory_segments_.begin(), memory_segments_.end(),
                     [](const ScopedMemorySegment* segment) {
                       return segment->IsResident();
                     }),
      memory_segments_.end());
}

size_t DiscardableSharedMemoryHeap::GetSize() const {
  return num_blocks_ * block_size_;
}

size_t DiscardableSharedMemoryHeap::GetSizeOfFreeLists() const {
  return num_free_blocks_ * block_size_;
}

bool DiscardableSharedMemoryHeap::OnMemoryDump(
    base::trace_event::ProcessMemoryDump* pmd) {
  std::for_each(
      memory_segments_.begin(), memory_segments_.end(),
      [pmd](const ScopedMemorySegment* segment) {
        segment->OnMemoryDump(pmd);
      });
  return true;
}

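// Free spans are binned by length, one list per length, with spans longer
// than the number of lists sharing the last (overflow) list. Appending at
// the tail keeps each list ordered from least to most recently used.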
void DiscardableSharedMemoryHeap::InsertIntoFreeList(
    std::unique_ptr<DiscardableSharedMemoryHeap::Span> span) {
  DCHECK(!IsInFreeList(span.get()));
  size_t index = std::min(span->length_, arraysize(free_spans_)) - 1;
  free_spans_[index].Append(span.release());
}

std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) {
  DCHECK(IsInFreeList(span));
  span->RemoveFromList();
  return base::WrapUnique(span);
}

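// Removes |span| from its free list and returns a span of exactly |blocks|
// blocks, re-inserting any leftover tail blocks as a new free span.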
std::unique_ptr<DiscardableSharedMemoryHeap::Span>
DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {
  std::unique_ptr<Span> serving = RemoveFromFreeList(span);

  const int extra = serving->length_ - blocks;
  if (extra) {
    std::unique_ptr<Span> leftover(
        new Span(serving->shared_memory_, serving->start_ + blocks, extra));
    leftover->set_is_locked(false);
    DCHECK(extra == 1 || spans_.find(leftover->start_) == spans_.end());
    RegisterSpan(leftover.get());

    // No need to coalesce as the previous span of |leftover| was just split
    // and the next span of |leftover| was not previously coalesced with
    // |span|.
    InsertIntoFreeList(std::move(leftover));

    serving->length_ = blocks;
    spans_[serving->start_ + blocks - 1] = serving.get();
  }

  // |serving| is no longer in the free list, remove its length from
  // |num_free_blocks_|.
  DCHECK_GE(num_free_blocks_, serving->length_);
  num_free_blocks_ -= serving->length_;

  return serving;
}

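// |spans_| maps both the first and the last block of a span to the Span
// object so that MergeIntoFreeLists() can find the neighbors of a freed span
// with a single lookup on each side.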
void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) {
  spans_[span->start_] = span;
  if (span->length_ > 1)
    spans_[span->start_ + span->length_ - 1] = span;
}

void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) {
  DCHECK(spans_.find(span->start_) != spans_.end());
  DCHECK_EQ(spans_[span->start_], span);
  spans_.erase(span->start_);
  if (span->length_ > 1) {
    DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end());
    DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span);
    spans_.erase(span->start_ + span->length_ - 1);
  }
}

bool DiscardableSharedMemoryHeap::IsMemoryUsed(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t length = size / block_size_;
  DCHECK(spans_.find(offset) != spans_.end());
  Span* span = spans_[offset];
  DCHECK_LE(span->length_, length);
  // Memory is used if first span is not in free list or shorter than segment.
  return !IsInFreeList(span) || span->length_ != length;
}

bool DiscardableSharedMemoryHeap::IsMemoryResident(
    const base::DiscardableSharedMemory* shared_memory) {
  return shared_memory->IsMemoryResident();
}

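// Walks the segment block by block, unregistering every span it contains and
// removing free spans from the free lists before the backing memory goes
// away.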
void DiscardableSharedMemoryHeap::ReleaseMemory(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size) {
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    DCHECK(spans_.find(offset) != spans_.end());
    Span* span = spans_[offset];
    DCHECK_EQ(span->shared_memory_, shared_memory);
    span->shared_memory_ = nullptr;
    UnregisterSpan(span);

    offset += span->length_;

    DCHECK_GE(num_blocks_, span->length_);
    num_blocks_ -= span->length_;

    // If |span| is in the free list, remove it and update |num_free_blocks_|.
    if (IsInFreeList(span)) {
      DCHECK_GE(num_free_blocks_, span->length_);
      num_free_blocks_ -= span->length_;
      RemoveFromFreeList(span);
    }
  }
}

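// Emits memory dump information for a single segment: an allocator dump for
// the segment itself, one for its allocated objects, and an ownership edge
// towards the cross-process global dump shared with the browser.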
void DiscardableSharedMemoryHeap::OnMemoryDump(
    const base::DiscardableSharedMemory* shared_memory,
    size_t size,
    int32_t segment_id,
    base::trace_event::ProcessMemoryDump* pmd) {
  size_t allocated_objects_count = 0;
  size_t allocated_objects_size_in_blocks = 0;
  size_t locked_objects_size_in_blocks = 0;
  size_t offset =
      reinterpret_cast<size_t>(shared_memory->memory()) / block_size_;
  size_t end = offset + size / block_size_;
  while (offset < end) {
    Span* span = spans_[offset];
    if (!IsInFreeList(span)) {
      allocated_objects_size_in_blocks += span->length_;
      locked_objects_size_in_blocks += span->is_locked_ ? span->length_ : 0;
      allocated_objects_count++;
    }
    offset += span->length_;
  }
  size_t allocated_objects_size_in_bytes =
      allocated_objects_size_in_blocks * block_size_;
  size_t locked_objects_size_in_bytes =
      locked_objects_size_in_blocks * block_size_;

  std::string segment_dump_name =
      base::StringPrintf("discardable/segment_%d", segment_id);
  base::trace_event::MemoryAllocatorDump* segment_dump =
      pmd->CreateAllocatorDump(segment_dump_name);
  // The size is also added here so that telemetry picks it up; usually adding
  // it to the global dump alone would be enough.
  segment_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                          base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                          allocated_objects_size_in_bytes);
  segment_dump->AddScalar("virtual_size",
                          base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                          size);

  base::trace_event::MemoryAllocatorDump* obj_dump =
      pmd->CreateAllocatorDump(segment_dump_name + "/allocated_objects");
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectCount,
                      base::trace_event::MemoryAllocatorDump::kUnitsObjects,
                      allocated_objects_count);
  obj_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                      allocated_objects_size_in_bytes);
  obj_dump->AddScalar("locked_size",
                      base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                      locked_objects_size_in_bytes);

  // Emit an ownership edge towards a global allocator dump node. This avoids
  // double-counting segments when both the browser and child process emit
  // them. In the special case of single-process mode, this will be the only
  // active dumper and the single ownership edge becomes a no-op in the UI.
  // The global dump is created as a weak dump so that the segment is removed
  // if the browser does not dump it (i.e., the segment was purged).
  const uint64_t tracing_process_id =
      base::trace_event::MemoryDumpManager::GetInstance()
          ->GetTracingProcessId();
  base::trace_event::MemoryAllocatorDumpGuid shared_segment_guid =
      GetSegmentGUIDForTracing(tracing_process_id, segment_id);
  pmd->CreateWeakSharedGlobalAllocatorDump(shared_segment_guid);

  // The size is added to the global dump so that it gets propagated to both
  // of the associated dumps.
  pmd->GetSharedGlobalAllocatorDump(shared_segment_guid)
      ->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                  base::trace_event::MemoryAllocatorDump::kUnitsBytes,
                  allocated_objects_size_in_bytes);

  // By creating an edge with a higher |importance| than the browser-side
  // dumps, the tracing UI will attribute the effective size of the segment to
  // the child.
  const int kImportance = 2;
  pmd->AddOwnershipEdge(segment_dump->guid(), shared_segment_guid, kImportance);
}

// static
base::trace_event::MemoryAllocatorDumpGuid
DiscardableSharedMemoryHeap::GetSegmentGUIDForTracing(
    uint64_t tracing_process_id,
    int32_t segment_id) {
  return base::trace_event::MemoryAllocatorDumpGuid(base::StringPrintf(
      "discardable-x-process/%" PRIx64 "/%d", tracing_process_id, segment_id));
}

base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::CreateMemoryAllocatorDump(
    Span* span,
    const char* name,
    base::trace_event::ProcessMemoryDump* pmd) const {
  if (!span->shared_memory()) {
    base::trace_event::MemoryAllocatorDump* dump =
        pmd->CreateAllocatorDump(name);
    dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
                    base::trace_event::MemoryAllocatorDump::kUnitsBytes, 0u);
    return dump;
  }

  ScopedVector<ScopedMemorySegment>::const_iterator it =
      std::find_if(memory_segments_.begin(), memory_segments_.end(),
                   [span](const ScopedMemorySegment* segment) {
                     return segment->ContainsSpan(span);
                   });
  DCHECK(it != memory_segments_.end());
  return (*it)->CreateMemoryAllocatorDump(span, block_size_, name, pmd);
}

}  // namespace content