Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/common/discardable_shared_memory_heap.h" | 5 #include "content/common/discardable_shared_memory_heap.h" |
| 6 | 6 |
| 7 #include "base/memory/discardable_shared_memory.h" | 7 #include "base/memory/discardable_shared_memory.h" |
| 8 | 8 |
| 9 namespace content { | 9 namespace content { |
| 10 namespace { | 10 namespace { |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 22 DiscardableSharedMemoryHeap::Span::Span( | 22 DiscardableSharedMemoryHeap::Span::Span( |
| 23 base::DiscardableSharedMemory* shared_memory, | 23 base::DiscardableSharedMemory* shared_memory, |
| 24 size_t start, | 24 size_t start, |
| 25 size_t length) | 25 size_t length) |
| 26 : shared_memory_(shared_memory), start_(start), length_(length) { | 26 : shared_memory_(shared_memory), start_(start), length_(length) { |
| 27 } | 27 } |
| 28 | 28 |
| 29 DiscardableSharedMemoryHeap::Span::~Span() { | 29 DiscardableSharedMemoryHeap::Span::~Span() { |
| 30 } | 30 } |
| 31 | 31 |
| 32 DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment( | |
| 33 scoped_ptr<base::DiscardableSharedMemory> shared_memory, | |
| 34 DiscardableSharedMemoryHeap* heap) | |
| 35 : shared_memory_(shared_memory.Pass()), heap_(heap) { | |
| 36 } | |
| 37 | |
| 38 DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() { | |
| 39 heap_->ReleaseMemory(shared_memory_.get()); | |
| 40 // Purge memory. This has no effect if already purged. | |
| 41 shared_memory_->Purge(base::Time::Now()); | |
| 42 } | |
| 43 | |
| 44 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const { | |
| 45 return heap_->IsMemoryUsed(shared_memory_.get()); | |
| 46 } | |
| 47 | |
| 48 bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsResident() const { | |
| 49 return heap_->IsMemoryResident(shared_memory_.get()); | |
| 50 } | |
| 51 | |
| 32 DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size) | 52 DiscardableSharedMemoryHeap::DiscardableSharedMemoryHeap(size_t block_size) |
| 33 : block_size_(block_size) { | 53 : block_size_(block_size), num_blocks_(0), num_free_blocks_(0) { |
| 34 DCHECK_NE(block_size_, 0u); | 54 DCHECK_NE(block_size_, 0u); |
| 35 DCHECK(IsPowerOfTwo(block_size_)); | 55 DCHECK(IsPowerOfTwo(block_size_)); |
| 36 } | 56 } |
| 37 | 57 |
| 38 DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() { | 58 DiscardableSharedMemoryHeap::~DiscardableSharedMemoryHeap() { |
| 39 for (auto shared_memory : shared_memory_segments_) | 59 memory_segments_.clear(); |
| 40 ReleaseMemory(shared_memory); | 60 DCHECK_EQ(num_blocks_, 0u); |
| 41 | 61 DCHECK_EQ(num_free_blocks_, 0u); |
|
Avi (use Gerrit)
2015/03/09 16:03:37
Expected value as the first parameter.
| |
| 42 DCHECK(free_spans_.empty()); | 62 DCHECK(free_spans_.empty()); |
| 43 } | 63 } |
| 44 | 64 |
| 45 scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow( | 65 scoped_ptr<DiscardableSharedMemoryHeap::Span> DiscardableSharedMemoryHeap::Grow( |
| 46 scoped_ptr<base::DiscardableSharedMemory> shared_memory, | 66 scoped_ptr<base::DiscardableSharedMemory> shared_memory) { |
| 47 size_t size) { | |
| 48 // Memory must be aligned to block size. | 67 // Memory must be aligned to block size. |
| 49 DCHECK_EQ( | 68 DCHECK_EQ( |
| 50 reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1), | 69 reinterpret_cast<size_t>(shared_memory->memory()) & (block_size_ - 1), |
| 51 0u); | 70 0u); |
| 52 DCHECK_EQ(size & (block_size_ - 1), 0u); | 71 DCHECK_EQ(shared_memory->mapped_size() & (block_size_ - 1), 0u); |
| 53 | 72 |
| 54 scoped_ptr<Span> span( | 73 scoped_ptr<Span> span( |
| 55 new Span(shared_memory.get(), | 74 new Span(shared_memory.get(), |
| 56 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_, | 75 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_, |
| 57 size / block_size_)); | 76 shared_memory->mapped_size() / block_size_)); |
| 58 DCHECK(spans_.find(span->start_) == spans_.end()); | 77 DCHECK(spans_.find(span->start_) == spans_.end()); |
| 59 DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end()); | 78 DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end()); |
| 60 RegisterSpan(span.get()); | 79 RegisterSpan(span.get()); |
| 61 | 80 |
| 62 // Start tracking if segment is resident by adding it to | 81 num_blocks_ += span->length_; |
| 63 // |shared_memory_segments_|. | 82 |
| 64 shared_memory_segments_.push_back(shared_memory.release()); | 83 // Start tracking if segment is resident by adding it to |memory_segments_|. |
| 84 memory_segments_.push_back( | |
| 85 make_linked_ptr(new ScopedMemorySegment(shared_memory.Pass(), this))); | |
| 65 | 86 |
| 66 return span.Pass(); | 87 return span.Pass(); |
| 67 } | 88 } |
| 68 | 89 |
| 69 void DiscardableSharedMemoryHeap::MergeIntoFreeList(scoped_ptr<Span> span) { | 90 void DiscardableSharedMemoryHeap::MergeIntoFreeList(scoped_ptr<Span> span) { |
| 70 DCHECK(span->shared_memory_); | 91 DCHECK(span->shared_memory_); |
| 71 | 92 |
| 93 // First add length of |span| to |num_free_blocks_|. | |
| 94 num_free_blocks_ += span->length_; | |
| 95 | |
| 72 // Merge with previous span if possible. | 96 // Merge with previous span if possible. |
| 73 SpanMap::iterator prev_it = spans_.find(span->start_ - 1); | 97 SpanMap::iterator prev_it = spans_.find(span->start_ - 1); |
| 74 if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) { | 98 if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) { |
| 75 scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second); | 99 scoped_ptr<Span> prev = RemoveFromFreeList(prev_it->second); |
| 76 DCHECK_EQ(prev->start_ + prev->length_, span->start_); | 100 DCHECK_EQ(prev->start_ + prev->length_, span->start_); |
| 77 UnregisterSpan(prev.get()); | 101 UnregisterSpan(prev.get()); |
| 78 if (span->length_ > 1) | 102 if (span->length_ > 1) |
| 79 spans_.erase(span->start_); | 103 spans_.erase(span->start_); |
| 80 span->start_ -= prev->length_; | 104 span->start_ -= prev->length_; |
| 81 span->length_ += prev->length_; | 105 span->length_ += prev->length_; |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 135 if ((span->length_ == best->length_) && (span->start_ > best->start_)) | 159 if ((span->length_ == best->length_) && (span->start_ > best->start_)) |
| 136 continue; | 160 continue; |
| 137 } | 161 } |
| 138 | 162 |
| 139 best = span; | 163 best = span; |
| 140 } | 164 } |
| 141 | 165 |
| 142 return best ? Carve(best, blocks) : nullptr; | 166 return best ? Carve(best, blocks) : nullptr; |
| 143 } | 167 } |
| 144 | 168 |
| 145 size_t DiscardableSharedMemoryHeap::ReleaseFreeMemory() { | 169 void DiscardableSharedMemoryHeap::ReleaseFreeMemory() { |
| 146 size_t bytes_released = 0; | 170 memory_segments_.erase( |
| 147 size_t i = 0; | 171 std::remove_if(memory_segments_.begin(), memory_segments_.end(), |
| 172 [](const linked_ptr<ScopedMemorySegment>& segment) { | |
| 173 return !segment->IsUsed(); | |
| 174 }), | |
| 175 memory_segments_.end()); | |
| 176 } | |
| 148 | 177 |
| 149 // Release memory for all non-resident segments. | 178 void DiscardableSharedMemoryHeap::ReleasePurgedMemory() { |
| 150 while (i < shared_memory_segments_.size()) { | 179 memory_segments_.erase( |
| 151 base::DiscardableSharedMemory* shared_memory = shared_memory_segments_[i]; | 180 std::remove_if(memory_segments_.begin(), memory_segments_.end(), |
| 181 [](const linked_ptr<ScopedMemorySegment>& segment) { | |
| 182 return !segment->IsResident(); | |
| 183 }), | |
| 184 memory_segments_.end()); | |
| 185 } | |
| 152 | 186 |
| 153 // Skip segment if still resident. | 187 size_t DiscardableSharedMemoryHeap::GetSize() const { |
| 154 if (shared_memory->IsMemoryResident()) { | 188 return num_blocks_ * block_size_; |
| 155 ++i; | 189 } |
| 156 continue; | |
| 157 } | |
| 158 | 190 |
| 159 bytes_released += shared_memory->mapped_size(); | 191 size_t DiscardableSharedMemoryHeap::GetFreeListSize() const { |
| 160 | 192 return num_free_blocks_ * block_size_; |
| 161 // Release the memory and unregister all associated spans. | |
| 162 ReleaseMemory(shared_memory); | |
| 163 | |
| 164 std::swap(shared_memory_segments_[i], shared_memory_segments_.back()); | |
| 165 shared_memory_segments_.pop_back(); | |
| 166 } | |
| 167 | |
| 168 return bytes_released; | |
| 169 } | 193 } |
| 170 | 194 |
| 171 scoped_ptr<DiscardableSharedMemoryHeap::Span> | 195 scoped_ptr<DiscardableSharedMemoryHeap::Span> |
| 172 DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) { | 196 DiscardableSharedMemoryHeap::RemoveFromFreeList(Span* span) { |
| 173 span->RemoveFromList(); | 197 span->RemoveFromList(); |
| 174 return make_scoped_ptr(span); | 198 return make_scoped_ptr(span); |
| 175 } | 199 } |
| 176 | 200 |
| 177 scoped_ptr<DiscardableSharedMemoryHeap::Span> | 201 scoped_ptr<DiscardableSharedMemoryHeap::Span> |
| 178 DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) { | 202 DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) { |
| 179 scoped_ptr<Span> serving = RemoveFromFreeList(span); | 203 scoped_ptr<Span> serving = RemoveFromFreeList(span); |
| 180 | 204 |
| 181 const int extra = serving->length_ - blocks; | 205 const int extra = serving->length_ - blocks; |
| 182 if (extra) { | 206 if (extra) { |
| 183 scoped_ptr<Span> leftover( | 207 scoped_ptr<Span> leftover( |
| 184 new Span(serving->shared_memory_, serving->start_ + blocks, extra)); | 208 new Span(serving->shared_memory_, serving->start_ + blocks, extra)); |
| 185 DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end()); | 209 DCHECK_IMPLIES(extra > 1, spans_.find(leftover->start_) == spans_.end()); |
| 186 RegisterSpan(leftover.get()); | 210 RegisterSpan(leftover.get()); |
| 187 | 211 |
| 188 // No need to coalesce as the previous span of |leftover| was just split | 212 // No need to coalesce as the previous span of |leftover| was just split |
| 189 // and the next span of |leftover| was not previously coalesced with | 213 // and the next span of |leftover| was not previously coalesced with |
| 190 // |span|. | 214 // |span|. |
| 191 free_spans_.Append(leftover.release()); | 215 free_spans_.Append(leftover.release()); |
| 192 | 216 |
| 193 serving->length_ = blocks; | 217 serving->length_ = blocks; |
| 194 spans_[serving->start_ + blocks - 1] = serving.get(); | 218 spans_[serving->start_ + blocks - 1] = serving.get(); |
| 195 } | 219 } |
| 196 | 220 |
| 221 // |serving| is no longer in the free list, remove its length from | |
| 222 // |num_free_blocks_|. | |
| 223 DCHECK_GE(num_free_blocks_, serving->length_); | |
| 224 num_free_blocks_ -= serving->length_; | |
| 225 | |
| 197 return serving.Pass(); | 226 return serving.Pass(); |
| 198 } | 227 } |
| 199 | 228 |
| 200 void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) { | 229 void DiscardableSharedMemoryHeap::RegisterSpan(Span* span) { |
| 201 spans_[span->start_] = span; | 230 spans_[span->start_] = span; |
| 202 if (span->length_ > 1) | 231 if (span->length_ > 1) |
| 203 spans_[span->start_ + span->length_ - 1] = span; | 232 spans_[span->start_ + span->length_ - 1] = span; |
| 204 } | 233 } |
| 205 | 234 |
| 206 void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) { | 235 void DiscardableSharedMemoryHeap::UnregisterSpan(Span* span) { |
| 207 DCHECK(spans_.find(span->start_) != spans_.end()); | 236 DCHECK(spans_.find(span->start_) != spans_.end()); |
| 208 DCHECK_EQ(spans_[span->start_], span); | 237 DCHECK_EQ(spans_[span->start_], span); |
| 209 spans_.erase(span->start_); | 238 spans_.erase(span->start_); |
| 210 if (span->length_ > 1) { | 239 if (span->length_ > 1) { |
| 211 DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end()); | 240 DCHECK(spans_.find(span->start_ + span->length_ - 1) != spans_.end()); |
| 212 DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span); | 241 DCHECK_EQ(spans_[span->start_ + span->length_ - 1], span); |
| 213 spans_.erase(span->start_ + span->length_ - 1); | 242 spans_.erase(span->start_ + span->length_ - 1); |
| 214 } | 243 } |
| 215 } | 244 } |
| 216 | 245 |
| 246 bool DiscardableSharedMemoryHeap::IsMemoryUsed( | |
| 247 const base::DiscardableSharedMemory* shared_memory) { | |
| 248 size_t offset = | |
| 249 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_; | |
| 250 size_t length = shared_memory->mapped_size() / block_size_; | |
| 251 DCHECK(spans_.find(offset) != spans_.end()); | |
| 252 Span* span = spans_[offset]; | |
| 253 DCHECK_LE(span->length_, length); | |
| 254 // Memory is used if first span is not in free list or shorter than segment. | |
| 255 return !IsInFreeList(span) || span->length_ != length; | |
| 256 } | |
| 257 | |
| 258 bool DiscardableSharedMemoryHeap::IsMemoryResident( | |
| 259 const base::DiscardableSharedMemory* shared_memory) { | |
| 260 return shared_memory->IsMemoryResident(); | |
| 261 } | |
| 262 | |
| 217 void DiscardableSharedMemoryHeap::ReleaseMemory( | 263 void DiscardableSharedMemoryHeap::ReleaseMemory( |
| 218 base::DiscardableSharedMemory* shared_memory) { | 264 const base::DiscardableSharedMemory* shared_memory) { |
| 219 size_t offset = | 265 size_t offset = |
| 220 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_; | 266 reinterpret_cast<size_t>(shared_memory->memory()) / block_size_; |
| 221 size_t end = offset + shared_memory->mapped_size() / block_size_; | 267 size_t end = offset + shared_memory->mapped_size() / block_size_; |
| 222 while (offset < end) { | 268 while (offset < end) { |
| 223 DCHECK(spans_.find(offset) != spans_.end()); | 269 DCHECK(spans_.find(offset) != spans_.end()); |
| 224 Span* span = spans_[offset]; | 270 Span* span = spans_[offset]; |
| 225 DCHECK_EQ(span->shared_memory_, shared_memory); | 271 DCHECK_EQ(span->shared_memory_, shared_memory); |
| 226 span->shared_memory_ = nullptr; | 272 span->shared_memory_ = nullptr; |
| 227 UnregisterSpan(span); | 273 UnregisterSpan(span); |
| 228 | 274 |
| 229 offset += span->length_; | 275 offset += span->length_; |
| 230 | 276 |
| 231 // If |span| is in the free list, remove it. | 277 DCHECK_GE(num_blocks_, span->length_); |
| 232 if (IsInFreeList(span)) | 278 num_blocks_ -= span->length_; |
| 279 | |
| 280 // If |span| is in the free list, remove it and update |num_free_blocks_|. | |
| 281 if (IsInFreeList(span)) { | |
| 282 DCHECK_GE(num_free_blocks_, span->length_); | |
| 283 num_free_blocks_ -= span->length_; | |
| 233 RemoveFromFreeList(span); | 284 RemoveFromFreeList(span); |
| 285 } | |
| 234 } | 286 } |
| 235 } | 287 } |
| 236 | 288 |
| 237 } // namespace content | 289 } // namespace content |
| OLD | NEW |