| OLD | NEW |
| --- | --- |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "net/disk_cache/memory/mem_backend_impl.h" | 5 #include "net/disk_cache/memory/mem_backend_impl.h" |
| 6 | 6 |
| 7 #include "base/logging.h" | 7 #include "base/logging.h" |
| 8 #include "base/sys_info.h" | 8 #include "base/sys_info.h" |
| 9 #include "net/base/net_errors.h" | 9 #include "net/base/net_errors.h" |
| 10 #include "net/disk_cache/cache_util.h" | 10 #include "net/disk_cache/cache_util.h" |
| (...skipping 11 matching lines...) | |
| 22 return 0; | 22 return 0; |
| 23 | 23 |
| 24 return high_water - kCleanUpMargin; | 24 return high_water - kCleanUpMargin; |
| 25 } | 25 } |
| 26 | 26 |
| 27 } // namespace | 27 } // namespace |
| 28 | 28 |
| 29 namespace disk_cache { | 29 namespace disk_cache { |
| 30 | 30 |
| 31 MemBackendImpl::MemBackendImpl(net::NetLog* net_log) | 31 MemBackendImpl::MemBackendImpl(net::NetLog* net_log) |
| 32 : max_size_(0), current_size_(0), net_log_(net_log), weak_factory_(this) { | 32 : max_size_(0), current_size_(0), net_log_(net_log) {} |
| 33 } | |
| 34 | 33 |
| 35 MemBackendImpl::~MemBackendImpl() { | 34 MemBackendImpl::~MemBackendImpl() { |
| 36 EntryMap::iterator it = entries_.begin(); | 35 EntryMap::iterator it = entries_.begin(); |
| 37 while (it != entries_.end()) { | 36 while (it != entries_.end()) { |
| 38 it->second->Doom(); | 37 it->second->Doom(); |
| 39 it = entries_.begin(); | 38 it = entries_.begin(); |
| 40 } | 39 } |
| 41 DCHECK(!current_size_); | 40 DCHECK(!current_size_); |
| 42 } | 41 } |
| 43 | 42 |
| (...skipping 130 matching lines...) | |
| 174 } | 173 } |
| 175 | 174 |
| 176 int MemBackendImpl::DoomEntriesSince(const base::Time initial_time, | 175 int MemBackendImpl::DoomEntriesSince(const base::Time initial_time, |
| 177 const CompletionCallback& callback) { | 176 const CompletionCallback& callback) { |
| 178 if (DoomEntriesSince(initial_time)) | 177 if (DoomEntriesSince(initial_time)) |
| 179 return net::OK; | 178 return net::OK; |
| 180 | 179 |
| 181 return net::ERR_FAILED; | 180 return net::ERR_FAILED; |
| 182 } | 181 } |
| 183 | 182 |
| 184 class MemBackendImpl::MemIterator : public Backend::Iterator { | 183 int MemBackendImpl::OpenNextEntry(void** iter, Entry** next_entry, |
| 185 public: | 184 const CompletionCallback& callback) { |
| 186 explicit MemIterator(base::WeakPtr<MemBackendImpl> backend) | 185 if (OpenNextEntry(iter, next_entry)) |
| 187 : backend_(backend), current_(NULL) { | 186 return net::OK; |
| 188 } | |
| 189 | 187 |
| 190 virtual int OpenNextEntry(Entry** next_entry, | 188 return net::ERR_FAILED; |
| 191 const CompletionCallback& callback) OVERRIDE { | 189 } |
| 192 if (!backend_) | |
| 193 return net::ERR_FAILED; | |
| 194 | 190 |
| 195 MemEntryImpl* node = backend_->rankings_.GetNext(current_); | 191 void MemBackendImpl::EndEnumeration(void** iter) { |
| 196 // We should never return a child entry so iterate until we hit a parent | 192 *iter = NULL; |
| 197 // entry. | |
| 198 while (node && node->type() != MemEntryImpl::kParentEntry) | |
| 199 node = backend_->rankings_.GetNext(node); | |
| 200 *next_entry = node; | |
| 201 current_ = node; | |
| 202 | |
| 203 if (node) { | |
| 204 node->Open(); | |
| 205 return net::OK; | |
| 206 } | |
| 207 return net::ERR_FAILED; | |
| 208 } | |
| 209 | |
| 210 private: | |
| 211 base::WeakPtr<MemBackendImpl> backend_; | |
| 212 MemEntryImpl* current_; | |
| 213 }; | |
| 214 | |
| 215 scoped_ptr<Backend::Iterator> MemBackendImpl::CreateIterator() { | |
| 216 return scoped_ptr<Backend::Iterator>( | |
| 217 new MemIterator(weak_factory_.GetWeakPtr())); | |
| 218 } | 193 } |
| 219 | 194 |
| 220 void MemBackendImpl::OnExternalCacheHit(const std::string& key) { | 195 void MemBackendImpl::OnExternalCacheHit(const std::string& key) { |
| 221 EntryMap::iterator it = entries_.find(key); | 196 EntryMap::iterator it = entries_.find(key); |
| 222 if (it != entries_.end()) { | 197 if (it != entries_.end()) { |
| 223 UpdateRank(it->second); | 198 UpdateRank(it->second); |
| 224 } | 199 } |
| 225 } | 200 } |
| 226 | 201 |
| 227 bool MemBackendImpl::OpenEntry(const std::string& key, Entry** entry) { | 202 bool MemBackendImpl::OpenEntry(const std::string& key, Entry** entry) { |
| (...skipping 77 matching lines...) | |
| 305 // Get the entry in the front. | 280 // Get the entry in the front. |
| 306 Entry* entry = rankings_.GetNext(NULL); | 281 Entry* entry = rankings_.GetNext(NULL); |
| 307 | 282 |
| 308 // Break the loop when there are no more entries or the entry is too old. | 283 // Break the loop when there are no more entries or the entry is too old. |
| 309 if (!entry || entry->GetLastUsed() < initial_time) | 284 if (!entry || entry->GetLastUsed() < initial_time) |
| 310 return true; | 285 return true; |
| 311 entry->Doom(); | 286 entry->Doom(); |
| 312 } | 287 } |
| 313 } | 288 } |
| 314 | 289 |
| | 290 bool MemBackendImpl::OpenNextEntry(void** iter, Entry** next_entry) { |
| | 291 MemEntryImpl* current = reinterpret_cast<MemEntryImpl*>(*iter); |
| | 292 MemEntryImpl* node = rankings_.GetNext(current); |
| | 293 // We should never return a child entry so iterate until we hit a parent |
| | 294 // entry. |
| | 295 while (node && node->type() != MemEntryImpl::kParentEntry) { |
| | 296 node = rankings_.GetNext(node); |
| | 297 } |
| | 298 *next_entry = node; |
| | 299 *iter = node; |
| | 300 |
| | 301 if (node) |
| | 302 node->Open(); |
| | 303 |
| | 304 return NULL != node; |
| | 305 } |
| | 306 |
| 315 void MemBackendImpl::TrimCache(bool empty) { | 307 void MemBackendImpl::TrimCache(bool empty) { |
| 316 MemEntryImpl* next = rankings_.GetPrev(NULL); | 308 MemEntryImpl* next = rankings_.GetPrev(NULL); |
| 317 if (!next) | 309 if (!next) |
| 318 return; | 310 return; |
| 319 | 311 |
| 320 int target_size = empty ? 0 : LowWaterAdjust(max_size_); | 312 int target_size = empty ? 0 : LowWaterAdjust(max_size_); |
| 321 while (current_size_ > target_size && next) { | 313 while (current_size_ > target_size && next) { |
| 322 MemEntryImpl* node = next; | 314 MemEntryImpl* node = next; |
| 323 next = rankings_.GetPrev(next); | 315 next = rankings_.GetPrev(next); |
| 324 if (!node->InUse() || empty) { | 316 if (!node->InUse() || empty) { |
| (...skipping 11 matching lines...) | |
| 336 if (current_size_ > max_size_) | 328 if (current_size_ > max_size_) |
| 337 TrimCache(false); | 329 TrimCache(false); |
| 338 } | 330 } |
| 339 | 331 |
| 340 void MemBackendImpl::SubstractStorageSize(int32 bytes) { | 332 void MemBackendImpl::SubstractStorageSize(int32 bytes) { |
| 341 current_size_ -= bytes; | 333 current_size_ -= bytes; |
| 342 DCHECK_GE(current_size_, 0); | 334 DCHECK_GE(current_size_, 0); |
| 343 } | 335 } |
| 344 | 336 |
| 345 } // namespace disk_cache | 337 } // namespace disk_cache |
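Side note on the enumeration change visible above: the NEW column drops the `Backend::Iterator`-based `MemIterator`/`CreateIterator()` machinery in favour of the `void** iter` cursor pair `OpenNextEntry()`/`EndEnumeration()`. The snippet below is a minimal caller sketch, not part of this patch; the function name `ListAllKeys` is invented for illustration, and it assumes the in-memory backend's synchronous behaviour, where `OpenNextEntry()` returns `net::OK` or `net::ERR_FAILED` immediately and the null `CompletionCallback` is never invoked.

```cpp
#include <string>

#include "net/base/completion_callback.h"
#include "net/base/net_errors.h"
#include "net/disk_cache/disk_cache.h"

// Illustrative sketch: walk every entry in a cache backend using the
// void**-cursor enumeration API shown in the NEW column above.
void ListAllKeys(disk_cache::Backend* backend) {
  void* iter = NULL;  // Opaque cursor; NULL means "start from the beginning".
  disk_cache::Entry* entry = NULL;
  while (backend->OpenNextEntry(&iter, &entry, net::CompletionCallback()) ==
         net::OK) {
    std::string key = entry->GetKey();
    entry->Close();  // Balances the Open() the backend performed for us.
    // ... use |key| ...
  }
  backend->EndEnumeration(&iter);  // Resets the cursor back to NULL.
}
```

Against an asynchronous backend the same loop would also have to handle `net::ERR_IO_PENDING` and wait for the callback; `MemBackendImpl` never returns it, which is why the wrappers in this file can translate the private bool-returning `OpenNextEntry()` directly into `net::OK` or `net::ERR_FAILED`.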