Chromium Code Reviews

Side by Side Diff: src/core/SkScaledImageCache.cpp

Issue 507483002: retool image cache to be generic cache (Closed) Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: incorporate sizeof key into bytesUsed() Created 6 years, 3 months ago
1 /* 1 /*
2 * Copyright 2013 Google Inc. 2 * Copyright 2013 Google Inc.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license that can be 4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file. 5 * found in the LICENSE file.
6 */ 6 */
7 7
8 #include "SkChecksum.h" 8 #include "SkChecksum.h"
9 #include "SkScaledImageCache.h" 9 #include "SkScaledImageCache.h"
10 #include "SkMipMap.h" 10 #include "SkMipMap.h"
11 #include "SkPixelRef.h" 11 #include "SkPixelRef.h"
12 #include "SkRect.h" 12 #include "SkRect.h"
13 13
14 // This can be defined by the caller's build system 14 // This can be defined by the caller's build system
15 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE 15 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE
16 16
17 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 17 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
18 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024 18 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
19 #endif 19 #endif
20 20
21 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT 21 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
22 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) 22 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
23 #endif 23 #endif
24 24
25 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
26 return reinterpret_cast<SkScaledImageCache::ID*>(rec);
27 }
28
29 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
30 return reinterpret_cast<SkScaledImageCache::Rec*>(id);
31 }
32
33 void SkScaledImageCache::Key::init(size_t length) { 25 void SkScaledImageCache::Key::init(size_t length) {
34 SkASSERT(SkAlign4(length) == length); 26 SkASSERT(SkAlign4(length) == length);
35 // 2 is fCount32 and fHash 27 // 2 is fCount32 and fHash
36 fCount32 = SkToS32(2 + (length >> 2)); 28 fCount32 = SkToS32(2 + (length >> 2));
37 // skip both of our fields when computing the murmur 29 // skip both of our fields when computing the murmur
38 fHash = SkChecksum::Murmur3(this->as32() + 2, (fCount32 - 2) << 2); 30 fHash = SkChecksum::Murmur3(this->as32() + 2, (fCount32 - 2) << 2);
39 } 31 }
40 32
41 SkScaledImageCache::Key* SkScaledImageCache::Key::clone() const {
42 size_t size = fCount32 << 2;
43 void* copy = sk_malloc_throw(size);
44 memcpy(copy, this, size);
45 return (Key*)copy;
46 }
47
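Key::init() is the heart of the generic key scheme: fCount32 stores the total size in 32-bit words (the two header words plus the payload) and fHash is a Murmur3 over just the payload. Below is a minimal sketch of how a caller-defined key could sit on top of it, assuming the payload fields follow the Key header directly and stay 4-byte aligned; the type and field names are hypothetical, not part of this CL.

    #include "SkScaledImageCache.h"

    struct HypotheticalBitmapKey : public SkScaledImageCache::Key {
        HypotheticalBitmapKey(uint32_t genID, SkScalar sx, SkScalar sy, const SkIRect& bounds)
            : fGenID(genID), fScaleX(sx), fScaleY(sy), fBounds(bounds) {
            // count/hash everything after the Key header; the total must be a multiple of 4
            this->init(sizeof(fGenID) + sizeof(fScaleX) + sizeof(fScaleY) + sizeof(fBounds));
        }
        uint32_t fGenID;   // e.g. the source bitmap's generation ID
        SkScalar fScaleX;  // requested scale factors
        SkScalar fScaleY;
        SkIRect  fBounds;  // source subset
    };
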
48 struct SkScaledImageCache::Rec {
49 Rec(const Key& key, const SkBitmap& bm) : fKey(key.clone()), fBitmap(bm) {
50 fLockCount = 1;
51 fMip = NULL;
52 }
53
54 Rec(const Key& key, const SkMipMap* mip) : fKey(key.clone()) {
55 fLockCount = 1;
56 fMip = mip;
57 mip->ref();
58 }
59
60 ~Rec() {
61 SkSafeUnref(fMip);
62 sk_free(fKey);
63 }
64
65 static const Key& GetKey(const Rec& rec) { return *rec.fKey; }
66 static uint32_t Hash(const Key& key) { return key.hash(); }
67
68 size_t bytesUsed() const {
69 return fMip ? fMip->getSize() : fBitmap.getSize();
70 }
71
72 Rec* fNext;
73 Rec* fPrev;
74
75 // this guy wants to be 64bit aligned
76 Key* fKey;
77
78 int32_t fLockCount;
79
80 // we use either fBitmap or fMip, but not both
81 SkBitmap fBitmap;
82 const SkMipMap* fMip;
83 };
84
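The Rec removed above was hard-wired to hold either an SkBitmap or an SkMipMap. On the new side of this diff the cache only ever asks a record for getKey() and bytesUsed(), which is what makes it generic: the record type is presumably declared in the header as a small base class that clients subclass with their own payload. A rough sketch of such a client record, under that assumption and reusing the hypothetical key above:

    struct HypotheticalBitmapRec : public SkScaledImageCache::Rec {
        HypotheticalBitmapRec(const HypotheticalBitmapKey& key, const SkBitmap& bm)
            : fKey(key), fBitmap(bm) {}

        // the generic cache only needs a key and a byte count from each record
        virtual const SkScaledImageCache::Key& getKey() const SK_OVERRIDE { return fKey; }
        virtual size_t bytesUsed() const SK_OVERRIDE {
            // per this patch set's title, the key's size is counted along with the payload
            return sizeof(fKey) + fBitmap.getSize();
        }

        HypotheticalBitmapKey fKey;
        SkBitmap              fBitmap;
    };
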
85 #include "SkTDynamicHash.h" 33 #include "SkTDynamicHash.h"
86 34
87 class SkScaledImageCache::Hash : 35 class SkScaledImageCache::Hash :
88 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {}; 36 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {};
89 37
90 38
91 /////////////////////////////////////////////////////////////////////////////// 39 ///////////////////////////////////////////////////////////////////////////////
92 40
93 // experimental hash to speed things up 41 // experimental hash to speed things up
94 #define USE_HASH 42 #define USE_HASH
(...skipping 158 matching lines...)
253 while (rec) { 201 while (rec) {
254 Rec* next = rec->fNext; 202 Rec* next = rec->fNext;
255 SkDELETE(rec); 203 SkDELETE(rec);
256 rec = next; 204 rec = next;
257 } 205 }
258 delete fHash; 206 delete fHash;
259 } 207 }
260 208
261 //////////////////////////////////////////////////////////////////////////////// 209 ////////////////////////////////////////////////////////////////////////////////
262 210
263 /** 211 const SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const Key& key) {
264 This private method is the fully general record finder. All other
265 record finders should call this function or the one above.
266 */
267 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
268 #ifdef USE_HASH 212 #ifdef USE_HASH
269 Rec* rec = fHash->find(key); 213 Rec* rec = fHash->find(key);
270 #else 214 #else
271 Rec* rec = find_rec_in_list(fHead, key); 215 Rec* rec = find_rec_in_list(fHead, key);
272 #endif 216 #endif
273 if (rec) { 217 if (rec) {
274 this->moveToHead(rec); // for our LRU 218 this->moveToHead(rec); // for our LRU
275 rec->fLockCount += 1; 219 rec->fLockCount += 1;
276 } 220 }
277 return rec; 221 return rec;
278 } 222 }
279 223
280 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const Key& key, SkBitmap* result) { 224 const SkScaledImageCache::Rec* SkScaledImageCache::addAndLock(Rec* rec) {
281 Rec* rec = this->findAndLock(key);
282 if (rec) {
283 SkASSERT(NULL == rec->fMip);
284 SkASSERT(rec->fBitmap.pixelRef());
285 *result = rec->fBitmap;
286 }
287 return rec_to_id(rec);
288 }
289
290 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const Key& key, const SkMipMap** mip) {
291 Rec* rec = this->findAndLock(key);
292 if (rec) {
293 SkASSERT(rec->fMip);
294 SkASSERT(NULL == rec->fBitmap.pixelRef());
295 *mip = rec->fMip;
296 }
297 return rec_to_id(rec);
298 }
299
300
301 ////////////////////////////////////////////////////////////////////////////////
302 /**
303 This private method is the fully general record adder. All other
304 record adders should call this function. */
305 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
306 SkASSERT(rec); 225 SkASSERT(rec);
307 // See if we already have this key (racy inserts, etc.) 226 // See if we already have this key (racy inserts, etc.)
308 Rec* existing = this->findAndLock(*rec->fKey); 227 const Rec* existing = this->findAndLock(rec->getKey());
309 if (NULL != existing) { 228 if (NULL != existing) {
310 // Since we already have a matching entry, just delete the new one and return.
311 // Call sites cannot assume the passed in object will live past this call.
312 existing->fBitmap = rec->fBitmap;
313 SkDELETE(rec); 229 SkDELETE(rec);
314 return rec_to_id(existing); 230 return existing;
315 } 231 }
316 232
317 this->addToHead(rec); 233 this->addToHead(rec);
318 SkASSERT(1 == rec->fLockCount); 234 SkASSERT(1 == rec->fLockCount);
319 #ifdef USE_HASH 235 #ifdef USE_HASH
320 SkASSERT(fHash); 236 SkASSERT(fHash);
321 fHash->add(rec); 237 fHash->add(rec);
322 #endif 238 #endif
323 // We may (now) be overbudget, so see if we need to purge something. 239 // We may (now) be overbudget, so see if we need to purge something.
324 this->purgeAsNeeded(); 240 this->purgeAsNeeded();
325 return rec_to_id(rec); 241 return rec;
326 } 242 }
327 243
328 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const Key& key, const SkBitmap& scaled) { 244 void SkScaledImageCache::add(Rec* rec) {
329 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); 245 SkASSERT(rec);
330 return this->addAndLock(rec); 246 // See if we already have this key (racy inserts, etc.)
247 const Rec* existing = this->findAndLock(rec->getKey());
248 if (NULL != existing) {
249 SkDELETE(rec);
250 this->unlock(existing);
251 return;
252 }
253
254 this->addToHead(rec);
255 SkASSERT(1 == rec->fLockCount);
256 #ifdef USE_HASH
257 SkASSERT(fHash);
258 fHash->add(rec);
259 #endif
260 this->unlock(rec);
331 } 261 }
332 262
333 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const Key& key, const SkMipMap* mip) { 263 void SkScaledImageCache::unlock(SkScaledImageCache::ID id) {
334 Rec* rec = SkNEW_ARGS(Rec, (key, mip));
335 return this->addAndLock(rec);
336 }
337
338 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
339 SkASSERT(id); 264 SkASSERT(id);
340 265
341 #ifdef SK_DEBUG 266 #ifdef SK_DEBUG
342 { 267 {
343 bool found = false; 268 bool found = false;
344 Rec* rec = fHead; 269 Rec* rec = fHead;
345 while (rec != NULL) { 270 while (rec != NULL) {
346 if (rec == id_to_rec(id)) { 271 if (rec == id) {
347 found = true; 272 found = true;
348 break; 273 break;
349 } 274 }
350 rec = rec->fNext; 275 rec = rec->fNext;
351 } 276 }
352 SkASSERT(found); 277 SkASSERT(found);
353 } 278 }
354 #endif 279 #endif
355 Rec* rec = id_to_rec(id); 280 const Rec* rec = id;
356 SkASSERT(rec->fLockCount > 0); 281 SkASSERT(rec->fLockCount > 0);
357 rec->fLockCount -= 1; 282 const_cast<Rec*>(rec)->fLockCount -= 1;
358 283
359 // we may have been over-budget, but now have released something, so check 284 // we may have been over-budget, but now have released something, so check
360 // if we should purge. 285 // if we should purge.
361 if (0 == rec->fLockCount) { 286 if (0 == rec->fLockCount) {
362 this->purgeAsNeeded(); 287 this->purgeAsNeeded();
363 } 288 }
364 } 289 }
365 290
366 void SkScaledImageCache::purgeAsNeeded() { 291 void SkScaledImageCache::purgeAsNeeded() {
367 size_t byteLimit; 292 size_t byteLimit;
(...skipping 15 matching lines...)
383 if (bytesUsed < byteLimit && countUsed < countLimit) { 308 if (bytesUsed < byteLimit && countUsed < countLimit) {
384 break; 309 break;
385 } 310 }
386 311
387 Rec* prev = rec->fPrev; 312 Rec* prev = rec->fPrev;
388 if (0 == rec->fLockCount) { 313 if (0 == rec->fLockCount) {
389 size_t used = rec->bytesUsed(); 314 size_t used = rec->bytesUsed();
390 SkASSERT(used <= bytesUsed); 315 SkASSERT(used <= bytesUsed);
391 this->detach(rec); 316 this->detach(rec);
392 #ifdef USE_HASH 317 #ifdef USE_HASH
393 fHash->remove(*rec->fKey); 318 fHash->remove(rec->getKey());
394 #endif 319 #endif
395 320
396 SkDELETE(rec); 321 SkDELETE(rec);
397 322
398 bytesUsed -= used; 323 bytesUsed -= used;
399 countUsed -= 1; 324 countUsed -= 1;
400 } 325 }
401 rec = prev; 326 rec = prev;
402 } 327 }
403 328
(...skipping 165 matching lines...)
569 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE 494 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
570 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create)); 495 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
571 #else 496 #else
572 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT)); 497 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
573 #endif 498 #endif
574 atexit(cleanup_gScaledImageCache); 499 atexit(cleanup_gScaledImageCache);
575 } 500 }
576 return gScaledImageCache; 501 return gScaledImageCache;
577 } 502 }
578 503
579 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const Key& key, SkBitmap* result) { 504 void SkScaledImageCache::Unlock(SkScaledImageCache::ID id) {
580 SkAutoMutexAcquire am(gMutex);
581 return get_cache()->findAndLock(key, result);
582 }
583
584 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const Key& key, SkMipMap const ** mip) {
585 SkAutoMutexAcquire am(gMutex);
586 return get_cache()->findAndLock(key, mip);
587 }
588
589 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const Key& key, const SkBitmap& scaled) {
590 SkAutoMutexAcquire am(gMutex);
591 return get_cache()->addAndLock(key, scaled);
592 }
593
594 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const Key& key, const SkMipMap* mip) {
595 SkAutoMutexAcquire am(gMutex);
596 return get_cache()->addAndLock(key, mip);
597 }
598
599 void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
600 SkAutoMutexAcquire am(gMutex); 505 SkAutoMutexAcquire am(gMutex);
601 get_cache()->unlock(id); 506 get_cache()->unlock(id);
602 507
603 // get_cache()->dump(); 508 // get_cache()->dump();
604 } 509 }
605 510
606 size_t SkScaledImageCache::GetTotalBytesUsed() { 511 size_t SkScaledImageCache::GetTotalBytesUsed() {
607 SkAutoMutexAcquire am(gMutex); 512 SkAutoMutexAcquire am(gMutex);
608 return get_cache()->getTotalBytesUsed(); 513 return get_cache()->getTotalBytesUsed();
609 } 514 }
(...skipping 21 matching lines...)
631 size_t SkScaledImageCache::SetSingleAllocationByteLimit(size_t size) { 536 size_t SkScaledImageCache::SetSingleAllocationByteLimit(size_t size) {
632 SkAutoMutexAcquire am(gMutex); 537 SkAutoMutexAcquire am(gMutex);
633 return get_cache()->setSingleAllocationByteLimit(size); 538 return get_cache()->setSingleAllocationByteLimit(size);
634 } 539 }
635 540
636 size_t SkScaledImageCache::GetSingleAllocationByteLimit() { 541 size_t SkScaledImageCache::GetSingleAllocationByteLimit() {
637 SkAutoMutexAcquire am(gMutex); 542 SkAutoMutexAcquire am(gMutex);
638 return get_cache()->getSingleAllocationByteLimit(); 543 return get_cache()->getSingleAllocationByteLimit();
639 } 544 }
640 545
546 const SkScaledImageCache::Rec* SkScaledImageCache::FindAndLock(const Key& key) {
547 SkAutoMutexAcquire am(gMutex);
548 return get_cache()->findAndLock(key);
549 }
550
551 const SkScaledImageCache::Rec* SkScaledImageCache::AddAndLock(Rec* rec) {
552 SkAutoMutexAcquire am(gMutex);
553 return get_cache()->addAndLock(rec);
554 }
555
556 void SkScaledImageCache::Add(Rec* rec) {
557 SkAutoMutexAcquire am(gMutex);
558 get_cache()->add(rec);
559 }
560
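Taken together, the new static entry points reduce caller code to a find-or-create pattern: look up the key, build a record only on a miss, hand ownership to AddAndLock() (which, per addAndLock() above, deletes the new record and returns the existing one if a racy insert already happened), and Unlock() when done; the new unlock() treats the ID as the record pointer itself. A sketch of a caller built on the hypothetical types from the earlier notes:

    // Hypothetical caller, not part of this CL: find or create a cached scaled bitmap.
    static bool hypothetical_find_or_add(const HypotheticalBitmapKey& key,
                                         int width, int height, SkBitmap* result) {
        const SkScaledImageCache::Rec* rec = SkScaledImageCache::FindAndLock(key);
        if (NULL == rec) {
            SkBitmap scaled;
            scaled.allocN32Pixels(width, height);   // placeholder for the real scaling work
            // AddAndLock takes ownership of the record; if another thread raced us,
            // it frees ours and returns the record already in the cache.
            rec = SkScaledImageCache::AddAndLock(SkNEW_ARGS(HypotheticalBitmapRec, (key, scaled)));
        }
        *result = static_cast<const HypotheticalBitmapRec*>(rec)->fBitmap;
        SkScaledImageCache::Unlock(rec);   // the copied SkBitmap keeps the pixels alive
        return true;
    }
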
641 /////////////////////////////////////////////////////////////////////////////// 561 ///////////////////////////////////////////////////////////////////////////////
642 562
643 #include "SkGraphics.h" 563 #include "SkGraphics.h"
644 564
645 size_t SkGraphics::GetImageCacheTotalBytesUsed() { 565 size_t SkGraphics::GetImageCacheTotalBytesUsed() {
646 return SkScaledImageCache::GetTotalBytesUsed(); 566 return SkScaledImageCache::GetTotalBytesUsed();
647 } 567 }
648 568
649 size_t SkGraphics::GetImageCacheTotalByteLimit() { 569 size_t SkGraphics::GetImageCacheTotalByteLimit() {
650 return SkScaledImageCache::GetTotalByteLimit(); 570 return SkScaledImageCache::GetTotalByteLimit();
651 } 571 }
652 572
653 size_t SkGraphics::SetImageCacheTotalByteLimit(size_t newLimit) { 573 size_t SkGraphics::SetImageCacheTotalByteLimit(size_t newLimit) {
654 return SkScaledImageCache::SetTotalByteLimit(newLimit); 574 return SkScaledImageCache::SetTotalByteLimit(newLimit);
655 } 575 }
656 576
657 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() { 577 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() {
658 return SkScaledImageCache::GetSingleAllocationByteLimit(); 578 return SkScaledImageCache::GetSingleAllocationByteLimit();
659 } 579 }
660 580
661 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) { 581 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) {
662 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit); 582 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit);
663 } 583 }
664 584
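The SkGraphics wrappers above remain the public knobs for this cache's budget. For instance, an application could cap the shared image cache at startup; the 8 MB and 1 MB values below are purely illustrative.

    #include "SkGraphics.h"

    static void hypothetical_configure_image_cache() {
        // cap the global image cache; the setter returns the previous limit
        size_t prevTotal = SkGraphics::SetImageCacheTotalByteLimit(8 * 1024 * 1024);
        // also bound how large any single cached allocation may be
        SkGraphics::SetImageCacheSingleAllocationByteLimit(1 * 1024 * 1024);
        (void)prevTotal;   // kept in case the caller wants to restore it later
    }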