Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(534)

Side by Side Diff: src/core/SkScaledImageCache.cpp

Issue 507483002: retool image cache to be generic cache (Closed) Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: rebase + add comment in unlock Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/core/SkScaledImageCache.h ('k') | src/lazy/SkCachingPixelRef.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 /* 1 /*
2 * Copyright 2013 Google Inc. 2 * Copyright 2013 Google Inc.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license that can be 4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file. 5 * found in the LICENSE file.
6 */ 6 */
7 7
8 #include "SkChecksum.h" 8 #include "SkChecksum.h"
9 #include "SkScaledImageCache.h" 9 #include "SkScaledImageCache.h"
10 #include "SkMipMap.h" 10 #include "SkMipMap.h"
11 #include "SkPixelRef.h" 11 #include "SkPixelRef.h"
12 12
13 // This can be defined by the caller's build system 13 // This can be defined by the caller's build system
14 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE 14 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE
15 15
16 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 16 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
17 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024 17 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
18 #endif 18 #endif
19 19
20 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT 20 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
21 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) 21 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
22 #endif 22 #endif
23 23
24 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
25 return reinterpret_cast<SkScaledImageCache::ID*>(rec);
26 }
27
28 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
29 return reinterpret_cast<SkScaledImageCache::Rec*>(id);
30 }
31
32 void SkScaledImageCache::Key::init(size_t length) { 24 void SkScaledImageCache::Key::init(size_t length) {
33 SkASSERT(SkAlign4(length) == length); 25 SkASSERT(SkAlign4(length) == length);
34 // 2 is fCount32 and fHash 26 // 2 is fCount32 and fHash
35 fCount32 = SkToS32(2 + (length >> 2)); 27 fCount32 = SkToS32(2 + (length >> 2));
36     // skip both of our fields when computing the murmur 28     // skip both of our fields when computing the murmur
37 fHash = SkChecksum::Murmur3(this->as32() + 2, (fCount32 - 2) << 2); 29 fHash = SkChecksum::Murmur3(this->as32() + 2, (fCount32 - 2) << 2);
38 } 30 }
39 31
40 SkScaledImageCache::Key* SkScaledImageCache::Key::clone() const {
41 size_t size = fCount32 << 2;
42 void* copy = sk_malloc_throw(size);
43 memcpy(copy, this, size);
44 return (Key*)copy;
45 }
46
47 struct SkScaledImageCache::Rec {
48 Rec(const Key& key, const SkBitmap& bm) : fKey(key.clone()), fBitmap(bm) {
49 fLockCount = 1;
50 fMip = NULL;
51 }
52
53 Rec(const Key& key, const SkMipMap* mip) : fKey(key.clone()) {
54 fLockCount = 1;
55 fMip = mip;
56 mip->ref();
57 }
58
59 ~Rec() {
60 SkSafeUnref(fMip);
61 sk_free(fKey);
62 }
63
64 static const Key& GetKey(const Rec& rec) { return *rec.fKey; }
65 static uint32_t Hash(const Key& key) { return key.hash(); }
66
67 size_t bytesUsed() const {
68 return fMip ? fMip->getSize() : fBitmap.getSize();
69 }
70
71 Rec* fNext;
72 Rec* fPrev;
73
74 // this guy wants to be 64bit aligned
75 Key* fKey;
76
77 int32_t fLockCount;
78
79 // we use either fBitmap or fMip, but not both
80 SkBitmap fBitmap;
81 const SkMipMap* fMip;
82 };
83
84 #include "SkTDynamicHash.h" 32 #include "SkTDynamicHash.h"
85 33
86 class SkScaledImageCache::Hash : 34 class SkScaledImageCache::Hash :
87 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {}; 35 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {};
88 36
89 37
90 /////////////////////////////////////////////////////////////////////////////// 38 ///////////////////////////////////////////////////////////////////////////////
91 39
92 // experimental hash to speed things up 40 // experimental hash to speed things up
93 #define USE_HASH 41 #define USE_HASH
(...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after
252 while (rec) { 200 while (rec) {
253 Rec* next = rec->fNext; 201 Rec* next = rec->fNext;
254 SkDELETE(rec); 202 SkDELETE(rec);
255 rec = next; 203 rec = next;
256 } 204 }
257 delete fHash; 205 delete fHash;
258 } 206 }
259 207
260 //////////////////////////////////////////////////////////////////////////////// 208 ////////////////////////////////////////////////////////////////////////////////
261 209
262 /** 210 const SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const Key& key) {
263 This private method is the fully general record finder. All other
264 record finders should call this function or the one above.
265 */
266 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCach e::Key& key) {
267 #ifdef USE_HASH 211 #ifdef USE_HASH
268 Rec* rec = fHash->find(key); 212 Rec* rec = fHash->find(key);
269 #else 213 #else
270 Rec* rec = find_rec_in_list(fHead, key); 214 Rec* rec = find_rec_in_list(fHead, key);
271 #endif 215 #endif
272 if (rec) { 216 if (rec) {
273 this->moveToHead(rec); // for our LRU 217 this->moveToHead(rec); // for our LRU
274 rec->fLockCount += 1; 218 rec->fLockCount += 1;
275 } 219 }
276 return rec; 220 return rec;
277 } 221 }
278 222
279 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const Key& key, SkBitmap * result) { 223 const SkScaledImageCache::Rec* SkScaledImageCache::addAndLock(Rec* rec) {
280 Rec* rec = this->findAndLock(key);
281 if (rec) {
282 SkASSERT(NULL == rec->fMip);
283 SkASSERT(rec->fBitmap.pixelRef());
284 *result = rec->fBitmap;
285 }
286 return rec_to_id(rec);
287 }
288
289 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const Key& key, const Sk MipMap** mip) {
290 Rec* rec = this->findAndLock(key);
291 if (rec) {
292 SkASSERT(rec->fMip);
293 SkASSERT(NULL == rec->fBitmap.pixelRef());
294 *mip = rec->fMip;
295 }
296 return rec_to_id(rec);
297 }
298
299
300 ////////////////////////////////////////////////////////////////////////////////
301 /**
302 This private method is the fully general record adder. All other
303     record adders should call this function. */
304 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
305 SkASSERT(rec); 224 SkASSERT(rec);
306 // See if we already have this key (racy inserts, etc.) 225 // See if we already have this key (racy inserts, etc.)
307 Rec* existing = this->findAndLock(*rec->fKey); 226 const Rec* existing = this->findAndLock(rec->getKey());
308 if (NULL != existing) { 227 if (NULL != existing) {
309 // Since we already have a matching entry, just delete the new one and r eturn.
310 // Call sites cannot assume the passed in object will live past this cal l.
311 existing->fBitmap = rec->fBitmap;
312 SkDELETE(rec); 228 SkDELETE(rec);
313 return rec_to_id(existing); 229 return existing;
314 } 230 }
315 231
316 this->addToHead(rec); 232 this->addToHead(rec);
317 SkASSERT(1 == rec->fLockCount); 233 SkASSERT(1 == rec->fLockCount);
318 #ifdef USE_HASH 234 #ifdef USE_HASH
319 SkASSERT(fHash); 235 SkASSERT(fHash);
320 fHash->add(rec); 236 fHash->add(rec);
321 #endif 237 #endif
322 // We may (now) be overbudget, so see if we need to purge something. 238 // We may (now) be overbudget, so see if we need to purge something.
323 this->purgeAsNeeded(); 239 this->purgeAsNeeded();
324 return rec_to_id(rec); 240 return rec;
325 } 241 }
326 242
327 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const Key& key, const SkB itmap& scaled) { 243 void SkScaledImageCache::add(Rec* rec) {
328 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); 244 SkASSERT(rec);
329 return this->addAndLock(rec); 245 // See if we already have this key (racy inserts, etc.)
246 const Rec* existing = this->findAndLock(rec->getKey());
247 if (NULL != existing) {
248 SkDELETE(rec);
249 this->unlock(existing);
250 return;
251 }
252
253 this->addToHead(rec);
254 SkASSERT(1 == rec->fLockCount);
255 #ifdef USE_HASH
256 SkASSERT(fHash);
257 fHash->add(rec);
258 #endif
259 this->unlock(rec);
330 } 260 }
331 261
332 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const Key& key, const SkM ipMap* mip) { 262 void SkScaledImageCache::unlock(SkScaledImageCache::ID id) {
333 Rec* rec = SkNEW_ARGS(Rec, (key, mip));
334 return this->addAndLock(rec);
335 }
336
337 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
338 SkASSERT(id); 263 SkASSERT(id);
339 264
340 #ifdef SK_DEBUG 265 #ifdef SK_DEBUG
341 { 266 {
342 bool found = false; 267 bool found = false;
343 Rec* rec = fHead; 268 Rec* rec = fHead;
344 while (rec != NULL) { 269 while (rec != NULL) {
345 if (rec == id_to_rec(id)) { 270 if (rec == id) {
346 found = true; 271 found = true;
347 break; 272 break;
348 } 273 }
349 rec = rec->fNext; 274 rec = rec->fNext;
350 } 275 }
351 SkASSERT(found); 276 SkASSERT(found);
352 } 277 }
353 #endif 278 #endif
354 Rec* rec = id_to_rec(id); 279 const Rec* rec = id;
355 SkASSERT(rec->fLockCount > 0); 280 SkASSERT(rec->fLockCount > 0);
356 rec->fLockCount -= 1; 281 // We're under our lock, and we're the only possible mutator, so unconsting is fine.
282 const_cast<Rec*>(rec)->fLockCount -= 1;
357 283
358 // we may have been over-budget, but now have released something, so check 284 // we may have been over-budget, but now have released something, so check
359 // if we should purge. 285 // if we should purge.
360 if (0 == rec->fLockCount) { 286 if (0 == rec->fLockCount) {
361 this->purgeAsNeeded(); 287 this->purgeAsNeeded();
362 } 288 }
363 } 289 }
364 290
365 void SkScaledImageCache::purgeAsNeeded() { 291 void SkScaledImageCache::purgeAsNeeded() {
366 size_t byteLimit; 292 size_t byteLimit;
(...skipping 15 matching lines...) Expand all
382 if (bytesUsed < byteLimit && countUsed < countLimit) { 308 if (bytesUsed < byteLimit && countUsed < countLimit) {
383 break; 309 break;
384 } 310 }
385 311
386 Rec* prev = rec->fPrev; 312 Rec* prev = rec->fPrev;
387 if (0 == rec->fLockCount) { 313 if (0 == rec->fLockCount) {
388 size_t used = rec->bytesUsed(); 314 size_t used = rec->bytesUsed();
389 SkASSERT(used <= bytesUsed); 315 SkASSERT(used <= bytesUsed);
390 this->detach(rec); 316 this->detach(rec);
391 #ifdef USE_HASH 317 #ifdef USE_HASH
392 fHash->remove(*rec->fKey); 318 fHash->remove(rec->getKey());
393 #endif 319 #endif
394 320
395 SkDELETE(rec); 321 SkDELETE(rec);
396 322
397 bytesUsed -= used; 323 bytesUsed -= used;
398 countUsed -= 1; 324 countUsed -= 1;
399 } 325 }
400 rec = prev; 326 rec = prev;
401 } 327 }
402 328
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after
568 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE 494 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
569 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory: :Create)); 495 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory: :Create));
570 #else 496 #else
571 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CAC HE_LIMIT)); 497 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CAC HE_LIMIT));
572 #endif 498 #endif
573 atexit(cleanup_gScaledImageCache); 499 atexit(cleanup_gScaledImageCache);
574 } 500 }
575 return gScaledImageCache; 501 return gScaledImageCache;
576 } 502 }
577 503
578 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const Key& key, SkBitmap * result) { 504 void SkScaledImageCache::Unlock(SkScaledImageCache::ID id) {
579 SkAutoMutexAcquire am(gMutex);
580 return get_cache()->findAndLock(key, result);
581 }
582
583 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const Key& key, SkMipMap const ** mip) {
584 SkAutoMutexAcquire am(gMutex);
585 return get_cache()->findAndLock(key, mip);
586 }
587
588 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const Key& key, const SkB itmap& scaled) {
589 SkAutoMutexAcquire am(gMutex);
590 return get_cache()->addAndLock(key, scaled);
591 }
592
593 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const Key& key, const SkM ipMap* mip) {
594 SkAutoMutexAcquire am(gMutex);
595 return get_cache()->addAndLock(key, mip);
596 }
597
598 void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
599 SkAutoMutexAcquire am(gMutex); 505 SkAutoMutexAcquire am(gMutex);
600 get_cache()->unlock(id); 506 get_cache()->unlock(id);
601 507
602 // get_cache()->dump(); 508 // get_cache()->dump();
603 } 509 }
604 510
605 size_t SkScaledImageCache::GetTotalBytesUsed() { 511 size_t SkScaledImageCache::GetTotalBytesUsed() {
606 SkAutoMutexAcquire am(gMutex); 512 SkAutoMutexAcquire am(gMutex);
607 return get_cache()->getTotalBytesUsed(); 513 return get_cache()->getTotalBytesUsed();
608 } 514 }
(...skipping 21 matching lines...) Expand all
630 size_t SkScaledImageCache::SetSingleAllocationByteLimit(size_t size) { 536 size_t SkScaledImageCache::SetSingleAllocationByteLimit(size_t size) {
631 SkAutoMutexAcquire am(gMutex); 537 SkAutoMutexAcquire am(gMutex);
632 return get_cache()->setSingleAllocationByteLimit(size); 538 return get_cache()->setSingleAllocationByteLimit(size);
633 } 539 }
634 540
635 size_t SkScaledImageCache::GetSingleAllocationByteLimit() { 541 size_t SkScaledImageCache::GetSingleAllocationByteLimit() {
636 SkAutoMutexAcquire am(gMutex); 542 SkAutoMutexAcquire am(gMutex);
637 return get_cache()->getSingleAllocationByteLimit(); 543 return get_cache()->getSingleAllocationByteLimit();
638 } 544 }
639 545
546 const SkScaledImageCache::Rec* SkScaledImageCache::FindAndLock(const Key& key) {
547 SkAutoMutexAcquire am(gMutex);
548 return get_cache()->findAndLock(key);
549 }
550
551 const SkScaledImageCache::Rec* SkScaledImageCache::AddAndLock(Rec* rec) {
552 SkAutoMutexAcquire am(gMutex);
553 return get_cache()->addAndLock(rec);
554 }
555
556 void SkScaledImageCache::Add(Rec* rec) {
557 SkAutoMutexAcquire am(gMutex);
558 get_cache()->add(rec);
559 }
560
640 /////////////////////////////////////////////////////////////////////////////// 561 ///////////////////////////////////////////////////////////////////////////////
641 562
642 #include "SkGraphics.h" 563 #include "SkGraphics.h"
643 564
644 size_t SkGraphics::GetImageCacheTotalBytesUsed() { 565 size_t SkGraphics::GetImageCacheTotalBytesUsed() {
645 return SkScaledImageCache::GetTotalBytesUsed(); 566 return SkScaledImageCache::GetTotalBytesUsed();
646 } 567 }
647 568
648 size_t SkGraphics::GetImageCacheTotalByteLimit() { 569 size_t SkGraphics::GetImageCacheTotalByteLimit() {
649 return SkScaledImageCache::GetTotalByteLimit(); 570 return SkScaledImageCache::GetTotalByteLimit();
650 } 571 }
651 572
652 size_t SkGraphics::SetImageCacheTotalByteLimit(size_t newLimit) { 573 size_t SkGraphics::SetImageCacheTotalByteLimit(size_t newLimit) {
653 return SkScaledImageCache::SetTotalByteLimit(newLimit); 574 return SkScaledImageCache::SetTotalByteLimit(newLimit);
654 } 575 }
655 576
656 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() { 577 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() {
657 return SkScaledImageCache::GetSingleAllocationByteLimit(); 578 return SkScaledImageCache::GetSingleAllocationByteLimit();
658 } 579 }
659 580
660 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) { 581 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) {
661 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit); 582 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit);
662 } 583 }
663 584
OLDNEW
« no previous file with comments | « src/core/SkScaledImageCache.h ('k') | src/lazy/SkCachingPixelRef.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698