Index: src/core/SkScaledImageCache.cpp
diff --git a/src/core/SkScaledImageCache.cpp b/src/core/SkScaledImageCache.cpp
index 11a0ee448f9c7cd590ff81c3409afded24163d8d..aadcfab4ec402a086838428f457decf4023babb8 100644
--- a/src/core/SkScaledImageCache.cpp
+++ b/src/core/SkScaledImageCache.cpp
@@ -7,6 +7,7 @@
 #include "SkScaledImageCache.h"
 #include "SkMipMap.h"
+#include "SkOnce.h"
 #include "SkPixelRef.h"
 #include "SkRect.h"
@@ -14,6 +15,13 @@
 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
 #endif
+static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
+    return reinterpret_cast<SkScaledImageCache::ID*>(rec);
+}
+
+static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
+    return reinterpret_cast<SkScaledImageCache::Rec*>(id);
+}
 // Implemented from en.wikipedia.org/wiki/MurmurHash.
 static uint32_t compute_hash(const uint32_t data[], int count) {
@@ -54,12 +62,23 @@ struct Key {
         fGenID = pr->getGenerationID();
         fBounds.set(x, y, x + bm.width(), y + bm.height());
-        fScaleX = scaleX;
-        fScaleY = scaleY;
+        fScaleX = SkScalarToFloat(scaleX);
+        fScaleY = SkScalarToFloat(scaleY);
         fHash = compute_hash(&fGenID, 7);
         return true;
     }
+    void init(uint32_t genID,
+              SkScalar scaleX,
| 
 
    reed1 (2013/10/24 21:07:04):
        do we *always* pass in 1.0 for these today? If so,

    hal.canary (2013/10/25 16:37:10):
        I have just cleaned this code up a lot. Now there
+              SkScalar scaleY,
+              int32_t width,
+              int32_t height) {
+        fGenID = genID;
+        fScaleX = SkScalarToFloat(scaleX);
+        fScaleY = SkScalarToFloat(scaleY);
+        fBounds.set(0, 0, width, height);
+        fHash = compute_hash(&fGenID, 7);
+    }
     bool operator<(const Key& other) const {
         const uint32_t* a = &fGenID;
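Both Key::set() and the new Key::init() compute fHash as compute_hash(&fGenID, 7): seven consecutive 32-bit words starting at fGenID (the generation ID, the two float scale factors, and the four bounds coordinates), the same word range operator< walks from &fGenID. The exact member order inside Key is not visible in this hunk, so the following is only an illustrative sketch of that 7-word layout, with a stand-in mixer in place of Skia's MurmurHash-derived compute_hash():

    // Illustrative sketch only -- not Skia's Key or its compute_hash().
    #include <stdint.h>
    #include <string.h>

    struct ToyKey {
        uint32_t genID;                     // pixel-ref generation ID
        float    scaleX, scaleY;            // stored as float via SkScalarToFloat()
        int32_t  left, top, right, bottom;  // bounds
    };                                      // 7 x 32-bit words of hashable payload

    static uint32_t hash_words(const uint32_t data[], int count) {
        uint32_t h = 0;
        for (int i = 0; i < count; ++i) {
            h = (h ^ data[i]) * 0x9E3779B1u;  // stand-in mixing step
        }
        return h;
    }

    static uint32_t hash_toy_key(const ToyKey& key) {
        uint32_t words[7];
        memcpy(words, &key, sizeof(words));  // 28 bytes == 7 words, no padding here
        return hash_words(words, 7);
    }
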
@@ -174,6 +193,15 @@ SkScaledImageCache::~SkScaledImageCache() {
     delete fHash;
 }
+static inline SkScaledImageCache::Rec* find_rec_in_list(
+        SkScaledImageCache::Rec* head, const Key & key) {
+    SkScaledImageCache::Rec* rec = head;
+    while ((rec != NULL) && !(rec->fKey == key)) {
| 
 
    reed1 (2013/10/24 21:07:04):
        !(a == b)   ?
        seems like we should have != operat

    hal.canary (2013/10/25 16:37:10):
        Done.
+        rec = rec->fNext;
+    }
+    return rec;
+}
+
 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                          SkScalar scaleX,
                                                          SkScalar scaleY) {
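Regarding reed1's inline suggestion above that Key grow a != operator (which hal.canary marks "Done" in a later patch set): the operator itself is not part of this snapshot, but it would presumably be the usual negation of the existing operator==, written as a Key member along these lines:

    // Hypothetical sketch -- the actual follow-up change is not shown in this diff.
    bool operator!=(const Key& other) const {
        return !(*this == other);
    }

With that in place, the loop condition in find_rec_in_list() above can read (rec != NULL) && (rec->fKey != key).
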
@@ -185,15 +213,8 @@ SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig,
 #ifdef USE_HASH
     Rec* rec = fHash->find(key);
 
    scroggo (2013/10/24 20:41:39):
        Can this whole #if #else be a helper function?
        Re

    hal.canary (2013/10/25 16:37:10):
        I have moved some things around to remove this repe
 #else
-    Rec* rec = fHead;
-    while (rec != NULL) {
-        if (rec->fKey == key) {
-            break;
-        }
-        rec = rec->fNext;
-    }
+    Rec* rec = find_rec_in_list(fHead, key);
 #endif
-
     if (rec) {
         this->moveToHead(rec); // for our LRU
         rec->fLockCount += 1;
@@ -201,6 +222,29 @@ SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig,
     return rec;
 }
+
+SkScaledImageCache::ID* SkScaledImageCache::findAndLock(
+        uint32_t pixelGenerationID,
+        int32_t width,
+        int32_t height,
+        SkBitmap* scaled) {
+    Key key;
+    key.init(pixelGenerationID, SK_Scalar1, SK_Scalar1, width, height);
+#ifdef USE_HASH
+    Rec* rec = fHash->find(key);
+#else
+    Rec* rec = find_rec_in_list(fHead, key);
+#endif
+    if (rec) {
+        this->moveToHead(rec); // for our LRU
+        rec->fLockCount += 1;
+        SkASSERT(NULL == rec->fMip);
+        SkASSERT(rec->fBitmap.pixelRef());
+        *scaled = rec->fBitmap;
+    }
+    return rec_to_id(rec);
+}
+
 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                         SkScalar scaleX,
                                                         SkScalar scaleY,
@@ -216,7 +260,7 @@ SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
         SkASSERT(rec->fBitmap.pixelRef());
         *scaled = rec->fBitmap;
     }
-    return (ID*)rec;
+    return rec_to_id(rec);
 }
 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
@@ -227,7 +271,27 @@ SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
         SkASSERT(NULL == rec->fBitmap.pixelRef());
         *mip = rec->fMip;
     }
-    return (ID*)rec;
+    return rec_to_id(rec);
+}
+
+SkScaledImageCache::ID* SkScaledImageCache::addAndLock(
+        uint32_t pixelGenerationID,
+        int32_t width,
+        int32_t height,
+        const SkBitmap& scaled) {
+    Key key;
+    key.init(pixelGenerationID, SK_Scalar1, SK_Scalar1, width, height);
+    Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
+    this->addToHead(rec);
+    SkASSERT(1 == rec->fLockCount);
+
+#ifdef USE_HASH
+    fHash->add(rec);
+#endif
+
+    // We may (now) be overbudget, so see if we need to purge something.
+    this->purgeAsNeeded();
+    return rec_to_id(rec);
 }
 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
@@ -254,7 +318,7 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
     // We may (now) be overbudget, so see if we need to purge something.
     this->purgeAsNeeded();
-    return (ID*)rec;
+    return rec_to_id(rec);
 }
 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
@@ -274,7 +338,7 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
     // We may (now) be overbudget, so see if we need to purge something.
     this->purgeAsNeeded();
-    return (ID*)rec;
+    return rec_to_id(rec);
 }
 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
@@ -285,7 +349,7 @@ void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
         bool found = false;
         Rec* rec = fHead;
         while (rec != NULL) {
-            if ((ID*)rec == id) {
+            if (rec == id_to_rec(id)) {
                 found = true;
                 break;
             }
@@ -294,7 +358,7 @@ void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
         SkASSERT(found);
     }
 #endif
-    Rec* rec = (Rec*)id;
+    Rec* rec = id_to_rec(id);
     SkASSERT(rec->fLockCount > 0);
     rec->fLockCount -= 1;
@@ -451,14 +515,38 @@ void SkScaledImageCache::validate() const {
 SK_DECLARE_STATIC_MUTEX(gMutex);
+static void create_cache(SkScaledImageCache** cache) {
+    *cache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
+}
+
 static SkScaledImageCache* get_cache() {
-    static SkScaledImageCache* gCache;
-    if (!gCache) {
-        gCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
-    }
+    static SkScaledImageCache* gCache(NULL);
+    SK_DECLARE_STATIC_ONCE(create_cache_once);
+    SkOnce<SkScaledImageCache**>(&create_cache_once, create_cache, &gCache);
+    SkASSERT(NULL != gCache);
     return gCache;
 }
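The get_cache() rewrite above swaps the hand-rolled lazy initialization for Skia's SkOnce helper (hence the new SkOnce.h include at the top of the file), so create_cache() is guaranteed to run exactly once no matter how many threads reach get_cache() first. For readers more familiar with standard C++, roughly the same pattern with std::call_once looks like the sketch below; this is an analogy only, not Skia's SkOnce API, and ToyCache is a made-up stand-in for the real cache class.

    #include <cstddef>
    #include <mutex>

    struct ToyCache {
        explicit ToyCache(std::size_t byteLimit) : fByteLimit(byteLimit) {}
        std::size_t fByteLimit;
    };

    // Construct the singleton exactly once, even with concurrent callers.
    static ToyCache* get_toy_cache() {
        static ToyCache* gCache = nullptr;
        static std::once_flag gOnce;
        std::call_once(gOnce, [] {
            gCache = new ToyCache(2 * 1024 * 1024);  // cf. SK_DEFAULT_IMAGE_CACHE_LIMIT
        });
        return gCache;
    }
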
+
+SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
+        uint32_t pixelGenerationID,
+        int32_t width,
+        int32_t height,
+        SkBitmap* scaled) {
+    SkAutoMutexAcquire am(gMutex);
+    return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
+}
+
+SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
+        uint32_t pixelGenerationID,
+        int32_t width,
+        int32_t height,
+        const SkBitmap& scaled) {
+    SkAutoMutexAcquire am(gMutex);
+    return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
+}
+
+
 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
                                                         SkScalar scaleX,
                                                         SkScalar scaleY,
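Taken together, the new static wrappers give callers a generation-ID-keyed path into the cache: they take gMutex and forward to the singleton's findAndLock()/addAndLock(), mirroring the existing bitmap-keyed wrappers just above. A hedged usage sketch follows; cache_by_gen_id() is a made-up caller, and SkScaledImageCache::Unlock() is assumed from the existing header since it does not appear in this diff.

    #include "SkScaledImageCache.h"

    // Sketch only: look up (or insert) 'decoded' under its pixel-ref generation ID.
    static void cache_by_gen_id(uint32_t genID, const SkBitmap& decoded) {
        SkBitmap cached;
        SkScaledImageCache::ID* id = SkScaledImageCache::FindAndLock(
                genID, decoded.width(), decoded.height(), &cached);
        if (NULL == id) {
            // Cache miss: AddAndLock returns the new entry already locked.
            id = SkScaledImageCache::AddAndLock(
                    genID, decoded.width(), decoded.height(), decoded);
        }
        // ... use 'cached' (on a hit) or 'decoded' (on a miss) while the entry is locked ...
        SkScaledImageCache::Unlock(id);  // every *AndLock must be paired with an Unlock
    }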