Chromium Code Reviews| Index: src/core/SkScaledImageCache.cpp |
| diff --git a/src/core/SkScaledImageCache.cpp b/src/core/SkScaledImageCache.cpp |
| index ea29843c9217984bd4f7c31e7da050edad901286..f9830af705efdc17122b8553c29408301ca50f63 100644 |
| --- a/src/core/SkScaledImageCache.cpp |
| +++ b/src/core/SkScaledImageCache.cpp |
| @@ -23,7 +23,7 @@ static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) { |
| return reinterpret_cast<SkScaledImageCache::Rec*>(id); |
| } |
| - // Implemented from en.wikipedia.org/wiki/MurmurHash. |
| +// Implemented from en.wikipedia.org/wiki/MurmurHash. |
| static uint32_t compute_hash(const uint32_t data[], int count) { |
| uint32_t hash = 0; |
| @@ -128,7 +128,7 @@ struct SkScaledImageCache::Rec { |
| #include "SkTDynamicHash.h" |
| -namespace { // can't use static functions w/ template parameters |
| +namespace { // can't use static functions w/ template parameters |
| const Key& key_from_rec(const SkScaledImageCache::Rec& rec) { |
| return rec.fKey; |
| } |
| @@ -140,7 +140,7 @@ uint32_t hash_from_key(const Key& key) { |
| bool eq_rec_key(const SkScaledImageCache::Rec& rec, const Key& key) { |
| return rec.fKey == key; |
| } |
| -} |
| +} // namespace |
| class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec, |
| Key, key_from_rec, hash_from_key, |
| @@ -162,7 +162,8 @@ static inline SkScaledImageCache::Rec* find_rec_in_list( |
| } |
| #endif |
| -SkScaledImageCache::SkScaledImageCache(size_t byteLimit) { |
| +SkScaledImageCache::SkScaledImageCache(size_t byteLimit, SkBaseMutex* mutex) |
| + : fMutex(mutex) { |
| fHead = NULL; |
| fTail = NULL; |
| #ifdef USE_HASH |
| @@ -230,6 +231,7 @@ SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID, |
| int32_t width, |
| int32_t height, |
| SkBitmap* bitmap) { |
| + SkAutoMutexAcquire am(fMutex); |
| Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1, |
| SkIRect::MakeWH(width, height)); |
| if (rec) { |
| @@ -244,6 +246,7 @@ SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig, |
| SkScalar scaleX, |
| SkScalar scaleY, |
| SkBitmap* scaled) { |
| + SkAutoMutexAcquire am(fMutex); |
| if (0 == scaleX || 0 == scaleY) { |
| // degenerate, and the key we use for mipmaps |
| return NULL; |
| @@ -260,6 +263,7 @@ SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig, |
| SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig, |
| SkMipMap const ** mip) { |
| + SkAutoMutexAcquire am(fMutex); |
| Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0, |
| get_bounds_from_bitmap(orig)); |
| if (rec) { |
| @@ -291,6 +295,7 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID, |
| int32_t width, |
| int32_t height, |
| const SkBitmap& bitmap) { |
| + SkAutoMutexAcquire am(fMutex); |
| Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height)); |
| Rec* rec = SkNEW_ARGS(Rec, (key, bitmap)); |
| this->addAndLock(rec); |
| @@ -301,6 +306,7 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, |
| SkScalar scaleX, |
| SkScalar scaleY, |
| const SkBitmap& scaled) { |
| + SkAutoMutexAcquire am(fMutex); |
| if (0 == scaleX || 0 == scaleY) { |
| // degenerate, and the key we use for mipmaps |
| return NULL; |
| @@ -317,6 +323,7 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, |
| SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, |
| const SkMipMap* mip) { |
| + SkAutoMutexAcquire am(fMutex); |
| SkIRect bounds = get_bounds_from_bitmap(orig); |
| if (bounds.isEmpty()) { |
| return NULL; |
| @@ -328,6 +335,7 @@ SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, |
| } |
| void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { |
| + SkAutoMutexAcquire am(fMutex); |
| SkASSERT(id); |
| #ifdef SK_DEBUG |
| @@ -356,7 +364,7 @@ void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { |
| } |
| void SkScaledImageCache::purgeAsNeeded() { |
| - size_t byteLimit = fByteLimit; |
| + const size_t & byteLimit = fByteLimit; |
| size_t bytesUsed = fBytesUsed; |
| Rec* rec = fTail; |
| @@ -383,6 +391,7 @@ void SkScaledImageCache::purgeAsNeeded() { |
| } |
| size_t SkScaledImageCache::setByteLimit(size_t newLimit) { |
| + SkAutoMutexAcquire am(fMutex); |
| size_t prevLimit = fByteLimit; |
| fByteLimit = newLimit; |
| if (newLimit < prevLimit) { |
| @@ -391,6 +400,16 @@ size_t SkScaledImageCache::setByteLimit(size_t newLimit) { |
| return prevLimit; |
| } |
| +size_t SkScaledImageCache::getBytesUsed() const { |
|
reed1 (2013/11/01 18:22:27):
Why is this better than the inlines?

hal.canary (2013/11/01 18:52:59):
I did it because it exactly matches the behavior of […comment truncated in page extraction]
|
| + SkAutoMutexAcquire am(fMutex); |
| + return fBytesUsed; |
| +} |
| + |
| +size_t SkScaledImageCache::getByteLimit() const { |
| + SkAutoMutexAcquire am(fMutex); |
| + return fByteLimit; |
| +} |
| + |
| /////////////////////////////////////////////////////////////////////////////// |
| void SkScaledImageCache::detach(Rec* rec) { |
| @@ -480,6 +499,7 @@ void SkScaledImageCache::validate() const { |
| rec = rec->fNext; |
| } |
| SkASSERT(fCount == count); |
| + SkASSERT(fBytesUsed == used); |
| rec = fTail; |
| while (rec) { |
| @@ -499,13 +519,13 @@ void SkScaledImageCache::validate() const { |
| #include "SkThread.h" |
| -SK_DECLARE_STATIC_MUTEX(gMutex); |
| - |
| static void create_cache(SkScaledImageCache** cache) { |
| - *cache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT)); |
| + SK_DECLARE_STATIC_MUTEX(mutex); |
| + *cache = SkNEW_ARGS(SkScaledImageCache, |
| + (SK_DEFAULT_IMAGE_CACHE_LIMIT, &mutex)); |
| } |
| -static SkScaledImageCache* get_cache() { |
| +SkScaledImageCache* SkScaledImageCache::GetGlobalInstance() { |
| static SkScaledImageCache* gCache(NULL); |
| SK_DECLARE_STATIC_ONCE(create_cache_once); |
| SkOnce<SkScaledImageCache**>(&create_cache_once, create_cache, &gCache); |
| @@ -519,8 +539,8 @@ SkScaledImageCache::ID* SkScaledImageCache::FindAndLock( |
| int32_t width, |
| int32_t height, |
| SkBitmap* scaled) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->findAndLock(pixelGenerationID, width, height, scaled); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->findAndLock(pixelGenerationID, width, height, scaled); |
| } |
| SkScaledImageCache::ID* SkScaledImageCache::AddAndLock( |
| @@ -528,8 +548,8 @@ SkScaledImageCache::ID* SkScaledImageCache::AddAndLock( |
| int32_t width, |
| int32_t height, |
| const SkBitmap& scaled) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->addAndLock(pixelGenerationID, width, height, scaled); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->addAndLock(pixelGenerationID, width, height, scaled); |
| } |
| @@ -537,48 +557,48 @@ SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig, |
| SkScalar scaleX, |
| SkScalar scaleY, |
| SkBitmap* scaled) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->findAndLock(orig, scaleX, scaleY, scaled); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->findAndLock(orig, scaleX, scaleY, scaled); |
| } |
| SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig, |
| SkMipMap const ** mip) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->findAndLockMip(orig, mip); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->findAndLockMip(orig, mip); |
| } |
| SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig, |
| SkScalar scaleX, |
| SkScalar scaleY, |
| const SkBitmap& scaled) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->addAndLock(orig, scaleX, scaleY, scaled); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->addAndLock(orig, scaleX, scaleY, scaled); |
| } |
| SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig, |
| const SkMipMap* mip) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->addAndLockMip(orig, mip); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->addAndLockMip(orig, mip); |
| } |
| void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->unlock(id); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->unlock(id); |
| } |
| size_t SkScaledImageCache::GetBytesUsed() { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->getBytesUsed(); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->getBytesUsed(); |
| } |
| size_t SkScaledImageCache::GetByteLimit() { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->getByteLimit(); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->getByteLimit(); |
| } |
| size_t SkScaledImageCache::SetByteLimit(size_t newLimit) { |
| - SkAutoMutexAcquire am(gMutex); |
| - return get_cache()->setByteLimit(newLimit); |
| + SkScaledImageCache* cache = SkScaledImageCache::GetGlobalInstance(); |
| + return cache->setByteLimit(newLimit); |
| } |
| /////////////////////////////////////////////////////////////////////////////// |
| @@ -596,3 +616,4 @@ size_t SkGraphics::GetImageCacheByteLimit() { |
| size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) { |
| return SkScaledImageCache::SetByteLimit(newLimit); |
| } |
| + |