OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2013 Google Inc. | 2 * Copyright 2013 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "SkScaledImageCache.h" | 8 #include "SkScaledImageCache.h" |
9 #include "SkMipMap.h" | 9 #include "SkMipMap.h" |
10 #include "SkOnce.h" | |
10 #include "SkPixelRef.h" | 11 #include "SkPixelRef.h" |
11 #include "SkRect.h" | 12 #include "SkRect.h" |
12 | 13 |
13 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT | 14 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT |
14 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) | 15 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) |
15 #endif | 16 #endif |
16 | 17 |
18 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) { | |
19 return reinterpret_cast<SkScaledImageCache::ID*>(rec); | |
20 } | |
21 | |
22 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) { | |
23 return reinterpret_cast<SkScaledImageCache::Rec*>(id); | |
24 } | |
17 | 25 |
18 // Implemented from en.wikipedia.org/wiki/MurmurHash. | 26 // Implemented from en.wikipedia.org/wiki/MurmurHash. |
19 static uint32_t compute_hash(const uint32_t data[], int count) { | 27 static uint32_t compute_hash(const uint32_t data[], int count) { |
20 uint32_t hash = 0; | 28 uint32_t hash = 0; |
21 | 29 |
22 for (int i = 0; i < count; ++i) { | 30 for (int i = 0; i < count; ++i) { |
23 uint32_t k = data[i]; | 31 uint32_t k = data[i]; |
24 k *= 0xcc9e2d51; | 32 k *= 0xcc9e2d51; |
25 k = (k << 15) | (k >> 17); | 33 k = (k << 15) | (k >> 17); |
26 k *= 0x1b873593; | 34 k *= 0x1b873593; |
27 | 35 |
28 hash ^= k; | 36 hash ^= k; |
29 hash = (hash << 13) | (hash >> 19); | 37 hash = (hash << 13) | (hash >> 19); |
30 hash *= 5; | 38 hash *= 5; |
31 hash += 0xe6546b64; | 39 hash += 0xe6546b64; |
32 } | 40 } |
33 | 41 |
34 // hash ^= size; | 42 // hash ^= size; |
35 hash ^= hash >> 16; | 43 hash ^= hash >> 16; |
36 hash *= 0x85ebca6b; | 44 hash *= 0x85ebca6b; |
37 hash ^= hash >> 13; | 45 hash ^= hash >> 13; |
38 hash *= 0xc2b2ae35; | 46 hash *= 0xc2b2ae35; |
39 hash ^= hash >> 16; | 47 hash ^= hash >> 16; |
40 | 48 |
41 return hash; | 49 return hash; |
42 } | 50 } |
43 | 51 |
44 struct Key { | 52 struct Key { |
45 bool init(const SkBitmap& bm, SkScalar scaleX, SkScalar scaleY) { | 53 Key(uint32_t genID, |
46 SkPixelRef* pr = bm.pixelRef(); | 54 SkScalar scaleX, |
47 if (!pr) { | 55 SkScalar scaleY, |
48 return false; | 56 SkIRect bounds) |
49 } | 57 : fGenID(genID) |
50 | 58 , fScaleX(scaleX) |
51 size_t x, y; | 59 , fScaleY(scaleY) |
52 SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x); | 60 , fBounds(bounds) { |
53 x >>= 2; | |
54 | |
55 fGenID = pr->getGenerationID(); | |
56 fBounds.set(x, y, x + bm.width(), y + bm.height()); | |
57 fScaleX = scaleX; | |
58 fScaleY = scaleY; | |
59 | |
60 fHash = compute_hash(&fGenID, 7); | 61 fHash = compute_hash(&fGenID, 7); |
61 return true; | |
62 } | 62 } |
63 | 63 |
64 bool operator<(const Key& other) const { | 64 bool operator<(const Key& other) const { |
65 const uint32_t* a = &fGenID; | 65 const uint32_t* a = &fGenID; |
66 const uint32_t* b = &other.fGenID; | 66 const uint32_t* b = &other.fGenID; |
67 for (int i = 0; i < 7; ++i) { | 67 for (int i = 0; i < 7; ++i) { |
68 if (a[i] < b[i]) { | 68 if (a[i] < b[i]) { |
69 return true; | 69 return true; |
70 } | 70 } |
71 if (a[i] > b[i]) { | 71 if (a[i] > b[i]) { |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
144 | 144 |
145 class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec, | 145 class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec, |
146 Key, key_from_rec, hash_from_key, | 146 Key, key_from_rec, hash_from_key, |
147 eq_rec_key> {}; | 147 eq_rec_key> {}; |
148 | 148 |
149 /////////////////////////////////////////////////////////////////////////////// | 149 /////////////////////////////////////////////////////////////////////////////// |
150 | 150 |
151 // experimental hash to speed things up | 151 // experimental hash to speed things up |
152 #define USE_HASH | 152 #define USE_HASH |
153 | 153 |
154 #if !defined(USE_HASH) | |
155 static inline SkScaledImageCache::Rec* find_rec_in_list( | |
156 SkScaledImageCache::Rec* head, const Key & key) { | |
157 SkScaledImageCache::Rec* rec = head; | |
158 while ((rec != NULL) && (rec->fKey != key)) { | |
159 rec = rec->fNext; | |
160 } | |
161 return rec; | |
162 } | |
163 #endif | |
164 | |
154 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) { | 165 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) { |
155 fHead = NULL; | 166 fHead = NULL; |
156 fTail = NULL; | 167 fTail = NULL; |
157 #ifdef USE_HASH | 168 #ifdef USE_HASH |
158 fHash = new Hash; | 169 fHash = new Hash; |
159 #else | 170 #else |
160 fHash = NULL; | 171 fHash = NULL; |
161 #endif | 172 #endif |
162 fBytesUsed = 0; | 173 fBytesUsed = 0; |
163 fByteLimit = byteLimit; | 174 fByteLimit = byteLimit; |
164 fCount = 0; | 175 fCount = 0; |
165 } | 176 } |
166 | 177 |
167 SkScaledImageCache::~SkScaledImageCache() { | 178 SkScaledImageCache::~SkScaledImageCache() { |
168 Rec* rec = fHead; | 179 Rec* rec = fHead; |
169 while (rec) { | 180 while (rec) { |
170 Rec* next = rec->fNext; | 181 Rec* next = rec->fNext; |
171 SkDELETE(rec); | 182 SkDELETE(rec); |
172 rec = next; | 183 rec = next; |
173 } | 184 } |
174 delete fHash; | 185 delete fHash; |
175 } | 186 } |
176 | 187 |
177 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig, | 188 //////////////////////////////////////////////////////////////////////////////// |
178 SkScalar scaleX, | |
179 SkScalar scaleY) { | |
180 Key key; | |
181 if (!key.init(orig, scaleX, scaleY)) { | |
182 return NULL; | |
183 } | |
184 | 189 |
190 /** | |
191 This private method is the fully general record finder. All other | |
192 record finders should call this function. */
193 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID, | |
194 SkScalar scaleX, | |
195 SkScalar scaleY, | |
196 const SkIRect& bound) { | |
scroggo
2013/10/25 18:09:48
nit: bounds
hal.canary
2013/10/25 20:49:38
Done.
| |
197 Key key(genID, scaleX, scaleY, bound); | |
185 #ifdef USE_HASH | 198 #ifdef USE_HASH |
186 Rec* rec = fHash->find(key); | 199 Rec* rec = fHash->find(key); |
187 #else | 200 #else |
188 Rec* rec = fHead; | 201 Rec* rec = find_rec_in_list(fHead, key); |
189 while (rec != NULL) { | |
190 if (rec->fKey == key) { | |
191 break; | |
192 } | |
193 rec = rec->fNext; | |
194 } | |
195 #endif | 202 #endif |
196 | |
197 if (rec) { | 203 if (rec) { |
198 this->moveToHead(rec); // for our LRU | 204 this->moveToHead(rec); // for our LRU |
199 rec->fLockCount += 1; | 205 rec->fLockCount += 1; |
200 } | 206 } |
201 return rec; | 207 return rec; |
202 } | 208 } |
203 | 209 |
210 /** | |
211 This function finds the bounds of the bitmap *within its pixelRef*. | |
212 If the bitmap lacks a pixelRef, it will return false, since that | |
213 doesn't make sense. This may be a useful enough function that it |
214 should be somewhere else (in SkBitmap?). */ | |
215 static bool get_bounds_from_bitmap(const SkBitmap& bm, SkIRect * bounds) { | |
216 if (!(bm.pixelRef())) { | |
217 return false; | |
218 } | |
219 size_t x, y; | |
220 SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x); | |
221 x >>= bm.shiftPerPixel(); | |
222 bounds->set(x, y, x + bm.width(), y + bm.height()); | |
223 return true; | |
224 } | |
225 | |
226 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID, | |
227 int32_t width, | |
228 int32_t height, | |
229 SkBitmap* bitmap) { | |
230 SkIRect bounds = SkIRect::MakeWH(width, height); | |
231 Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1, bounds); | |
232 if (rec) { | |
233 SkASSERT(NULL == rec->fMip); | |
234 SkASSERT(rec->fBitmap.pixelRef()); | |
235 *bitmap = rec->fBitmap; | |
236 } | |
237 return rec_to_id(rec); | |
238 } | |
239 | |
204 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig, | 240 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig, |
205 SkScalar scaleX, | 241 SkScalar scaleX, |
206 SkScalar scaleY, | 242 SkScalar scaleY, |
207 SkBitmap* scaled) { | 243 SkBitmap* scaled) { |
208 if (0 == scaleX || 0 == scaleY) { | 244 if (0 == scaleX || 0 == scaleY) { |
209 // degenerate, and the key we use for mipmaps | 245 // degenerate, and the key we use for mipmaps |
210 return NULL; | 246 return NULL; |
211 } | 247 } |
212 | 248 SkIRect bounds; |
213 Rec* rec = this->findAndLock(orig, scaleX, scaleY); | 249 if (!get_bounds_from_bitmap(orig, &bounds)) { |
250 return NULL; | |
251 } | |
252 Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX, | |
253 scaleY, bounds); | |
scroggo
2013/10/25 18:09:48
This pattern is repeated 4 times. Helper function?
hal.canary
2013/10/25 20:49:38
Really, it is two times for findAndLock and two ti
| |
214 if (rec) { | 254 if (rec) { |
215 SkASSERT(NULL == rec->fMip); | 255 SkASSERT(NULL == rec->fMip); |
216 SkASSERT(rec->fBitmap.pixelRef()); | 256 SkASSERT(rec->fBitmap.pixelRef()); |
217 *scaled = rec->fBitmap; | 257 *scaled = rec->fBitmap; |
218 } | 258 } |
219 return (ID*)rec; | 259 return rec_to_id(rec); |
220 } | 260 } |
221 | 261 |
222 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig, | 262 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig, |
223 SkMipMap const ** mip ) { | 263 SkMipMap const ** mip ) { |
224 Rec* rec = this->findAndLock(orig, 0, 0); | 264 SkIRect bounds; |
265 if (!get_bounds_from_bitmap(orig, &bounds)) { | |
266 return NULL; | |
267 } | |
268 Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0, bounds); | |
225 if (rec) { | 269 if (rec) { |
226 SkASSERT(rec->fMip); | 270 SkASSERT(rec->fMip); |
227 SkASSERT(NULL == rec->fBitmap.pixelRef()); | 271 SkASSERT(NULL == rec->fBitmap.pixelRef()); |
228 *mip = rec->fMip; | 272 *mip = rec->fMip; |
229 } | 273 } |
230 return (ID*)rec; | 274 return rec_to_id(rec); |
275 } | |
276 | |
277 | |
278 //////////////////////////////////////////////////////////////////////////////// | |
279 /** | |
280 This private method is the fully general record adder. All other | |
281 record adders should call this function. */
282 void SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) { | |
283 SkASSERT(rec); | |
284 this->addToHead(rec); | |
285 SkASSERT(1 == rec->fLockCount); | |
286 #ifdef USE_HASH | |
287 SkASSERT(fHash); | |
288 fHash->add(rec); | |
289 #endif | |
290 // We may (now) be overbudget, so see if we need to purge something. | |
291 this->purgeAsNeeded(); | |
292 } | |
293 | |
294 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID, | |
295 int32_t width, | |
296 int32_t height, | |
297 const SkBitmap& bitmap) { | |
298 SkIRect bounds = SkIRect::MakeWH(width, height); | |
299 Key key(genID, SK_Scalar1, SK_Scalar1, bounds); | |
300 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap)); | |
301 this->addAndLock(rec); | |
302 return rec_to_id(rec); | |
231 } | 303 } |
232 | 304 |
233 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, | 305 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, |
234 SkScalar scaleX, | 306 SkScalar scaleX, |
235 SkScalar scaleY, | 307 SkScalar scaleY, |
236 const SkBitmap& scaled) { | 308 const SkBitmap& scaled) { |
237 if (0 == scaleX || 0 == scaleY) { | 309 if (0 == scaleX || 0 == scaleY) { |
238 // degenerate, and the key we use for mipmaps | 310 // degenerate, and the key we use for mipmaps |
239 return NULL; | 311 return NULL; |
240 } | 312 } |
241 | 313 SkIRect bounds; |
242 Key key; | 314 if (!get_bounds_from_bitmap(orig, &bounds)) { |
243 if (!key.init(orig, scaleX, scaleY)) { | |
244 return NULL; | 315 return NULL; |
245 } | 316 } |
246 | 317 Key key(orig.getGenerationID(), scaleX, scaleY, bounds); |
247 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); | 318 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); |
248 this->addToHead(rec); | 319 this->addAndLock(rec); |
249 SkASSERT(1 == rec->fLockCount); | 320 return rec_to_id(rec); |
250 | |
251 #ifdef USE_HASH | |
252 fHash->add(rec); | |
253 #endif | |
254 | |
255 // We may (now) be overbudget, so see if we need to purge something. | |
256 this->purgeAsNeeded(); | |
257 return (ID*)rec; | |
258 } | 321 } |
259 | 322 |
260 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, | 323 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, |
261 const SkMipMap* mip) { | 324 const SkMipMap* mip) { |
262 Key key; | 325 SkIRect bounds; |
263 if (!key.init(orig, 0, 0)) { | 326 if (!get_bounds_from_bitmap(orig, &bounds)) { |
264 return NULL; | 327 return NULL; |
265 } | 328 } |
266 | 329 Key key(orig.getGenerationID(), 0, 0, bounds); |
267 Rec* rec = SkNEW_ARGS(Rec, (key, mip)); | 330 Rec* rec = SkNEW_ARGS(Rec, (key, mip)); |
268 this->addToHead(rec); | 331 this->addAndLock(rec); |
269 SkASSERT(1 == rec->fLockCount); | 332 return rec_to_id(rec); |
270 | |
271 #ifdef USE_HASH | |
272 fHash->add(rec); | |
273 #endif | |
274 | |
275 // We may (now) be overbudget, so see if we need to purge something. | |
276 this->purgeAsNeeded(); | |
277 return (ID*)rec; | |
278 } | 333 } |
279 | 334 |
280 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { | 335 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { |
281 SkASSERT(id); | 336 SkASSERT(id); |
282 | 337 |
283 #ifdef SK_DEBUG | 338 #ifdef SK_DEBUG |
284 { | 339 { |
285 bool found = false; | 340 bool found = false; |
286 Rec* rec = fHead; | 341 Rec* rec = fHead; |
287 while (rec != NULL) { | 342 while (rec != NULL) { |
288 if ((ID*)rec == id) { | 343 if (rec == id_to_rec(id)) { |
289 found = true; | 344 found = true; |
290 break; | 345 break; |
291 } | 346 } |
292 rec = rec->fNext; | 347 rec = rec->fNext; |
293 } | 348 } |
294 SkASSERT(found); | 349 SkASSERT(found); |
295 } | 350 } |
296 #endif | 351 #endif |
297 Rec* rec = (Rec*)id; | 352 Rec* rec = id_to_rec(id); |
298 SkASSERT(rec->fLockCount > 0); | 353 SkASSERT(rec->fLockCount > 0); |
299 rec->fLockCount -= 1; | 354 rec->fLockCount -= 1; |
300 | 355 |
301 // we may have been over-budget, but now have released something, so check | 356 // we may have been over-budget, but now have released something, so check |
302 // if we should purge. | 357 // if we should purge. |
303 if (0 == rec->fLockCount) { | 358 if (0 == rec->fLockCount) { |
304 this->purgeAsNeeded(); | 359 this->purgeAsNeeded(); |
305 } | 360 } |
306 } | 361 } |
307 | 362 |
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
444 SkASSERT(0 == used); | 499 SkASSERT(0 == used); |
445 } | 500 } |
446 #endif | 501 #endif |
447 | 502 |
448 /////////////////////////////////////////////////////////////////////////////// | 503 /////////////////////////////////////////////////////////////////////////////// |
449 | 504 |
450 #include "SkThread.h" | 505 #include "SkThread.h" |
451 | 506 |
452 SK_DECLARE_STATIC_MUTEX(gMutex); | 507 SK_DECLARE_STATIC_MUTEX(gMutex); |
453 | 508 |
509 static void create_cache(SkScaledImageCache** cache) { | |
510 *cache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT)); | |
511 } | |
512 | |
454 static SkScaledImageCache* get_cache() { | 513 static SkScaledImageCache* get_cache() { |
455 static SkScaledImageCache* gCache; | 514 static SkScaledImageCache* gCache(NULL); |
456 if (!gCache) { | 515 SK_DECLARE_STATIC_ONCE(create_cache_once); |
457 gCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT)); | 516 SkOnce<SkScaledImageCache**>(&create_cache_once, create_cache, &gCache); |
458 } | 517 SkASSERT(NULL != gCache); |
459 return gCache; | 518 return gCache; |
460 } | 519 } |
461 | 520 |
521 | |
522 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock( | |
523 uint32_t pixelGenerationID, | |
524 int32_t width, | |
525 int32_t height, | |
526 SkBitmap* scaled) { | |
527 SkAutoMutexAcquire am(gMutex); | |
528 return get_cache()->findAndLock(pixelGenerationID, width, height, scaled); | |
529 } | |
530 | |
531 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock( | |
532 uint32_t pixelGenerationID, | |
533 int32_t width, | |
534 int32_t height, | |
535 const SkBitmap& scaled) { | |
536 SkAutoMutexAcquire am(gMutex); | |
537 return get_cache()->addAndLock(pixelGenerationID, width, height, scaled); | |
538 } | |
539 | |
540 | |
462 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig, | 541 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig, |
463 SkScalar scaleX, | 542 SkScalar scaleX, |
464 SkScalar scaleY, | 543 SkScalar scaleY, |
465 SkBitmap* scaled) { | 544 SkBitmap* scaled) { |
466 SkAutoMutexAcquire am(gMutex); | 545 SkAutoMutexAcquire am(gMutex); |
467 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled); | 546 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled); |
468 } | 547 } |
469 | 548 |
470 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig, | 549 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig, |
471 SkMipMap const ** mip) { | 550 SkMipMap const ** mip) { |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
515 return SkScaledImageCache::GetBytesUsed(); | 594 return SkScaledImageCache::GetBytesUsed(); |
516 } | 595 } |
517 | 596 |
518 size_t SkGraphics::GetImageCacheByteLimit() { | 597 size_t SkGraphics::GetImageCacheByteLimit() { |
519 return SkScaledImageCache::GetByteLimit(); | 598 return SkScaledImageCache::GetByteLimit(); |
520 } | 599 } |
521 | 600 |
522 size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) { | 601 size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) { |
523 return SkScaledImageCache::SetByteLimit(newLimit); | 602 return SkScaledImageCache::SetByteLimit(newLimit); |
524 } | 603 } |
OLD | NEW |