OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2013 Google Inc. | 2 * Copyright 2013 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "SkScaledImageCache.h" | 8 #include "SkScaledImageCache.h" |
9 #include "SkMipMap.h" | 9 #include "SkMipMap.h" |
| 10 #include "SkOnce.h" |
10 #include "SkPixelRef.h" | 11 #include "SkPixelRef.h" |
11 #include "SkRect.h" | 12 #include "SkRect.h" |
12 | 13 |
13 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT | 14 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT |
14 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) | 15 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) |
15 #endif | 16 #endif |
16 | 17 |
| 18 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) { |
| 19 return reinterpret_cast<SkScaledImageCache::ID*>(rec); |
| 20 } |
| 21 |
| 22 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) { |
| 23 return reinterpret_cast<SkScaledImageCache::Rec*>(id); |
| 24 } |
17 | 25 |
18 // Implemented from en.wikipedia.org/wiki/MurmurHash. | 26 // Implemented from en.wikipedia.org/wiki/MurmurHash. |
19 static uint32_t compute_hash(const uint32_t data[], int count) { | 27 static uint32_t compute_hash(const uint32_t data[], int count) { |
20 uint32_t hash = 0; | 28 uint32_t hash = 0; |
21 | 29 |
22 for (int i = 0; i < count; ++i) { | 30 for (int i = 0; i < count; ++i) { |
23 uint32_t k = data[i]; | 31 uint32_t k = data[i]; |
24 k *= 0xcc9e2d51; | 32 k *= 0xcc9e2d51; |
25 k = (k << 15) | (k >> 17); | 33 k = (k << 15) | (k >> 17); |
26 k *= 0x1b873593; | 34 k *= 0x1b873593; |
27 | 35 |
28 hash ^= k; | 36 hash ^= k; |
29 hash = (hash << 13) | (hash >> 19); | 37 hash = (hash << 13) | (hash >> 19); |
30 hash *= 5; | 38 hash *= 5; |
31 hash += 0xe6546b64; | 39 hash += 0xe6546b64; |
32 } | 40 } |
33 | 41 |
34 // hash ^= size; | 42 // hash ^= size; |
35 hash ^= hash >> 16; | 43 hash ^= hash >> 16; |
36 hash *= 0x85ebca6b; | 44 hash *= 0x85ebca6b; |
37 hash ^= hash >> 13; | 45 hash ^= hash >> 13; |
38 hash *= 0xc2b2ae35; | 46 hash *= 0xc2b2ae35; |
39 hash ^= hash >> 16; | 47 hash ^= hash >> 16; |
40 | 48 |
41 return hash; | 49 return hash; |
42 } | 50 } |
43 | 51 |
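The count of 7 passed to compute_hash() in the Key constructor below (and used again in Key::operator<) is the number of 32-bit words in the hashed portion of Key: the generation ID, the two SkScalars, and the four ints of the SkIRect. A standalone sketch of that layout assumption, with illustrative member names, assuming SkScalar is a 4-byte type and the struct has no internal padding (not part of the patch):

    #include <cstdint>

    // Mirror of the fields Key hashes and compares; for illustration only.
    struct KeyWords {
        uint32_t fGenID;                        // 1 word
        float    fScaleX;                       // 1 word (SkScalar: float or fixed)
        float    fScaleY;                       // 1 word
        int32_t  fLeft, fTop, fRight, fBottom;  // 4 words (SkIRect)
    };
    static_assert(sizeof(KeyWords) == 7 * sizeof(uint32_t),
                  "Key is hashed and compared as 7 contiguous 32-bit words");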
44 struct Key { | 52 struct Key { |
45 bool init(const SkBitmap& bm, SkScalar scaleX, SkScalar scaleY) { | 53 Key(uint32_t genID, |
46 SkPixelRef* pr = bm.pixelRef(); | 54 SkScalar scaleX, |
47 if (!pr) { | 55 SkScalar scaleY, |
48 return false; | 56 SkIRect bounds) |
49 } | 57 : fGenID(genID) |
50 | 58 , fScaleX(scaleX) |
51 size_t x, y; | 59 , fScaleY(scaleY) |
52 SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x); | 60 , fBounds(bounds) { |
53 x >>= 2; | |
54 | |
55 fGenID = pr->getGenerationID(); | |
56 fBounds.set(x, y, x + bm.width(), y + bm.height()); | |
57 fScaleX = scaleX; | |
58 fScaleY = scaleY; | |
59 | |
60 fHash = compute_hash(&fGenID, 7); | 61 fHash = compute_hash(&fGenID, 7); |
61 return true; | |
62 } | 62 } |
63 | 63 |
64 bool operator<(const Key& other) const { | 64 bool operator<(const Key& other) const { |
65 const uint32_t* a = &fGenID; | 65 const uint32_t* a = &fGenID; |
66 const uint32_t* b = &other.fGenID; | 66 const uint32_t* b = &other.fGenID; |
67 for (int i = 0; i < 7; ++i) { | 67 for (int i = 0; i < 7; ++i) { |
68 if (a[i] < b[i]) { | 68 if (a[i] < b[i]) { |
69 return true; | 69 return true; |
70 } | 70 } |
71 if (a[i] > b[i]) { | 71 if (a[i] > b[i]) { |
(...skipping 72 matching lines...)
144 | 144 |
145 class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec, | 145 class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec, |
146 Key, key_from_rec, hash_from_key, | 146 Key, key_from_rec, hash_from_key, |
147 eq_rec_key> {}; | 147 eq_rec_key> {}; |
148 | 148 |
149 /////////////////////////////////////////////////////////////////////////////// | 149 /////////////////////////////////////////////////////////////////////////////// |
150 | 150 |
151 // experimental hash to speed things up | 151 // experimental hash to speed things up |
152 #define USE_HASH | 152 #define USE_HASH |
153 | 153 |
| 154 #if !defined(USE_HASH) |
| 155 static inline SkScaledImageCache::Rec* find_rec_in_list( |
| 156 SkScaledImageCache::Rec* head, const Key & key) { |
| 157 SkScaledImageCache::Rec* rec = head; |
| 158 while ((rec != NULL) && (rec->fKey != key)) { |
| 159 rec = rec->fNext; |
| 160 } |
| 161 return rec; |
| 162 } |
| 163 #endif |
| 164 |
154 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) { | 165 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) { |
155 fHead = NULL; | 166 fHead = NULL; |
156 fTail = NULL; | 167 fTail = NULL; |
157 #ifdef USE_HASH | 168 #ifdef USE_HASH |
158 fHash = new Hash; | 169 fHash = new Hash; |
159 #else | 170 #else |
160 fHash = NULL; | 171 fHash = NULL; |
161 #endif | 172 #endif |
162 fBytesUsed = 0; | 173 fBytesUsed = 0; |
163 fByteLimit = byteLimit; | 174 fByteLimit = byteLimit; |
164 fCount = 0; | 175 fCount = 0; |
165 } | 176 } |
166 | 177 |
167 SkScaledImageCache::~SkScaledImageCache() { | 178 SkScaledImageCache::~SkScaledImageCache() { |
168 Rec* rec = fHead; | 179 Rec* rec = fHead; |
169 while (rec) { | 180 while (rec) { |
170 Rec* next = rec->fNext; | 181 Rec* next = rec->fNext; |
171 SkDELETE(rec); | 182 SkDELETE(rec); |
172 rec = next; | 183 rec = next; |
173 } | 184 } |
174 delete fHash; | 185 delete fHash; |
175 } | 186 } |
176 | 187 |
177 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkBitmap& orig, | 188 //////////////////////////////////////////////////////////////////////////////// |
| 189 |
| 190 /** |
| 191 This private method is the fully general record finder. All other |
| 192 record finders should call this function. */ |
| 193 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID, |
178 SkScalar scaleX, | 194 SkScalar scaleX, |
179 SkScalar scaleY) { | 195 SkScalar scaleY, |
180 Key key; | 196 const SkIRect& bounds) { |
181 if (!key.init(orig, scaleX, scaleY)) { | 197 if (bounds.isEmpty()) { |
182 return NULL; | 198 return NULL; |
183 } | 199 } |
184 | 200 Key key(genID, scaleX, scaleY, bounds); |
185 #ifdef USE_HASH | 201 #ifdef USE_HASH |
186 Rec* rec = fHash->find(key); | 202 Rec* rec = fHash->find(key); |
187 #else | 203 #else |
188 Rec* rec = fHead; | 204 Rec* rec = find_rec_in_list(fHead, key); |
189 while (rec != NULL) { | |
190 if (rec->fKey == key) { | |
191 break; | |
192 } | |
193 rec = rec->fNext; | |
194 } | |
195 #endif | 205 #endif |
196 | |
197 if (rec) { | 206 if (rec) { |
198 this->moveToHead(rec); // for our LRU | 207 this->moveToHead(rec); // for our LRU |
199 rec->fLockCount += 1; | 208 rec->fLockCount += 1; |
200 } | 209 } |
201 return rec; | 210 return rec; |
202 } | 211 } |
203 | 212 |
| 213 /** |
| 214 This function finds the bounds of the bitmap *within its pixelRef*. |
| 215 If the bitmap lacks a pixelRef, it returns an empty rect, since the |
| 216 bounds are undefined in that case. This may be a useful enough function that |
| 217 it should be somewhere else (in SkBitmap?). */ |
| 218 static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) { |
| 219 if (!(bm.pixelRef())) { |
| 220 return SkIRect::MakeEmpty(); |
| 221 } |
| 222 size_t x, y; |
| 223 SkTDivMod(bm.pixelRefOffset(), bm.rowBytes(), &y, &x); |
| 224 x >>= bm.shiftPerPixel(); |
| 225 return SkIRect::MakeXYWH(x, y, bm.width(), bm.height()); |
| 226 } |
| 227 |
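For illustration, get_bounds_from_bitmap() above splits the bitmap's byte offset into its pixelRef into a row index (y) and a byte-within-row remainder, then shifts the remainder from bytes down to pixels. A minimal standalone sketch of the same arithmetic with hypothetical values (plain division/modulo standing in for SkTDivMod):

    #include <cstddef>
    #include <cstdio>

    int main() {
        const size_t rowBytes       = 1024;           // bytes per row of the pixel ref
        const size_t pixelRefOffset = 3 * 1024 + 40;  // bitmap starts 3 rows down, 40 bytes in
        const int    shiftPerPixel  = 2;              // 4 bytes per pixel, e.g. an 8888 config

        size_t y = pixelRefOffset / rowBytes;                     // 3 rows down
        size_t x = (pixelRefOffset % rowBytes) >> shiftPerPixel;  // 40 bytes -> 10 pixels across

        printf("subset origin = (%zu, %zu)\n", x, y);             // prints (10, 3)
        return 0;
    }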
| 228 |
| 229 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID, |
| 230 int32_t width, |
| 231 int32_t height, |
| 232 SkBitmap* bitmap) { |
| 233 Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1, |
| 234 SkIRect::MakeWH(width, height)); |
| 235 if (rec) { |
| 236 SkASSERT(NULL == rec->fMip); |
| 237 SkASSERT(rec->fBitmap.pixelRef()); |
| 238 *bitmap = rec->fBitmap; |
| 239 } |
| 240 return rec_to_id(rec); |
| 241 } |
| 242 |
204 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig, | 243 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig, |
205 SkScalar scaleX, | 244 SkScalar scaleX, |
206 SkScalar scaleY, | 245 SkScalar scaleY, |
207 SkBitmap* scaled) { | 246 SkBitmap* scaled) { |
208 if (0 == scaleX || 0 == scaleY) { | 247 if (0 == scaleX || 0 == scaleY) { |
209 // degenerate, and the key we use for mipmaps | 248 // degenerate, and the key we use for mipmaps |
210 return NULL; | 249 return NULL; |
211 } | 250 } |
212 | 251 Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX, |
213 Rec* rec = this->findAndLock(orig, scaleX, scaleY); | 252 scaleY, get_bounds_from_bitmap(orig)); |
214 if (rec) { | 253 if (rec) { |
215 SkASSERT(NULL == rec->fMip); | 254 SkASSERT(NULL == rec->fMip); |
216 SkASSERT(rec->fBitmap.pixelRef()); | 255 SkASSERT(rec->fBitmap.pixelRef()); |
217 *scaled = rec->fBitmap; | 256 *scaled = rec->fBitmap; |
218 } | 257 } |
219 return (ID*)rec; | 258 return rec_to_id(rec); |
220 } | 259 } |
221 | 260 |
222 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig, | 261 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig, |
223 SkMipMap const ** mip) { | 262 SkMipMap const ** mip) { |
224 Rec* rec = this->findAndLock(orig, 0, 0); | 263 Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0, |
| 264 get_bounds_from_bitmap(orig)); |
225 if (rec) { | 265 if (rec) { |
226 SkASSERT(rec->fMip); | 266 SkASSERT(rec->fMip); |
227 SkASSERT(NULL == rec->fBitmap.pixelRef()); | 267 SkASSERT(NULL == rec->fBitmap.pixelRef()); |
228 *mip = rec->fMip; | 268 *mip = rec->fMip; |
229 } | 269 } |
230 return (ID*)rec; | 270 return rec_to_id(rec); |
| 271 } |
| 272 |
| 273 |
| 274 //////////////////////////////////////////////////////////////////////////////// |
| 275 /** |
| 276 This private method is the fully general record adder. All other |
| 277 record adders should call this function. */ |
| 278 void SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) { |
| 279 SkASSERT(rec); |
| 280 this->addToHead(rec); |
| 281 SkASSERT(1 == rec->fLockCount); |
| 282 #ifdef USE_HASH |
| 283 SkASSERT(fHash); |
| 284 fHash->add(rec); |
| 285 #endif |
| 286 // We may (now) be overbudget, so see if we need to purge something. |
| 287 this->purgeAsNeeded(); |
| 288 } |
| 289 |
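addToHead() and purgeAsNeeded() are outside the lines shown here; what addAndLock() relies on (an assumption about their behavior, not the patch's code) is that new records enter at the head of a doubly-linked LRU list and that the purge walks from the tail, evicting only unlocked records until fBytesUsed drops back under fByteLimit. A self-contained toy model of that policy:

    #include <cstddef>

    // Toy LRU model, for illustration only; not the Skia implementation.
    struct Entry {
        Entry* fPrev = nullptr;
        Entry* fNext = nullptr;
        size_t fBytes = 0;
        int    fLockCount = 0;
    };

    struct ToyCache {
        Entry* fHead = nullptr;
        Entry* fTail = nullptr;
        size_t fBytesUsed = 0;
        size_t fByteLimit = 0;

        void addToHead(Entry* e) {           // newest == most recently used
            e->fNext = fHead;
            if (fHead) { fHead->fPrev = e; }
            fHead = e;
            if (!fTail) { fTail = e; }
            fBytesUsed += e->fBytes;
        }

        void purgeAsNeeded() {               // evict from the LRU (tail) end
            Entry* cur = fTail;
            while (cur && fBytesUsed > fByteLimit) {
                Entry* prev = cur->fPrev;
                if (0 == cur->fLockCount) {  // locked records are never evicted
                    if (cur->fPrev) { cur->fPrev->fNext = cur->fNext; } else { fHead = cur->fNext; }
                    if (cur->fNext) { cur->fNext->fPrev = cur->fPrev; } else { fTail = cur->fPrev; }
                    fBytesUsed -= cur->fBytes;
                    delete cur;
                }
                cur = prev;
            }
        }
    };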
| 290 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID, |
| 291 int32_t width, |
| 292 int32_t height, |
| 293 const SkBitmap& bitmap) { |
| 294 Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height)); |
| 295 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap)); |
| 296 this->addAndLock(rec); |
| 297 return rec_to_id(rec); |
231 } | 298 } |
232 | 299 |
233 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, | 300 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, |
234 SkScalar scaleX, | 301 SkScalar scaleX, |
235 SkScalar scaleY, | 302 SkScalar scaleY, |
236 const SkBitmap& scaled) { | 303 const SkBitmap& scaled) { |
237 if (0 == scaleX || 0 == scaleY) { | 304 if (0 == scaleX || 0 == scaleY) { |
238 // degenerate, and the key we use for mipmaps | 305 // degenerate, and the key we use for mipmaps |
239 return NULL; | 306 return NULL; |
240 } | 307 } |
241 | 308 SkIRect bounds = get_bounds_from_bitmap(orig); |
242 Key key; | 309 if (bounds.isEmpty()) { |
243 if (!key.init(orig, scaleX, scaleY)) { | |
244 return NULL; | 310 return NULL; |
245 } | 311 } |
246 | 312 Key key(orig.getGenerationID(), scaleX, scaleY, bounds); |
247 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); | 313 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); |
248 this->addToHead(rec); | 314 this->addAndLock(rec); |
249 SkASSERT(1 == rec->fLockCount); | 315 return rec_to_id(rec); |
250 | |
251 #ifdef USE_HASH | |
252 fHash->add(rec); | |
253 #endif | |
254 | |
255 // We may (now) be overbudget, so see if we need to purge something. | |
256 this->purgeAsNeeded(); | |
257 return (ID*)rec; | |
258 } | 316 } |
259 | 317 |
260 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, | 318 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, |
261 const SkMipMap* mip) { | 319 const SkMipMap* mip) { |
262 Key key; | 320 SkIRect bounds = get_bounds_from_bitmap(orig); |
263 if (!key.init(orig, 0, 0)) { | 321 if (bounds.isEmpty()) { |
264 return NULL; | 322 return NULL; |
265 } | 323 } |
266 | 324 Key key(orig.getGenerationID(), 0, 0, bounds); |
267 Rec* rec = SkNEW_ARGS(Rec, (key, mip)); | 325 Rec* rec = SkNEW_ARGS(Rec, (key, mip)); |
268 this->addToHead(rec); | 326 this->addAndLock(rec); |
269 SkASSERT(1 == rec->fLockCount); | 327 return rec_to_id(rec); |
270 | |
271 #ifdef USE_HASH | |
272 fHash->add(rec); | |
273 #endif | |
274 | |
275 // We may (now) be overbudget, so see if we need to purge something. | |
276 this->purgeAsNeeded(); | |
277 return (ID*)rec; | |
278 } | 328 } |
279 | 329 |
280 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { | 330 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { |
281 SkASSERT(id); | 331 SkASSERT(id); |
282 | 332 |
283 #ifdef SK_DEBUG | 333 #ifdef SK_DEBUG |
284 { | 334 { |
285 bool found = false; | 335 bool found = false; |
286 Rec* rec = fHead; | 336 Rec* rec = fHead; |
287 while (rec != NULL) { | 337 while (rec != NULL) { |
288 if ((ID*)rec == id) { | 338 if (rec == id_to_rec(id)) { |
289 found = true; | 339 found = true; |
290 break; | 340 break; |
291 } | 341 } |
292 rec = rec->fNext; | 342 rec = rec->fNext; |
293 } | 343 } |
294 SkASSERT(found); | 344 SkASSERT(found); |
295 } | 345 } |
296 #endif | 346 #endif |
297 Rec* rec = (Rec*)id; | 347 Rec* rec = id_to_rec(id); |
298 SkASSERT(rec->fLockCount > 0); | 348 SkASSERT(rec->fLockCount > 0); |
299 rec->fLockCount -= 1; | 349 rec->fLockCount -= 1; |
300 | 350 |
301 // we may have been over-budget, but now have released something, so check | 351 // we may have been over-budget, but now have released something, so check |
302 // if we should purge. | 352 // if we should purge. |
303 if (0 == rec->fLockCount) { | 353 if (0 == rec->fLockCount) { |
304 this->purgeAsNeeded(); | 354 this->purgeAsNeeded(); |
305 } | 355 } |
306 } | 356 } |
307 | 357 |
(...skipping 136 matching lines...)
444 SkASSERT(0 == used); | 494 SkASSERT(0 == used); |
445 } | 495 } |
446 #endif | 496 #endif |
447 | 497 |
448 /////////////////////////////////////////////////////////////////////////////// | 498 /////////////////////////////////////////////////////////////////////////////// |
449 | 499 |
450 #include "SkThread.h" | 500 #include "SkThread.h" |
451 | 501 |
452 SK_DECLARE_STATIC_MUTEX(gMutex); | 502 SK_DECLARE_STATIC_MUTEX(gMutex); |
453 | 503 |
| 504 static void create_cache(SkScaledImageCache** cache) { |
| 505 *cache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT)); |
| 506 } |
| 507 |
454 static SkScaledImageCache* get_cache() { | 508 static SkScaledImageCache* get_cache() { |
455 static SkScaledImageCache* gCache; | 509 static SkScaledImageCache* gCache(NULL); |
456 if (!gCache) { | 510 SK_DECLARE_STATIC_ONCE(create_cache_once); |
457 gCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT)); | 511 SkOnce<SkScaledImageCache**>(&create_cache_once, create_cache, &gCache); |
458 } | 512 SkASSERT(NULL != gCache); |
459 return gCache; | 513 return gCache; |
460 } | 514 } |
461 | 515 |
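With SkOnce, get_cache() constructs the cache exactly once no matter how it is reached, instead of relying on lazy assignment under whatever lock the caller happens to hold. For readers more used to standard C++, the pattern corresponds roughly to std::call_once; the following is an illustration of the idea only, since the patch itself stays on Skia's SkOnce:

    #include <mutex>

    // std::call_once analogue of the SkOnce-based get_cache() above.
    static SkScaledImageCache* get_cache_illustration() {
        static std::once_flag gOnceFlag;
        static SkScaledImageCache* gCache = NULL;
        std::call_once(gOnceFlag, []() {
            gCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
        });
        return gCache;
    }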
| 516 |
| 517 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock( |
| 518 uint32_t pixelGenerationID, |
| 519 int32_t width, |
| 520 int32_t height, |
| 521 SkBitmap* scaled) { |
| 522 SkAutoMutexAcquire am(gMutex); |
| 523 return get_cache()->findAndLock(pixelGenerationID, width, height, scaled); |
| 524 } |
| 525 |
| 526 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock( |
| 527 uint32_t pixelGenerationID, |
| 528 int32_t width, |
| 529 int32_t height, |
| 530 const SkBitmap& scaled) { |
| 531 SkAutoMutexAcquire am(gMutex); |
| 532 return get_cache()->addAndLock(pixelGenerationID, width, height, scaled); |
| 533 } |
| 534 |
| 535 |
462 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig, | 536 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig, |
463 SkScalar scaleX, | 537 SkScalar scaleX, |
464 SkScalar scaleY, | 538 SkScalar scaleY, |
465 SkBitmap* scaled) { | 539 SkBitmap* scaled) { |
466 SkAutoMutexAcquire am(gMutex); | 540 SkAutoMutexAcquire am(gMutex); |
467 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled); | 541 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled); |
468 } | 542 } |
469 | 543 |
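Taken together, the intended caller pattern is find-or-add-then-unlock. A hypothetical caller sketching that flow (not part of the patch): my_resize() is a made-up scaling helper, and the AddAndLock/Unlock static wrappers are assumed to mirror the instance methods, as they fall outside the lines shown here:

    // Hypothetical usage, for illustration only.
    static void draw_scaled(const SkBitmap& orig, SkScalar scaleX, SkScalar scaleY) {
        SkBitmap scaled;
        SkScaledImageCache::ID* id =
                SkScaledImageCache::FindAndLock(orig, scaleX, scaleY, &scaled);
        if (NULL == id) {
            if (!my_resize(orig, scaleX, scaleY, &scaled)) {   // hypothetical helper
                return;
            }
            id = SkScaledImageCache::AddAndLock(orig, scaleX, scaleY, scaled);
        }

        // ... draw with `scaled`; while the ID is held the record is locked
        //     and cannot be purged ...

        if (id != NULL) {
            SkScaledImageCache::Unlock(id);    // assumed static wrapper for unlock()
        }
    }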
470 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig, | 544 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig, |
471 SkMipMap const ** mip) { | 545 SkMipMap const ** mip) { |
(...skipping 43 matching lines...)
515 return SkScaledImageCache::GetBytesUsed(); | 589 return SkScaledImageCache::GetBytesUsed(); |
516 } | 590 } |
517 | 591 |
518 size_t SkGraphics::GetImageCacheByteLimit() { | 592 size_t SkGraphics::GetImageCacheByteLimit() { |
519 return SkScaledImageCache::GetByteLimit(); | 593 return SkScaledImageCache::GetByteLimit(); |
520 } | 594 } |
521 | 595 |
522 size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) { | 596 size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) { |
523 return SkScaledImageCache::SetByteLimit(newLimit); | 597 return SkScaledImageCache::SetByteLimit(newLimit); |
524 } | 598 } |