| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2013 Google Inc. | 2 * Copyright 2013 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #include "SkChecksum.h" | 8 #include "SkChecksum.h" |
| 9 #include "SkScaledImageCache.h" | 9 #include "SkScaledImageCache.h" |
| 10 #include "SkMipMap.h" | 10 #include "SkMipMap.h" |
| (...skipping 12 matching lines...) |
| 23 #endif | 23 #endif |
| 24 | 24 |
| 25 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) { | 25 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) { |
| 26 return reinterpret_cast<SkScaledImageCache::ID*>(rec); | 26 return reinterpret_cast<SkScaledImageCache::ID*>(rec); |
| 27 } | 27 } |
| 28 | 28 |
| 29 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) { | 29 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) { |
| 30 return reinterpret_cast<SkScaledImageCache::Rec*>(id); | 30 return reinterpret_cast<SkScaledImageCache::Rec*>(id); |
| 31 } | 31 } |
| 32 | 32 |
| 33 struct SkScaledImageCache::Key { | 33 void SkScaledImageCache::Key::init(size_t length) { |
| 34 Key(uint32_t genID, | 34 SkASSERT(SkAlign4(length) == length); |
| 35 SkScalar scaleX, | 35 // 2 is fCount32 and fHash |
| 36 SkScalar scaleY, | 36 fCount32 = SkToS32(2 + (length >> 2)); |
| 37 SkIRect bounds) | 37 // skip both of our fields when computing the murmur |
| 38 : fGenID(genID) | 38 fHash = SkChecksum::Murmur3(this->as32() + 2, (fCount32 - 2) << 2); |
| 39 , fScaleX(scaleX) | 39 } |
| 40 , fScaleY(scaleY) | |
| 41 , fBounds(bounds) { | |
| 42 fHash = SkChecksum::Murmur3(&fGenID, 28); | |
| 43 } | |
| 44 | 40 |
| 45 bool operator<(const Key& other) const { | 41 SkScaledImageCache::Key* SkScaledImageCache::Key::clone() const { |
| 46 const uint32_t* a = &fGenID; | 42 size_t size = fCount32 << 2; |
| 47 const uint32_t* b = &other.fGenID; | 43 void* copy = sk_malloc_throw(size); |
| 48 for (int i = 0; i < 7; ++i) { | 44 memcpy(copy, this, size); |
| 49 if (a[i] < b[i]) { | 45 return (Key*)copy; |
| 50 return true; | 46 } |
| 51 } | |
| 52 if (a[i] > b[i]) { | |
| 53 return false; | |
| 54 } | |
| 55 } | |
| 56 return false; | |
| 57 } | |
| 58 | |
| 59 bool operator==(const Key& other) const { | |
| 60 const uint32_t* a = &fHash; | |
| 61 const uint32_t* b = &other.fHash; | |
| 62 for (int i = 0; i < 8; ++i) { | |
| 63 if (a[i] != b[i]) { | |
| 64 return false; | |
| 65 } | |
| 66 } | |
| 67 return true; | |
| 68 } | |
| 69 | |
| 70 uint32_t fHash; | |
| 71 uint32_t fGenID; | |
| 72 float fScaleX; | |
| 73 float fScaleY; | |
| 74 SkIRect fBounds; | |
| 75 }; | |
| 76 | 47 |
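
The hunk above replaces the old fixed-layout Key (genID, scales, bounds hashed as a flat 28-byte block) with a variable-length base Key: init() records the total size in 32-bit words and hashes only the payload that follows the fCount32/fHash header, and clone() makes a heap copy sized from fCount32. Below is a minimal standalone sketch of that pattern; hash32, KeyBase, and RectKey are illustrative stand-ins (for SkChecksum::Murmur3, SkScaledImageCache::Key, and GenWHKey), not Skia code, and it assumes the derived key's fields sit contiguously after the two header words, as the real code does.

#include <cstddef>
#include <cstdint>
#include <cstring>

// Stand-in for SkChecksum::Murmur3: any 32-bit hash over 4-byte-aligned data.
static uint32_t hash32(const uint32_t* data, size_t bytes) {
    uint32_t h = 0;
    for (size_t i = 0; i < bytes / 4; ++i) {
        h = (h ^ data[i]) * 0x9e3779b1u;
    }
    return h;
}

struct KeyBase {
    int32_t  fCount32;  // total key size in 32-bit words, including these two fields
    uint32_t fHash;     // hash of everything after fCount32/fHash

    const uint32_t* as32() const { return reinterpret_cast<const uint32_t*>(this); }

    // length = byte size of the derived key's payload; must be 4-byte aligned.
    void init(size_t length) {
        fCount32 = int32_t(2 + (length >> 2));
        fHash = hash32(this->as32() + 2, (fCount32 - 2) << 2);
    }

    // One memcmp covers count, hash, and payload, so keys of different
    // lengths can never compare equal by accident.
    bool operator==(const KeyBase& other) const {
        return fCount32 == other.fCount32 &&
               0 == memcmp(this->as32(), other.as32(), fCount32 << 2);
    }
};

// Mirrors GenWHKey: seven 32-bit payload words after the header.
struct RectKey : KeyBase {
    uint32_t fGenID;
    float    fScaleX, fScaleY;
    int32_t  fL, fT, fR, fB;

    RectKey(uint32_t genID, float sx, float sy, int l, int t, int r, int b)
        : fGenID(genID), fScaleX(sx), fScaleY(sy), fL(l), fT(t), fR(r), fB(b) {
        this->init(7 * sizeof(uint32_t));  // hash the seven payload words above
    }
};

int main() {
    RectKey a(1, 1.0f, 1.0f, 0, 0, 10, 10);
    RectKey b(1, 1.0f, 1.0f, 0, 0, 10, 10);
    return (a == b) ? 0 : 1;  // identical payloads hash and compare equal
}

Storing the size and hash in the key itself is what lets the cache keep heterogeneous key types in one hash table and compare them with a single memcmp.
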
| 77 struct SkScaledImageCache::Rec { | 48 struct SkScaledImageCache::Rec { |
| 78 Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) { | 49 Rec(const Key& key, const SkBitmap& bm) : fKey(key.clone()), fBitmap(bm) { |
| 79 fLockCount = 1; | 50 fLockCount = 1; |
| 80 fMip = NULL; | 51 fMip = NULL; |
| 81 } | 52 } |
| 82 | 53 |
| 83 Rec(const Key& key, const SkMipMap* mip) : fKey(key) { | 54 Rec(const Key& key, const SkMipMap* mip) : fKey(key.clone()) { |
| 84 fLockCount = 1; | 55 fLockCount = 1; |
| 85 fMip = mip; | 56 fMip = mip; |
| 86 mip->ref(); | 57 mip->ref(); |
| 87 } | 58 } |
| 88 | 59 |
| 89 ~Rec() { | 60 ~Rec() { |
| 90 SkSafeUnref(fMip); | 61 SkSafeUnref(fMip); |
| 62 sk_free(fKey); |
| 91 } | 63 } |
| 92 | 64 |
| 93 static const Key& GetKey(const Rec& rec) { return rec.fKey; } | 65 static const Key& GetKey(const Rec& rec) { return *rec.fKey; } |
| 94 static uint32_t Hash(const Key& key) { return key.fHash; } | 66 static uint32_t Hash(const Key& key) { return key.hash(); } |
| 95 | 67 |
| 96 size_t bytesUsed() const { | 68 size_t bytesUsed() const { |
| 97 return fMip ? fMip->getSize() : fBitmap.getSize(); | 69 return fMip ? fMip->getSize() : fBitmap.getSize(); |
| 98 } | 70 } |
| 99 | 71 |
| 100 Rec* fNext; | 72 Rec* fNext; |
| 101 Rec* fPrev; | 73 Rec* fPrev; |
| 102 | 74 |
| 103 // this guy wants to be 64bit aligned | 75 // this guy wants to be 64bit aligned |
| 104 Key fKey; | 76 Key* fKey; |
| 105 | 77 |
| 106 int32_t fLockCount; | 78 int32_t fLockCount; |
| 107 | 79 |
| 108 // we use either fBitmap or fMip, but not both | 80 // we use either fBitmap or fMip, but not both |
| 109 SkBitmap fBitmap; | 81 SkBitmap fBitmap; |
| 110 const SkMipMap* fMip; | 82 const SkMipMap* fMip; |
| 111 }; | 83 }; |
| 112 | 84 |
| 113 #include "SkTDynamicHash.h" | 85 #include "SkTDynamicHash.h" |
| 114 | 86 |
| (...skipping 166 matching lines...) |
| 281 while (rec) { | 253 while (rec) { |
| 282 Rec* next = rec->fNext; | 254 Rec* next = rec->fNext; |
| 283 SkDELETE(rec); | 255 SkDELETE(rec); |
| 284 rec = next; | 256 rec = next; |
| 285 } | 257 } |
| 286 delete fHash; | 258 delete fHash; |
| 287 } | 259 } |
| 288 | 260 |
| 289 //////////////////////////////////////////////////////////////////////////////// | 261 //////////////////////////////////////////////////////////////////////////////// |
| 290 | 262 |
| 263 struct GenWHKey : public SkScaledImageCache::Key { |
| 264 public: |
| 265 GenWHKey(uint32_t genID, SkScalar scaleX, SkScalar scaleY, const SkIRect& bounds) |
| 266 : fGenID(genID) |
| 267 , fScaleX(scaleX) |
| 268 , fScaleY(scaleY) |
| 269 , fBounds(bounds) { |
| 270 this->init(7 * sizeof(uint32_t)); |
| 271 } |
| 272 |
| 273 uint32_t fGenID; |
| 274 SkScalar fScaleX; |
| 275 SkScalar fScaleY; |
| 276 SkIRect fBounds; |
| 277 }; |
| 291 | 278 |
| 292 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID, | 279 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID, |
| 293 SkScalar scaleX, | 280 SkScalar scaleX, |
| 294 SkScalar scaleY, | 281 SkScalar scaleY, |
| 295 const SkIRect& bounds) { | 282 const SkIRect& bounds) { |
| 296 const Key key(genID, scaleX, scaleY, bounds); | 283 return this->findAndLock(GenWHKey(genID, scaleX, scaleY, bounds)); |
| 297 return this->findAndLock(key); | |
| 298 } | 284 } |
| 299 | 285 |
| 300 /** | 286 /** |
| 301 This private method is the fully general record finder. All other | 287 This private method is the fully general record finder. All other |
| 302 record finders should call this function or the one above. */ | 288 record finders should call this function or the one above. */ |
| 303 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) { | 289 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) { |
| 304 if (key.fBounds.isEmpty()) { | |
| 305 return NULL; | |
| 306 } | |
| 307 #ifdef USE_HASH | 290 #ifdef USE_HASH |
| 308 Rec* rec = fHash->find(key); | 291 Rec* rec = fHash->find(key); |
| 309 #else | 292 #else |
| 310 Rec* rec = find_rec_in_list(fHead, key); | 293 Rec* rec = find_rec_in_list(fHead, key); |
| 311 #endif | 294 #endif |
| 312 if (rec) { | 295 if (rec) { |
| 313 this->moveToHead(rec); // for our LRU | 296 this->moveToHead(rec); // for our LRU |
| 314 rec->fLockCount += 1; | 297 rec->fLockCount += 1; |
| 315 } | 298 } |
| 316 return rec; | 299 return rec; |
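
findAndLock() above is the generic lookup: it probes either the SkTDynamicHash (USE_HASH) or a linear walk of the list, and on a hit bumps the lock count and moves the record to the head of the intrusive doubly-linked list so cold entries drift toward the tail. The empty-bounds early return disappears here because the generic Key no longer carries bounds. A simplified sketch of the promote-on-hit bookkeeping follows; Node and LruList are hypothetical names for illustration, not the cache's actual types.

struct Node {
    Node* fPrev = nullptr;
    Node* fNext = nullptr;
    int   fLockCount = 0;
};

class LruList {
    Node* fHead = nullptr;
    Node* fTail = nullptr;
public:
    void addToHead(Node* n) {
        n->fPrev = nullptr;
        n->fNext = fHead;
        if (fHead) { fHead->fPrev = n; }
        fHead = n;
        if (!fTail) { fTail = n; }
    }

    void detach(Node* n) {
        if (n->fPrev) { n->fPrev->fNext = n->fNext; } else { fHead = n->fNext; }
        if (n->fNext) { n->fNext->fPrev = n->fPrev; } else { fTail = n->fPrev; }
        n->fPrev = n->fNext = nullptr;
    }

    // Cache hit: promote to most-recently-used and take a lock,
    // mirroring moveToHead() + fLockCount += 1 in findAndLock().
    void lockHit(Node* n) {
        this->detach(n);
        this->addToHead(n);
        n->fLockCount += 1;
    }
};
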
| (...skipping 58 matching lines...) |
| 375 } | 358 } |
| 376 | 359 |
| 377 | 360 |
| 378 //////////////////////////////////////////////////////////////////////////////// | 361 //////////////////////////////////////////////////////////////////////////////// |
| 379 /** | 362 /** |
| 380 This private method is the fully general record adder. All other | 363 This private method is the fully general record adder. All other |
| 381 record adders should call this function. */ | 364 record adders should call this function. */ |
| 382 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) { | 365 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) { |
| 383 SkASSERT(rec); | 366 SkASSERT(rec); |
| 384 // See if we already have this key (racy inserts, etc.) | 367 // See if we already have this key (racy inserts, etc.) |
| 385 Rec* existing = this->findAndLock(rec->fKey); | 368 Rec* existing = this->findAndLock(*rec->fKey); |
| 386 if (NULL != existing) { | 369 if (NULL != existing) { |
| 387 // Since we already have a matching entry, just delete the new one and return. | 370 // Since we already have a matching entry, just delete the new one and return. |
| 388 // Call sites cannot assume the passed in object will live past this call. | 371 // Call sites cannot assume the passed in object will live past this call. |
| 389 existing->fBitmap = rec->fBitmap; | 372 existing->fBitmap = rec->fBitmap; |
| 390 SkDELETE(rec); | 373 SkDELETE(rec); |
| 391 return rec_to_id(existing); | 374 return rec_to_id(existing); |
| 392 } | 375 } |
| 393 | 376 |
| 394 this->addToHead(rec); | 377 this->addToHead(rec); |
| 395 SkASSERT(1 == rec->fLockCount); | 378 SkASSERT(1 == rec->fLockCount); |
| 396 #ifdef USE_HASH | 379 #ifdef USE_HASH |
| 397 SkASSERT(fHash); | 380 SkASSERT(fHash); |
| 398 fHash->add(rec); | 381 fHash->add(rec); |
| 399 #endif | 382 #endif |
| 400 // We may (now) be overbudget, so see if we need to purge something. | 383 // We may (now) be overbudget, so see if we need to purge something. |
| 401 this->purgeAsNeeded(); | 384 this->purgeAsNeeded(); |
| 402 return rec_to_id(rec); | 385 return rec_to_id(rec); |
| 403 } | 386 } |
| 404 | 387 |
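
The adder above also handles racy inserts: if an identical key is already present (for example, two callers scaled the same bitmap concurrently), the existing, already-locked record wins, the new bitmap is copied into it, and the freshly built Rec is deleted before any caller can hold a pointer to it. A hypothetical, simplified sketch of that keep-the-winner pattern (std::map stands in for SkTDynamicHash; names are illustrative, not Skia's):

#include <cstdint>
#include <map>
#include <memory>
#include <string>

struct Record {
    std::string payload;  // stands in for the SkBitmap
    int lockCount = 1;    // records are born locked, as in the Rec constructors
};

class Cache {
    std::map<uint32_t, std::unique_ptr<Record>> fRecords;  // keyed by hash for brevity
public:
    // Returns the record the caller now holds locked.
    Record* addAndLock(uint32_t key, std::unique_ptr<Record> rec) {
        auto it = fRecords.find(key);
        if (it != fRecords.end()) {
            // Racy insert: someone beat us to this key. Keep their record,
            // adopt the new payload, and let 'rec' be destroyed on return,
            // just as addAndLock() above does with SkDELETE(rec).
            it->second->payload = std::move(rec->payload);
            it->second->lockCount += 1;
            return it->second.get();
        }
        Record* raw = rec.get();
        fRecords.emplace(key, std::move(rec));
        return raw;  // lockCount is already 1 from construction
    }
};
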
| 405 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID, | 388 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID, |
| 406 int32_t width, | 389 int32_t width, |
| 407 int32_t height, | 390 int32_t height, |
| 408 const SkBitmap& bitmap) { | 391 const SkBitmap& bitmap) { |
| 409 Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height)); | 392 GenWHKey key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height)); |
| 410 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap)); | 393 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap)); |
| 411 return this->addAndLock(rec); | 394 return this->addAndLock(rec); |
| 412 } | 395 } |
| 413 | 396 |
| 414 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, | 397 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig, |
| 415 SkScalar scaleX, | 398 SkScalar scaleX, |
| 416 SkScalar scaleY, | 399 SkScalar scaleY, |
| 417 const SkBitmap& scaled) { | 400 const SkBitmap& scaled) { |
| 418 if (0 == scaleX || 0 == scaleY) { | 401 if (0 == scaleX || 0 == scaleY) { |
| 419 // degenerate, and the key we use for mipmaps | 402 // degenerate, and the key we use for mipmaps |
| 420 return NULL; | 403 return NULL; |
| 421 } | 404 } |
| 422 SkIRect bounds = get_bounds_from_bitmap(orig); | 405 SkIRect bounds = get_bounds_from_bitmap(orig); |
| 423 if (bounds.isEmpty()) { | 406 if (bounds.isEmpty()) { |
| 424 return NULL; | 407 return NULL; |
| 425 } | 408 } |
| 426 Key key(orig.getGenerationID(), scaleX, scaleY, bounds); | 409 GenWHKey key(orig.getGenerationID(), scaleX, scaleY, bounds); |
| 427 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); | 410 Rec* rec = SkNEW_ARGS(Rec, (key, scaled)); |
| 428 return this->addAndLock(rec); | 411 return this->addAndLock(rec); |
| 429 } | 412 } |
| 430 | 413 |
| 431 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, | 414 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig, |
| 432 const SkMipMap* mip) { | 415 const SkMipMap* mip) { |
| 433 SkIRect bounds = get_bounds_from_bitmap(orig); | 416 SkIRect bounds = get_bounds_from_bitmap(orig); |
| 434 if (bounds.isEmpty()) { | 417 if (bounds.isEmpty()) { |
| 435 return NULL; | 418 return NULL; |
| 436 } | 419 } |
| 437 Key key(orig.getGenerationID(), 0, 0, bounds); | 420 GenWHKey key(orig.getGenerationID(), 0, 0, bounds); |
| 438 Rec* rec = SkNEW_ARGS(Rec, (key, mip)); | 421 Rec* rec = SkNEW_ARGS(Rec, (key, mip)); |
| 439 return this->addAndLock(rec); | 422 return this->addAndLock(rec); |
| 440 } | 423 } |
| 441 | 424 |
| 442 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { | 425 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) { |
| 443 SkASSERT(id); | 426 SkASSERT(id); |
| 444 | 427 |
| 445 #ifdef SK_DEBUG | 428 #ifdef SK_DEBUG |
| 446 { | 429 { |
| 447 bool found = false; | 430 bool found = false; |
| (...skipping 39 matching lines...) |
| 487 if (bytesUsed < byteLimit && countUsed < countLimit) { | 470 if (bytesUsed < byteLimit && countUsed < countLimit) { |
| 488 break; | 471 break; |
| 489 } | 472 } |
| 490 | 473 |
| 491 Rec* prev = rec->fPrev; | 474 Rec* prev = rec->fPrev; |
| 492 if (0 == rec->fLockCount) { | 475 if (0 == rec->fLockCount) { |
| 493 size_t used = rec->bytesUsed(); | 476 size_t used = rec->bytesUsed(); |
| 494 SkASSERT(used <= bytesUsed); | 477 SkASSERT(used <= bytesUsed); |
| 495 this->detach(rec); | 478 this->detach(rec); |
| 496 #ifdef USE_HASH | 479 #ifdef USE_HASH |
| 497 fHash->remove(rec->fKey); | 480 fHash->remove(*rec->fKey); |
| 498 #endif | 481 #endif |
| 499 | 482 |
| 500 SkDELETE(rec); | 483 SkDELETE(rec); |
| 501 | 484 |
| 502 bytesUsed -= used; | 485 bytesUsed -= used; |
| 503 countUsed -= 1; | 486 countUsed -= 1; |
| 504 } | 487 } |
| 505 rec = prev; | 488 rec = prev; |
| 506 } | 489 } |
| 507 | 490 |
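
The eviction walk above starts at the tail (the least recently used record), skips anything still locked by a caller, and frees records until both the byte and count budgets are satisfied again. A condensed standalone sketch of that tail-first purge; the Rec fields and the detach/destroy callbacks are placeholders for the cache's own list unlink, hash removal, and SkDELETE:

#include <cstddef>

struct Rec {
    Rec*   fPrev;
    Rec*   fNext;
    int    fLockCount;
    size_t fBytes;
};

template <typename DetachFn, typename DestroyFn>
void purgeAsNeeded(Rec* tail, size_t& bytesUsed, int& countUsed,
                   size_t byteLimit, int countLimit,
                   DetachFn detach, DestroyFn destroy) {
    Rec* rec = tail;
    while (rec) {
        if (bytesUsed < byteLimit && countUsed < countLimit) {
            break;  // back under budget; stop evicting
        }
        Rec* prev = rec->fPrev;      // grab before the record is freed
        if (0 == rec->fLockCount) {  // never evict a record a caller still holds
            bytesUsed -= rec->fBytes;
            countUsed -= 1;
            detach(rec);   // unlink from the list (and the hash, when USE_HASH is set)
            destroy(rec);  // SkDELETE(rec) in the real cache
        }
        rec = prev;
    }
}
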
| (...skipping 279 matching lines...) |
| 787 } | 770 } |
| 788 | 771 |
| 789 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() { | 772 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() { |
| 790 return SkScaledImageCache::GetSingleAllocationByteLimit(); | 773 return SkScaledImageCache::GetSingleAllocationByteLimit(); |
| 791 } | 774 } |
| 792 | 775 |
| 793 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) { | 776 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) { |
| 794 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit); | 777 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit); |
| 795 } | 778 } |
| 796 | 779 |