OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2012 The Android Open Source Project | 2 * Copyright 2012 The Android Open Source Project |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #include "SkImageFilter.h" | 8 #include "SkImageFilter.h" |
9 | 9 |
10 #include "SkBitmap.h" | 10 #include "SkBitmap.h" |
11 #include "SkBitmapDevice.h" | 11 #include "SkBitmapDevice.h" |
12 #include "SkChecksum.h" | 12 #include "SkChecksum.h" |
13 #include "SkDevice.h" | 13 #include "SkDevice.h" |
14 #include "SkMatrixImageFilter.h" | 14 #include "SkMatrixImageFilter.h" |
15 #include "SkMutex.h" | 15 #include "SkMutex.h" |
16 #include "SkOncePtr.h" | 16 #include "SkOncePtr.h" |
17 #include "SkPixelRef.h" | |
17 #include "SkReadBuffer.h" | 18 #include "SkReadBuffer.h" |
18 #include "SkRect.h" | 19 #include "SkRect.h" |
20 #include "SkResourceCache.h" | |
19 #include "SkTDynamicHash.h" | 21 #include "SkTDynamicHash.h" |
20 #include "SkTInternalLList.h" | 22 #include "SkTInternalLList.h" |
21 #include "SkValidationUtils.h" | 23 #include "SkValidationUtils.h" |
22 #include "SkWriteBuffer.h" | 24 #include "SkWriteBuffer.h" |
23 #if SK_SUPPORT_GPU | 25 #if SK_SUPPORT_GPU |
24 #include "GrContext.h" | 26 #include "GrContext.h" |
25 #include "GrDrawContext.h" | 27 #include "GrDrawContext.h" |
26 #include "SkGrPixelRef.h" | 28 #include "SkGrPixelRef.h" |
27 #include "SkGr.h" | 29 #include "SkGr.h" |
28 #endif | 30 #endif |
29 | 31 |
30 #ifdef SK_BUILD_FOR_IOS | |
31 enum { kDefaultCacheSize = 2 * 1024 * 1024 }; | |
32 #else | |
33 enum { kDefaultCacheSize = 128 * 1024 * 1024 }; | |
34 #endif | |
35 | |
36 #ifndef SK_IGNORE_TO_STRING | 32 #ifndef SK_IGNORE_TO_STRING |
37 void SkImageFilter::CropRect::toString(SkString* str) const { | 33 void SkImageFilter::CropRect::toString(SkString* str) const { |
38 if (!fFlags) { | 34 if (!fFlags) { |
39 return; | 35 return; |
40 } | 36 } |
41 | 37 |
42 str->appendf("cropRect ("); | 38 str->appendf("cropRect ("); |
43 if (fFlags & CropRect::kHasLeft_CropEdge) { | 39 if (fFlags & CropRect::kHasLeft_CropEdge) { |
44 str->appendf("%.2f, ", fRect.fLeft); | 40 str->appendf("%.2f, ", fRect.fLeft); |
45 } else { | 41 } else { |
(...skipping 424 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
470 return true; | 466 return true; |
471 } else { | 467 } else { |
472 return false; | 468 return false; |
473 } | 469 } |
474 } | 470 } |
475 } | 471 } |
476 #endif | 472 #endif |
477 | 473 |
478 namespace { | 474 namespace { |
479 | 475 |
480 class CacheImpl : public SkImageFilter::Cache { | 476 class ImageFilterRec : public SkResourceCache::Rec { |
481 public: | 477 public: |
482 CacheImpl(size_t maxBytes) : fMaxBytes(maxBytes), fCurrentBytes(0) { | 478 struct Keys { |
483 } | 479 Keys(SkImageFilter::Cache::Key ifcKey) : fIFCKey(ifcKey) { |
484 virtual ~CacheImpl() { | 480 static bool unique_namespace; |
485 SkTDynamicHash<Value, Key>::Iter iter(&fLookup); | 481 fRCKey.init(&unique_namespace, |
482 0 /*shared ids allow fine-grained purging, which we can't use here*/, |
483 sizeof(fIFCKey)); | |
484 } | |
485 // The SkImageFilter::Cache::Key must immediately follow the SkResourceCache::Key. |
486 SkResourceCache::Key fRCKey; | |
487 const SkImageFilter::Cache::Key fIFCKey; | |
488 }; | |
486 | 489 |
487 while (!iter.done()) { | 490 ImageFilterRec(SkImageFilter::Cache::Key ifcKey, const SkBitmap& bm, const SkIPoint& offset) |
488 Value* v = &*iter; | 491 : fKeys(ifcKey) |
489 ++iter; | 492 , fBitmap(bm) |
490 delete v; | 493 , fOffset(offset) {} |
494 | |
495 const Key& getKey() const override { return fKeys.fRCKey; } | |
496 size_t bytesUsed() const override { return sizeof(*this) + fBitmap.getSize(); } |
497 const char* getCategory() const override { return "SkImageFilter::Cache"; } | |
498 | |
499 SkDiscardableMemory* diagnostic_only_getDiscardable() const override { | |
500 if (auto pr = fBitmap.pixelRef()) { | |
501 return pr->diagnostic_only_getDiscardable(); | |
491 } | 502 } |
492 } | 503 return nullptr; |
493 struct Value { | |
494 Value(const Key& key, const SkBitmap& bitmap, const SkIPoint& offset) | |
495 : fKey(key), fBitmap(bitmap), fOffset(offset) {} | |
496 Key fKey; | |
497 SkBitmap fBitmap; | |
498 SkIPoint fOffset; | |
499 static const Key& GetKey(const Value& v) { | |
500 return v.fKey; | |
501 } | |
502 static uint32_t Hash(const Key& key) { | |
503 return SkChecksum::Murmur3(reinterpret_cast<const uint32_t*>(&key), sizeof(Key)); | |
504 } | |
505 SK_DECLARE_INTERNAL_LLIST_INTERFACE(Value); | |
506 }; | |
507 bool get(const Key& key, SkBitmap* result, SkIPoint* offset) const override { | |
508 SkAutoMutexAcquire mutex(fMutex); | |
509 if (Value* v = fLookup.find(key)) { | |
510 *result = v->fBitmap; | |
511 *offset = v->fOffset; | |
512 if (v != fLRU.head()) { | |
513 fLRU.remove(v); | |
514 fLRU.addToHead(v); | |
515 } | |
516 return true; | |
517 } | |
518 return false; | |
519 } | |
520 void set(const Key& key, const SkBitmap& result, const SkIPoint& offset) override { |
521 SkAutoMutexAcquire mutex(fMutex); | |
522 if (Value* v = fLookup.find(key)) { | |
523 removeInternal(v); | |
524 } | |
525 Value* v = new Value(key, result, offset); | |
526 fLookup.add(v); | |
527 fLRU.addToHead(v); | |
528 fCurrentBytes += result.getSize(); | |
529 while (fCurrentBytes > fMaxBytes) { | |
530 Value* tail = fLRU.tail(); | |
531 SkASSERT(tail); | |
532 if (tail == v) { | |
533 break; | |
534 } | |
535 removeInternal(tail); | |
536 } | |
537 } | 504 } |
538 | 505 |
539 void purge() override { | 506 const SkBitmap& bitmap() const { return fBitmap; } |
540 SkAutoMutexAcquire mutex(fMutex); | 507 const SkIPoint& offset() const { return fOffset; } |
541 while (fCurrentBytes > 0) { | 508 |
542 Value* tail = fLRU.tail(); | 509 private: |
543 SkASSERT(tail); | 510 const Keys fKeys; |
544 this->removeInternal(tail); | 511 const SkBitmap fBitmap; |
512 const SkIPoint fOffset; | |
513 }; | |
514 | |
515 struct GetVisitor { | |
516 SkBitmap* fResult; | |
517 SkIPoint* fOffset; | |
518 | |
519 static bool Visit(const SkResourceCache::Rec& rec, void* context) { | |
520 auto r = (const ImageFilterRec&)rec; | |
521 auto c = (GetVisitor*)context; | |
522 *c->fResult = r.bitmap(); | |
523 *c->fOffset = r.offset(); | |
524 return true; | |
525 } | |
526 }; | |
527 | |
528 // TODO: just a single (global) cache that uses SkResourceCache / GrResourceCache as appropriate. |
529 | |
530 // Thread-safe SkImageFilter::Cache that uses the global SkResourceCache. | |
531 class GlobalCache : public SkImageFilter::Cache { | |
532 public: | |
533 GlobalCache() {} | |
534 | |
535 void set(const Key& key, const SkBitmap& result, const SkIPoint& offset) override { |
536 const SkBitmap* bm = &result; | |
537 // Image filters allocate their own result bitmaps. | |
Stephen White
2015/10/13 16:50:08
How much cleaner/faster would this code become if
| |
538 // If we're putting a bitmap into the SkResourceCache backed by discardable memory, |
539 // we'd better make sure those bitmaps are discardable too (and not if not). |
540 // The expected case in Chrome is: rcIsDiscardable == true, bmIsDiscardable == false. |
Stephen White
2015/10/13 16:50:08
So you're saying the expected case in Chrome is th
| |
541 auto allocator = SkResourceCache::GetAllocator(); | |
542 bool rcIsDiscardable = allocator, | |
543 bmIsDiscardable = bm->pixelRef() && bm->pixelRef()->diagnostic_only_getDiscardable(); |
544 SkBitmap copy; | |
545 if (rcIsDiscardable != bmIsDiscardable) { | |
546 bm->copyTo(©, allocator); | |
547 bm = © | |
545 } | 548 } |
549 SkResourceCache::Add(new ImageFilterRec(key, *bm, offset)); | |
546 } | 550 } |
547 | 551 |
548 private: | 552 bool get(const Key& ifcKey, SkBitmap* result, SkIPoint* offset) const override { |
549 void removeInternal(Value* v) { | 553 const ImageFilterRec::Keys keys(ifcKey); |
550 fCurrentBytes -= v->fBitmap.getSize(); | 554 GetVisitor visitor { result, offset }; |
551 fLRU.remove(v); | 555 return SkResourceCache::Find(keys.fRCKey, GetVisitor::Visit, &visitor); |
552 fLookup.remove(v->fKey); | 556 } |
553 delete v; | 557 }; |
558 | |
559 // Non-thread-safe siloed SkImageFilter::Cache, meant to be small and ephemeral. | |
560 class LocalCache : public SkImageFilter::Cache { | |
561 public: | |
562 LocalCache(size_t maxBytes) : fRC(maxBytes) { | |
563 SkASSERT(fRC.allocator() == nullptr); | |
564 } | |
565 | |
566 void set(const Key& key, const SkBitmap& result, const SkIPoint& offset) override { |
567 SkASSERT(result.pixelRef() == nullptr || |
568 result.pixelRef()->diagnostic_only_getDiscardable() == nullptr); |
569 fRC.add(new ImageFilterRec(key, result, offset)); | |
570 } | |
571 | |
572 bool get(const Key& ifcKey, SkBitmap* result, SkIPoint* offset) const override { |
573 const ImageFilterRec::Keys keys(ifcKey); | |
574 GetVisitor visitor { result, offset }; | |
575 return fRC.find(keys.fRCKey, GetVisitor::Visit, &visitor); | |
554 } | 576 } |
555 private: | 577 private: |
556 SkTDynamicHash<Value, Key> fLookup; | 578 mutable SkResourceCache fRC; // SkResourceCache::find() is not const (updates LRU). |
557 mutable SkTInternalLList<Value> fLRU; | |
558 size_t fMaxBytes; | |
559 size_t fCurrentBytes; | |
560 mutable SkMutex fMutex; | |
561 }; | 579 }; |
562 | 580 |
563 } // namespace | 581 } // namespace |
564 | 582 |
565 SkImageFilter::Cache* SkImageFilter::Cache::Create(size_t maxBytes) { | 583 SkImageFilter::Cache* SkImageFilter::Cache::Create(size_t maxBytes) { |
Stephen White
2015/10/13 16:50:08
Rename this to CreateLocal()?
| |
566 return new CacheImpl(maxBytes); | 584 return new LocalCache(maxBytes); |
567 } | 585 } |
568 | 586 |
569 SK_DECLARE_STATIC_ONCE_PTR(SkImageFilter::Cache, cache); | 587 SK_DECLARE_STATIC_ONCE_PTR(SkImageFilter::Cache, cache); |
570 SkImageFilter::Cache* SkImageFilter::Cache::Get() { | 588 SkImageFilter::Cache* SkImageFilter::Cache::Get() { |
571 return cache.get([]{ return SkImageFilter::Cache::Create(kDefaultCacheSize); }); | 589 return cache.get([]{ return new GlobalCache; }); |
572 } | |
573 | |
574 void SkImageFilter::PurgeCache() { | |
575 Cache::Get()->purge(); | |
576 } | 590 } |
577 | 591 |
578 //////////////////////////////////////////////////////////////////////////////// /////////////////// | 592 //////////////////////////////////////////////////////////////////////////////// /////////////////// |
579 | 593 |
580 SkBaseDevice* SkImageFilter::Proxy::createDevice(int w, int h) { | 594 SkBaseDevice* SkImageFilter::Proxy::createDevice(int w, int h) { |
581 SkBaseDevice::CreateInfo cinfo(SkImageInfo::MakeN32Premul(w, h), | 595 SkBaseDevice::CreateInfo cinfo(SkImageInfo::MakeN32Premul(w, h), |
582 SkBaseDevice::kNever_TileUsage, | 596 SkBaseDevice::kNever_TileUsage, |
583 kUnknown_SkPixelGeometry, | 597 kUnknown_SkPixelGeometry, |
584 true /*forImageFilter*/); | 598 true /*forImageFilter*/); |
585 SkBaseDevice* dev = fDevice->onCreateDevice(cinfo, nullptr); | 599 SkBaseDevice* dev = fDevice->onCreateDevice(cinfo, nullptr); |
586 if (nullptr == dev) { | 600 if (nullptr == dev) { |
587 const SkSurfaceProps surfaceProps(fDevice->fSurfaceProps.flags(), | 601 const SkSurfaceProps surfaceProps(fDevice->fSurfaceProps.flags(), |
588 kUnknown_SkPixelGeometry); | 602 kUnknown_SkPixelGeometry); |
589 dev = SkBitmapDevice::Create(cinfo.fInfo, surfaceProps); | 603 dev = SkBitmapDevice::Create(cinfo.fInfo, surfaceProps); |
590 } | 604 } |
591 return dev; | 605 return dev; |
592 } | 606 } |
593 | 607 |
594 bool SkImageFilter::Proxy::filterImage(const SkImageFilter* filter, const SkBitmap& src, | 608 bool SkImageFilter::Proxy::filterImage(const SkImageFilter* filter, const SkBitmap& src, |
595 const SkImageFilter::Context& ctx, | 609 const SkImageFilter::Context& ctx, |
596 SkBitmap* result, SkIPoint* offset) { | 610 SkBitmap* result, SkIPoint* offset) { |
597 return fDevice->filterImage(filter, src, ctx, result, offset); | 611 return fDevice->filterImage(filter, src, ctx, result, offset); |
598 } | 612 } |
599 | 613 |
OLD | NEW |