| OLD | NEW | 
|    1  |    1  | 
|    2 /* |    2 /* | 
|    3  * Copyright 2008 The Android Open Source Project |    3  * Copyright 2008 The Android Open Source Project | 
|    4  * |    4  * | 
|    5  * Use of this source code is governed by a BSD-style license that can be |    5  * Use of this source code is governed by a BSD-style license that can be | 
|    6  * found in the LICENSE file. |    6  * found in the LICENSE file. | 
|    7  */ |    7  */ | 
|    8  |    8  | 
|    9  |    9  | 
|   10 #ifndef SkPixelRef_DEFINED |   10 #ifndef SkPixelRef_DEFINED | 
|   11 #define SkPixelRef_DEFINED |   11 #define SkPixelRef_DEFINED | 
|   12  |   12  | 
|   13 #include "SkBitmap.h" |   13 #include "SkBitmap.h" | 
|   14 #include "SkRefCnt.h" |   14 #include "SkRefCnt.h" | 
|   15 #include "SkString.h" |   15 #include "SkString.h" | 
|   16 #include "SkFlattenable.h" |   16 #include "SkFlattenable.h" | 
 |   17 #include "SkImageInfo.h" | 
|   17 #include "SkTDArray.h" |   18 #include "SkTDArray.h" | 
|   18  |   19  | 
|   19 #define SK_SUPPORT_LEGACY_PIXELREF_CONSTRUCTOR |   20 //#define SK_SUPPORT_LEGACY_PIXELREF_CONSTRUCTOR | 
 |   21  | 
 |   22 #define SK_SUPPORT_LEGACY_ONLOCKPIXELS | 
|   20  |   23  | 
|   21 #ifdef SK_DEBUG |   24 #ifdef SK_DEBUG | 
|   22     /** |   25     /** | 
|   23      *  Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref |   26      *  Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref | 
|   24      *  subclasses to correctly handle lock/unlock pixels. For performance |   27      *  subclasses to correctly handle lock/unlock pixels. For performance | 
|   25      *  reasons, simple malloc-based subclasses call setPreLocked() to skip |   28      *  reasons, simple malloc-based subclasses call setPreLocked() to skip | 
|   26      *  the overhead of implementing these calls. |   29      *  the overhead of implementing these calls. | 
|   27      * |   30      * | 
|   28      *  This build-flag disables that optimization, to aid in debugging our |   31      *  This build-flag disables that optimization, to aid in debugging our | 
|   29      *  call-sites, to ensure that they correctly balance their calls of |   32      *  call-sites, to ensure that they correctly balance their calls of | 
| (...skipping 30 matching lines...) | 
|   60     SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex); |   63     SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex); | 
|   61     virtual ~SkPixelRef(); |   64     virtual ~SkPixelRef(); | 
|   62  |   65  | 
|   63     const SkImageInfo& info() const { |   66     const SkImageInfo& info() const { | 
|   64         return fInfo; |   67         return fInfo; | 
|   65     } |   68     } | 
|   66  |   69  | 
|   67     /** Return the pixel memory returned from lockPixels, or null if the |   70     /** Return the pixel memory returned from lockPixels, or null if the | 
|   68         lockCount is 0. |   71         lockCount is 0. | 
|   69     */ |   72     */ | 
|   70     void* pixels() const { return fPixels; } |   73     void* pixels() const { return fRec.fPixels; } | 
|   71  |   74  | 
|   72     /** Return the current colorTable (if any) if pixels are locked, or null. |   75     /** Return the current colorTable (if any) if pixels are locked, or null. | 
|   73     */ |   76     */ | 
|   74     SkColorTable* colorTable() const { return fColorTable; } |   77     SkColorTable* colorTable() const { return fRec.fColorTable; } | 
|   75  |   78  | 
|   76     /** |   79     /** | 
 |   80      *  To access the actual pixels of a pixelref, it must be "locked". | 
 |   81      *  Calling lockPixels returns a LockRec struct (on success). | 
 |   82      */ | 
 |   83     struct LockRec { | 
 |   84         void*           fPixels; | 
 |   85         SkColorTable*   fColorTable; | 
 |   86         size_t          fRowBytes; | 
 |   87          | 
 |   88         void zero() { sk_bzero(this, sizeof(*this)); } | 
 |   89  | 
 |   90         bool isZero() const { | 
 |   91             return NULL == fPixels && NULL == fColorTable && 0 == fRowBytes; | 
 |   92         } | 
 |   93     }; | 
 |   94      | 
 |   95     /** | 
|   77      *  Returns true if the lockcount > 0 |   96      *  Returns true if the lockcount > 0 | 
|   78      */ |   97      */ | 
|   79     bool isLocked() const { return fLockCount > 0; } |   98     bool isLocked() const { return fLockCount > 0; } | 
|   80  |   99  | 
|   81     SkDEBUGCODE(int getLockCount() const { return fLockCount; }) |  100     SkDEBUGCODE(int getLockCount() const { return fLockCount; }) | 
|   82  |  101  | 
|   83     /** Call to access the pixel memory, which is returned. Balance with a call |  102     /** | 
|   84         to unlockPixels(). |  103      *  Call to access the pixel memory. Return true on success. Balance this | 
|   85     */ |  104      *  with a call to unlockPixels(). | 
|   86     void lockPixels(); |  105      */ | 
 |  106     bool lockPixels(); | 
 |  107  | 
 |  108     /** | 
 |  109      *  Call to access the pixel memory. On success, return true and fill out | 
 |  110      *  the specified rec. On failure, return false and ignore the rec parameter. | 
 |  111      *  Balance this with a call to unlockPixels(). | 
 |  112      */ | 
 |  113     bool lockPixels(LockRec* rec); | 
 |  114  | 
|   87     /** Call to balance a previous call to lockPixels(). Returns the pixels |  115     /** Call to balance a previous call to lockPixels(). Returns the pixels | 
|   88         (or null) after the unlock. NOTE: lock calls can be nested, but the |  116         (or null) after the unlock. NOTE: lock calls can be nested, but the | 
|   89         matching number of unlock calls must be made in order to free the |  117         matching number of unlock calls must be made in order to free the | 
|   90         memory (if the subclass implements caching/deferred-decoding.) |  118         memory (if the subclass implements caching/deferred-decoding.) | 
|   91     */ |  119     */ | 
|   92     void unlockPixels(); |  120     void unlockPixels(); | 
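
A minimal usage sketch of the locking protocol declared above, assuming a valid SkPixelRef* named pr; the caller code and variable names are hypothetical, not part of this header:

    SkPixelRef::LockRec rec;
    if (pr->lockPixels(&rec)) {
        // rec.fPixels, rec.fRowBytes and rec.fColorTable remain valid until the unlock.
        const char* firstRow  = static_cast<const char*>(rec.fPixels);
        const char* secondRow = firstRow + rec.fRowBytes;   // start of the next scanline
        // ... read pixel data here ...
        pr->unlockPixels();   // balance the successful lock
    }

The single-argument lockPixels() overload reports success the same way; the caller then reaches the locked memory through pixels() and colorTable() instead of a LockRec.
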
|   93  |  121  | 
|   94     /** |  122     /** | 
|   95      *  Some bitmaps can return a copy of their pixels for lockPixels(), but |  123      *  Some bitmaps can return a copy of their pixels for lockPixels(), but | 
|   96      *  that copy, if modified, will not be pushed back. These bitmaps should |  124      *  that copy, if modified, will not be pushed back. These bitmaps should | 
| (...skipping 136 matching lines...) | 
|  233     // This can be used to invalidate caches keyed by SkPixelRef generation ID. |  261     // This can be used to invalidate caches keyed by SkPixelRef generation ID. | 
|  234     struct GenIDChangeListener { |  262     struct GenIDChangeListener { | 
|  235         virtual ~GenIDChangeListener() {} |  263         virtual ~GenIDChangeListener() {} | 
|  236         virtual void onChange() = 0; |  264         virtual void onChange() = 0; | 
|  237     }; |  265     }; | 
|  238  |  266  | 
|  239     // Takes ownership of listener. |  267     // Takes ownership of listener. | 
|  240     void addGenIDChangeListener(GenIDChangeListener* listener); |  268     void addGenIDChangeListener(GenIDChangeListener* listener); | 
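
As a hedged illustration of the change-listener hook above, a sketch of a listener that drops a cache entry keyed by the pixelref's generation ID; MyCache, cache and key are invented names, not Skia API:

    class MyCacheInvalidator : public SkPixelRef::GenIDChangeListener {
    public:
        MyCacheInvalidator(MyCache* cache, uint32_t key) : fCache(cache), fKey(key) {}
        virtual void onChange() SK_OVERRIDE { fCache->remove(fKey); }   // invalidate on genID change
    private:
        MyCache* fCache;   // not owned
        uint32_t fKey;     // hypothetical cache key
    };

    // The pixelref takes ownership of the listener, per the comment above:
    pixelRef->addGenIDChangeListener(new MyCacheInvalidator(cache, key));
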
|  241  |  269  | 
|  242 protected: |  270 protected: | 
|  243     /** Called when the lockCount goes from 0 to 1. The caller will have already |  271 #ifdef SK_SUPPORT_LEGACY_ONLOCKPIXELS | 
|  244         acquire a mutex for thread safety, so this method need not do that. |  272     virtual void* onLockPixels(SkColorTable**); | 
|  245     */ |  273     virtual bool onNewLockPixels(LockRec*); | 
|  246     virtual void* onLockPixels(SkColorTable**) = 0; |  274 #else | 
|  247   |  | 
|  248     /** |  275     /** | 
|  249      *  Called when the lock count goes from 1 to 0. The caller will have |  276      *  On success, returns true and fills out the LockRec for the pixels. On | 
|  250      *  already acquire a mutex for thread safety, so this method need not do |  277      *  failure returns false and ignores the LockRec parameter. | 
|  251      *  that. |  | 
|  252      * |  278      * | 
|  253      *  If the previous call to onLockPixels failed (i.e. returned NULL), then |  279      *  The caller will have already acquired a mutex for thread safety, so this | 
|  254      *  the onUnlockPixels will NOT be called. |  280      *  method need not do that. | 
 |  281      */ | 
 |  282     virtual bool onNewLockPixels(LockRec*) = 0; | 
 |  283 #endif | 
 |  284  | 
 |  285     /** | 
 |  286      *  Balancing the previous successful call to onNewLockPixels. The locked | 
 |  287      *  pixel address will no longer be referenced, so the subclass is free to | 
 |  288      *  move or discard that memory. | 
 |  289      * | 
 |  290      *  The caller will have already acquired a mutex for thread safety, so this | 
 |  291      *  method need not do that. | 
|  255      */ |  292      */ | 
|  256     virtual void onUnlockPixels() = 0; |  293     virtual void onUnlockPixels() = 0; | 
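
A hedged sketch of what a subclass supplies under the non-legacy path above; MyMallocPixelRef, fStorage and fRB are invented names, and the constructor forwards to the SkPixelRef(const SkImageInfo&, SkBaseMutex*) constructor shown earlier in this header:

    class MyMallocPixelRef : public SkPixelRef {
    public:
        MyMallocPixelRef(const SkImageInfo& info, SkBaseMutex* mutex,
                         void* storage, size_t rowBytes)
            : SkPixelRef(info, mutex), fStorage(storage), fRB(rowBytes) {}

    protected:
        virtual bool onNewLockPixels(LockRec* rec) SK_OVERRIDE {
            rec->fPixels     = fStorage;   // storage is always resident in this sketch
            rec->fRowBytes   = fRB;
            rec->fColorTable = NULL;       // no color table
            return true;                   // report success
        }
        virtual void onUnlockPixels() SK_OVERRIDE {
            // Storage outlives the lock, so there is nothing to release here.
        }

    private:
        void*  fStorage;   // not owned in this sketch
        size_t fRB;
    };
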
|  257  |  294  | 
|  258     /** Default impl returns true */ |  295     /** Default impl returns true */ | 
|  259     virtual bool onLockPixelsAreWritable() const; |  296     virtual bool onLockPixelsAreWritable() const; | 
|  260  |  297  | 
|  261     // returns false; |  298     // returns false; | 
|  262     virtual bool onImplementsDecodeInto(); |  299     virtual bool onImplementsDecodeInto(); | 
|  263     // returns false; |  300     // returns false; | 
|  264     virtual bool onDecodeInto(int pow2, SkBitmap* bitmap); |  301     virtual bool onDecodeInto(int pow2, SkBitmap* bitmap); | 
| (...skipping 24 matching lines...) | 
|  289     */ |  326     */ | 
|  290     SkBaseMutex* mutex() const { return fMutex; } |  327     SkBaseMutex* mutex() const { return fMutex; } | 
|  291  |  328  | 
|  292     // serialization |  329     // serialization | 
|  293     SkPixelRef(SkFlattenableReadBuffer&, SkBaseMutex*); |  330     SkPixelRef(SkFlattenableReadBuffer&, SkBaseMutex*); | 
|  294     virtual void flatten(SkFlattenableWriteBuffer&) const SK_OVERRIDE; |  331     virtual void flatten(SkFlattenableWriteBuffer&) const SK_OVERRIDE; | 
|  295  |  332  | 
|  296     // only call from constructor. Flags this to always be locked, removing |  333     // only call from constructor. Flags this to always be locked, removing | 
|  297     // the need to grab the mutex and call onLockPixels/onUnlockPixels. |  334     // the need to grab the mutex and call onLockPixels/onUnlockPixels. | 
|  298     // Performance tweak to avoid those calls (esp. in multi-thread use case). |  335     // Performance tweak to avoid those calls (esp. in multi-thread use case). | 
|  299     void setPreLocked(void* pixels, SkColorTable* ctable); |  336     void setPreLocked(void*, size_t rowBytes, SkColorTable*); | 
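
Continuing the hypothetical MyMallocPixelRef sketch from the protected section above, a variant of its constructor could use this fast path, since its storage is always resident; note the rowBytes parameter added by the new signature:

    MyMallocPixelRef::MyMallocPixelRef(const SkImageInfo& info, SkBaseMutex* mutex,
                                       void* storage, size_t rowBytes)
        : SkPixelRef(info, mutex), fStorage(storage), fRB(rowBytes) {
        // Pre-lock so that lockPixels()/unlockPixels() skip the mutex and the
        // onNewLockPixels/onUnlockPixels virtual calls for this pixelref.
        this->setPreLocked(fStorage, fRB, NULL);   // NULL: no color table
    }
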
|  300  |  337  | 
|  301 private: |  338 private: | 
|  302     SkBaseMutex*    fMutex; // must remain in scope for the life of this object |  339     SkBaseMutex*    fMutex; // must remain in scope for the life of this object | 
|  303     // FIXME: fInfo should be const once we remove old constructor that does |  340     // FIXME: fInfo should be const once we remove old constructor that does | 
|  304     // not set it. |  341     // not set it. | 
|  305     SkImageInfo     fInfo; |  342     SkImageInfo     fInfo; | 
|  306  |  343      | 
|  307     void*           fPixels; |  344     // LockRec is only valid if we're in a locked state (isLocked()) | 
|  308     SkColorTable*   fColorTable;    // we do not track ownership, subclass does |  345     LockRec         fRec; | 
|  309     int             fLockCount; |  346     int             fLockCount; | 
|  310  |  347  | 
|  311     mutable uint32_t fGenerationID; |  348     mutable uint32_t fGenerationID; | 
|  312     mutable bool     fUniqueGenerationID; |  349     mutable bool     fUniqueGenerationID; | 
|  313  |  350  | 
|  314     SkTDArray<GenIDChangeListener*> fGenIDChangeListeners;  // pointers are owned |  351     SkTDArray<GenIDChangeListener*> fGenIDChangeListeners;  // pointers are owned | 
|  315  |  352  | 
|  316     SkString    fURI; |  353     SkString    fURI; | 
|  317  |  354  | 
|  318     // can go from false to true, but never from true to false |  355     // can go from false to true, but never from true to false | 
|  319     bool    fIsImmutable; |  356     bool    fIsImmutable; | 
|  320     // only ever set in constructor, const after that |  357     // only ever set in constructor, const after that | 
|  321     bool    fPreLocked; |  358     bool    fPreLocked; | 
|  322  |  359  | 
|  323     void needsNewGenID(); |  360     void needsNewGenID(); | 
|  324     void callGenIDChangeListeners(); |  361     void callGenIDChangeListeners(); | 
|  325  |  362  | 
|  326     void setMutex(SkBaseMutex* mutex); |  363     void setMutex(SkBaseMutex* mutex); | 
|  327  |  364  | 
|  328     // When copying a bitmap to another with the same shape and config, we can safely |  365     // When copying a bitmap to another with the same shape and config, we can safely | 
|  329     // clone the pixelref generation ID too, which makes them equivalent under caching. |  366     // clone the pixelref generation ID too, which makes them equivalent under caching. | 
|  330     friend class SkBitmap;  // only for cloneGenID |  367     friend class SkBitmap;  // only for cloneGenID | 
|  331     void cloneGenID(const SkPixelRef&); |  368     void cloneGenID(const SkPixelRef&); | 
|  332  |  369  | 
|  333     typedef SkFlattenable INHERITED; |  370     typedef SkFlattenable INHERITED; | 
|  334 }; |  371 }; | 
|  335  |  372  | 
|  336 #endif |  373 #endif | 
| OLD | NEW |