| OLD | NEW |
| 1 | 1 |
| 2 /* | 2 /* |
| 3 * Copyright 2008 The Android Open Source Project | 3 * Copyright 2008 The Android Open Source Project |
| 4 * | 4 * |
| 5 * Use of this source code is governed by a BSD-style license that can be | 5 * Use of this source code is governed by a BSD-style license that can be |
| 6 * found in the LICENSE file. | 6 * found in the LICENSE file. |
| 7 */ | 7 */ |
| 8 | 8 |
| 9 | 9 |
| 10 #ifndef SkPixelRef_DEFINED | 10 #ifndef SkPixelRef_DEFINED |
| 11 #define SkPixelRef_DEFINED | 11 #define SkPixelRef_DEFINED |
| 12 | 12 |
| 13 #include "SkBitmap.h" | 13 #include "SkBitmap.h" |
| 14 #include "SkRefCnt.h" | 14 #include "SkRefCnt.h" |
| 15 #include "SkString.h" | 15 #include "SkString.h" |
| 16 #include "SkFlattenable.h" | 16 #include "SkFlattenable.h" |
| 17 #include "SkImageInfo.h" | |
| 18 #include "SkTDArray.h" | 17 #include "SkTDArray.h" |
| 19 | 18 |
| 20 //#define SK_SUPPORT_LEGACY_ONLOCKPIXELS | |
| 21 | |
| 22 #ifdef SK_DEBUG | 19 #ifdef SK_DEBUG |
| 23 /** | 20 /** |
| 24 * Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref | 21 * Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref |
| 25 * subclasses to correctly handle lock/unlock pixels. For performance | 22 * subclasses to correctly handle lock/unlock pixels. For performance |
| 26 * reasons, simple malloc-based subclasses call setPreLocked() to skip | 23 * reasons, simple malloc-based subclasses call setPreLocked() to skip |
| 27 * the overhead of implementing these calls. | 24 * the overhead of implementing these calls. |
| 28 * | 25 * |
| 29 * This build-flag disables that optimization, to add in debugging our | 26 * This build-flag disables that optimization, to add in debugging our |
| 30 * call-sites, to ensure that they correctly balance their calls of | 27 * call-sites, to ensure that they correctly balance their calls of |
| 31 * lock and unlock. | 28 * lock and unlock. |
| (...skipping 24 matching lines...) |
| 56 SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex); | 53 SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex); |
| 57 virtual ~SkPixelRef(); | 54 virtual ~SkPixelRef(); |
| 58 | 55 |
| 59 const SkImageInfo& info() const { | 56 const SkImageInfo& info() const { |
| 60 return fInfo; | 57 return fInfo; |
| 61 } | 58 } |
| 62 | 59 |
| 63 /** Return the pixel memory returned from lockPixels, or null if the | 60 /** Return the pixel memory returned from lockPixels, or null if the |
| 64 lockCount is 0. | 61 lockCount is 0. |
| 65 */ | 62 */ |
| 66 void* pixels() const { return fRec.fPixels; } | 63 void* pixels() const { return fPixels; } |
| 67 | 64 |
| 68 /** Return the current colorTable (if any) if pixels are locked, or null. | 65 /** Return the current colorTable (if any) if pixels are locked, or null. |
| 69 */ | 66 */ |
| 70 SkColorTable* colorTable() const { return fRec.fColorTable; } | 67 SkColorTable* colorTable() const { return fColorTable; } |
| 71 | 68 |
| 72 /** | 69 /** |
| 73 * To access the actual pixels of a pixelref, it must be "locked". | |
| 74 * Calling lockPixels returns a LockRec struct (on success). | |
| 75 */ | |
| 76 struct LockRec { | |
| 77 void* fPixels; | |
| 78 SkColorTable* fColorTable; | |
| 79 size_t fRowBytes; | |
| 80 | |
| 81 void zero() { sk_bzero(this, sizeof(*this)); } | |
| 82 | |
| 83 bool isZero() const { | |
| 84 return NULL == fPixels && NULL == fColorTable && 0 == fRowBytes; | |
| 85 } | |
| 86 }; | |
| 87 | |
| 88 /** | |
| 89 * Returns true if the lockcount > 0 | 70 * Returns true if the lockcount > 0 |
| 90 */ | 71 */ |
| 91 bool isLocked() const { return fLockCount > 0; } | 72 bool isLocked() const { return fLockCount > 0; } |
| 92 | 73 |
| 93 SkDEBUGCODE(int getLockCount() const { return fLockCount; }) | 74 SkDEBUGCODE(int getLockCount() const { return fLockCount; }) |
| 94 | 75 |
| 95 /** | 76 /** Call to access the pixel memory, which is returned. Balance with a call |
| 96 * Call to access the pixel memory. Return true on success. Balance this | 77 to unlockPixels(). |
| 97 * with a call to unlockPixels(). | 78 */ |
| 98 */ | 79 void lockPixels(); |
| 99 bool lockPixels(); | |
| 100 | |
| 101 /** | |
| 102 * Call to access the pixel memory. On success, return true and fill out | |
| 103 * the specified rec. On failure, return false and ignore the rec parameter. | |
| 104 * Balance this with a call to unlockPixels(). | |
| 105 */ | |
| 106 bool lockPixels(LockRec* rec); | |
| 107 | |
| 108 /** Call to balance a previous call to lockPixels(). Returns the pixels | 80 /** Call to balance a previous call to lockPixels(). Returns the pixels |
| 109 (or null) after the unlock. NOTE: lock calls can be nested, but the | 81 (or null) after the unlock. NOTE: lock calls can be nested, but the |
| 110 matching number of unlock calls must be made in order to free the | 82 matching number of unlock calls must be made in order to free the |
| 111 memory (if the subclass implements caching/deferred-decoding.) | 83 memory (if the subclass implements caching/deferred-decoding.) |
| 112 */ | 84 */ |
| 113 void unlockPixels(); | 85 void unlockPixels(); |
| 114 | 86 |
| 115 /** | 87 /** |
| 116 * Some bitmaps can return a copy of their pixels for lockPixels(), but | 88 * Some bitmaps can return a copy of their pixels for lockPixels(), but |
| 117 * that copy, if modified, will not be pushed back. These bitmaps should | 89 * that copy, if modified, will not be pushed back. These bitmaps should |
| (...skipping 136 matching lines...) |
| 254 // This can be used to invalidate caches keyed by SkPixelRef generation ID. | 226 // This can be used to invalidate caches keyed by SkPixelRef generation ID. |
| 255 struct GenIDChangeListener { | 227 struct GenIDChangeListener { |
| 256 virtual ~GenIDChangeListener() {} | 228 virtual ~GenIDChangeListener() {} |
| 257 virtual void onChange() = 0; | 229 virtual void onChange() = 0; |
| 258 }; | 230 }; |
| 259 | 231 |
| 260 // Takes ownership of listener. | 232 // Takes ownership of listener. |
| 261 void addGenIDChangeListener(GenIDChangeListener* listener); | 233 void addGenIDChangeListener(GenIDChangeListener* listener); |
| 262 | 234 |
| 263 protected: | 235 protected: |
| 264 #ifdef SK_SUPPORT_LEGACY_ONLOCKPIXELS | 236 /** Called when the lockCount goes from 0 to 1. The caller will have already |
| 265 virtual void* onLockPixels(SkColorTable**); | 237 acquired a mutex for thread safety, so this method need not do that. |
| 266 virtual bool onNewLockPixels(LockRec*); | 238 */ |
| 267 #else | 239 virtual void* onLockPixels(SkColorTable**) = 0; |
| 268 /** | |
| 269 * On success, returns true and fills out the LockRec for the pixels. On | |
| 270 * failure returns false and ignores the LockRec parameter. | |
| 271 * | |
| 272 * The caller will have already acquired a mutex for thread safety, so this | |
| 273 * method need not do that. | |
| 274 */ | |
| 275 virtual bool onNewLockPixels(LockRec*) = 0; | |
| 276 #endif | |
| 277 | 240 |
| 278 /** | 241 /** |
| 279 * Balancing the previous successful call to onNewLockPixels. The locked | 242 * Called when the lock count goes from 1 to 0. The caller will have |
| 280 * pixel address will no longer be referenced, so the subclass is free to | 243 * already acquired a mutex for thread safety, so this method need not do |
| 281 * move or discard that memory. | 244 * that. |
| 282 * | 245 * |
| 283 * The caller will have already acquired a mutex for thread safety, so this | 246 * If the previous call to onLockPixels failed (i.e. returned NULL), then |
| 284 * method need not do that. | 247 * the onUnlockPixels will NOT be called. |
| 285 */ | 248 */ |
| 286 virtual void onUnlockPixels() = 0; | 249 virtual void onUnlockPixels() = 0; |
| 287 | 250 |
| 288 /** Default impl returns true */ | 251 /** Default impl returns true */ |
| 289 virtual bool onLockPixelsAreWritable() const; | 252 virtual bool onLockPixelsAreWritable() const; |
| 290 | 253 |
| 291 // returns false; | 254 // returns false; |
| 292 virtual bool onImplementsDecodeInto(); | 255 virtual bool onImplementsDecodeInto(); |
| 293 // returns false; | 256 // returns false; |
| 294 virtual bool onDecodeInto(int pow2, SkBitmap* bitmap); | 257 virtual bool onDecodeInto(int pow2, SkBitmap* bitmap); |
| (...skipping 24 matching lines...) |
| 319 */ | 282 */ |
| 320 SkBaseMutex* mutex() const { return fMutex; } | 283 SkBaseMutex* mutex() const { return fMutex; } |
| 321 | 284 |
| 322 // serialization | 285 // serialization |
| 323 SkPixelRef(SkFlattenableReadBuffer&, SkBaseMutex*); | 286 SkPixelRef(SkFlattenableReadBuffer&, SkBaseMutex*); |
| 324 virtual void flatten(SkFlattenableWriteBuffer&) const SK_OVERRIDE; | 287 virtual void flatten(SkFlattenableWriteBuffer&) const SK_OVERRIDE; |
| 325 | 288 |
| 326 // only call from constructor. Flags this to always be locked, removing | 289 // only call from constructor. Flags this to always be locked, removing |
| 327 // the need to grab the mutex and call onLockPixels/onUnlockPixels. | 290 // the need to grab the mutex and call onLockPixels/onUnlockPixels. |
| 328 // Performance tweak to avoid those calls (esp. in multi-thread use case). | 291 // Performance tweak to avoid those calls (esp. in multi-thread use case). |
| 329 void setPreLocked(void*, size_t rowBytes, SkColorTable*); | 292 void setPreLocked(void* pixels, SkColorTable* ctable); |
| 330 | 293 |
| 331 private: | 294 private: |
| 332 SkBaseMutex* fMutex; // must remain in scope for the life of this object | 295 SkBaseMutex* fMutex; // must remain in scope for the life of this object |
| 333 | 296 |
| 334 const SkImageInfo fInfo; | 297 const SkImageInfo fInfo; |
| 335 | 298 |
| 336 // LockRec is only valid if we're in a locked state (isLocked()) | 299 void* fPixels; |
| 337 LockRec fRec; | 300 SkColorTable* fColorTable; // we do not track ownership, subclass does |
| 338 int fLockCount; | 301 int fLockCount; |
| 339 | 302 |
| 340 mutable uint32_t fGenerationID; | 303 mutable uint32_t fGenerationID; |
| 341 mutable bool fUniqueGenerationID; | 304 mutable bool fUniqueGenerationID; |
| 342 | 305 |
| 343 SkTDArray<GenIDChangeListener*> fGenIDChangeListeners; // pointers are owned | 306 SkTDArray<GenIDChangeListener*> fGenIDChangeListeners; // pointers are owned |
| 344 | 307 |
| 345 SkString fURI; | 308 SkString fURI; |
| 346 | 309 |
| 347 // can go from false to true, but never from true to false | 310 // can go from false to true, but never from true to false |
| 348 bool fIsImmutable; | 311 bool fIsImmutable; |
| 349 // only ever set in constructor, const after that | 312 // only ever set in constructor, const after that |
| 350 bool fPreLocked; | 313 bool fPreLocked; |
| 351 | 314 |
| 352 void needsNewGenID(); | 315 void needsNewGenID(); |
| 353 void callGenIDChangeListeners(); | 316 void callGenIDChangeListeners(); |
| 354 | 317 |
| 355 void setMutex(SkBaseMutex* mutex); | 318 void setMutex(SkBaseMutex* mutex); |
| 356 | 319 |
| 357 // When copying a bitmap to another with the same shape and config, we can safely | 320 // When copying a bitmap to another with the same shape and config, we can safely |
| 358 // clone the pixelref generation ID too, which makes them equivalent under caching. | 321 // clone the pixelref generation ID too, which makes them equivalent under caching. |
| 359 friend class SkBitmap; // only for cloneGenID | 322 friend class SkBitmap; // only for cloneGenID |
| 360 void cloneGenID(const SkPixelRef&); | 323 void cloneGenID(const SkPixelRef&); |
| 361 | 324 |
| 362 typedef SkFlattenable INHERITED; | 325 typedef SkFlattenable INHERITED; |
| 363 }; | 326 }; |
| 364 | 327 |
| 365 #endif | 328 #endif |
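The left ("OLD") column declares a LockRec-based locking API (`bool lockPixels(LockRec*)` plus `unlockPixels()`), while the right ("NEW") column keeps the `void lockPixels()` / `pixels()` form. A minimal caller-side sketch of the LockRec variant, assuming 32-bit pixels and a hypothetical helper name (not part of Skia):

```cpp
#include "SkPixelRef.h"

// Hypothetical helper: read one 32-bit pixel through the LockRec-based API
// shown in the left ("OLD") column. Lock calls may nest, but each successful
// lockPixels() must be balanced by exactly one unlockPixels().
static uint32_t read_pixel32(SkPixelRef* pr, int x, int y) {
    SkPixelRef::LockRec rec;
    if (!pr->lockPixels(&rec)) {
        return 0;   // on failure the rec is ignored and no unlock is needed
    }
    const char* row = static_cast<const char*>(rec.fPixels) + y * rec.fRowBytes;
    uint32_t value = reinterpret_cast<const uint32_t*>(row)[x];
    pr->unlockPixels();   // balance the successful lock
    return value;
}
```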
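On the subclass side, the left column's pure virtual onNewLockPixels(LockRec*) and onUnlockPixels() are the hooks that deliver and release the pixel memory while the base class holds its mutex. A minimal malloc-backed sketch against those declarations; the class name, constructor arguments, and ownership policy are assumptions, and any flattening/serialization boilerplate is omitted:

```cpp
#include "SkPixelRef.h"

// Illustrative subclass against the LockRec-based contract in the left
// ("OLD") column; not an actual Skia class.
class SkHypotheticalMallocPixelRef : public SkPixelRef {
public:
    // Takes ownership of 'storage' (assumed to come from sk_malloc_*).
    SkHypotheticalMallocPixelRef(const SkImageInfo& info, void* storage,
                                 size_t rowBytes, SkBaseMutex* mutex)
        : SkPixelRef(info, mutex)
        , fStorage(storage)
        , fRB(rowBytes) {}

    virtual ~SkHypotheticalMallocPixelRef() { sk_free(fStorage); }

protected:
    // Called when the lock count goes from 0 to 1; the base class has already
    // acquired its mutex, so no extra synchronization is needed here.
    virtual bool onNewLockPixels(LockRec* rec) SK_OVERRIDE {
        rec->fPixels     = fStorage;
        rec->fColorTable = NULL;
        rec->fRowBytes   = fRB;
        return true;
    }

    // Called when the lock count returns to 0. The storage lives for the
    // lifetime of the pixel ref, so there is nothing to release here.
    virtual void onUnlockPixels() SK_OVERRIDE {}

private:
    void*  fStorage;
    size_t fRB;
};
```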
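Both columns keep the GenIDChangeListener hook for invalidating caches keyed by the pixel ref's generation ID. A sketch of a listener wired to a stand-in cache type; the cache and its erase() method are hypothetical, only the listener interface and addGenIDChangeListener() come from the header:

```cpp
#include "SkPixelRef.h"

// Stand-in for an application-side cache keyed by generation ID.
struct HypotheticalCache {
    void erase(uint32_t /*key*/) { /* drop the entry; body omitted in this sketch */ }
};

// Listener that evicts the cached entry when the pixel contents change,
// i.e. when the pixel ref is assigned a new generation ID.
class EvictOnGenIDChange : public SkPixelRef::GenIDChangeListener {
public:
    EvictOnGenIDChange(HypotheticalCache* cache, uint32_t key)
        : fCache(cache), fKey(key) {}

    virtual void onChange() SK_OVERRIDE { fCache->erase(fKey); }

private:
    HypotheticalCache* fCache;
    uint32_t           fKey;
};

// Usage note: the pixel ref takes ownership of the listener, e.g.
//   pixelRef->addGenIDChangeListener(new EvictOnGenIDChange(cache, key));
```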