OLD | NEW |
1 | 1 |
2 /* | 2 /* |
3 * Copyright 2008 The Android Open Source Project | 3 * Copyright 2008 The Android Open Source Project |
4 * | 4 * |
5 * Use of this source code is governed by a BSD-style license that can be | 5 * Use of this source code is governed by a BSD-style license that can be |
6 * found in the LICENSE file. | 6 * found in the LICENSE file. |
7 */ | 7 */ |
8 | 8 |
9 | 9 |
10 #ifndef SkPixelRef_DEFINED | 10 #ifndef SkPixelRef_DEFINED |
11 #define SkPixelRef_DEFINED | 11 #define SkPixelRef_DEFINED |
12 | 12 |
13 #include "SkBitmap.h" | 13 #include "SkBitmap.h" |
14 #include "SkRefCnt.h" | 14 #include "SkRefCnt.h" |
15 #include "SkString.h" | 15 #include "SkString.h" |
16 #include "SkFlattenable.h" | 16 #include "SkFlattenable.h" |
17 #include "SkImageInfo.h" | |
18 #include "SkTDArray.h" | 17 #include "SkTDArray.h" |
19 | 18 |
20 //#define SK_SUPPORT_LEGACY_ONLOCKPIXELS | |
21 | |
22 #ifdef SK_DEBUG | 19 #ifdef SK_DEBUG |
23 /** | 20 /** |
24 * Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref | 21 * Defining SK_IGNORE_PIXELREF_SETPRELOCKED will force all pixelref |
25 * subclasses to correctly handle lock/unlock pixels. For performance | 22 * subclasses to correctly handle lock/unlock pixels. For performance |
26 * reasons, simple malloc-based subclasses call setPreLocked() to skip | 23 * reasons, simple malloc-based subclasses call setPreLocked() to skip |
27 * the overhead of implementing these calls. | 24 * the overhead of implementing these calls. |
28 * | 25 * |
29 * This build-flag disables that optimization, to add in debugging our | 26 * This build-flag disables that optimization, to add in debugging our |
30 * call-sites, to ensure that they correctly balance their calls of | 27 * call-sites, to ensure that they correctly balance their calls of |
31 * lock and unlock. | 28 * lock and unlock. |
(...skipping 24 matching lines...) Expand all Loading... |
56 SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex); | 53 SkPixelRef(const SkImageInfo&, SkBaseMutex* mutex); |
57 virtual ~SkPixelRef(); | 54 virtual ~SkPixelRef(); |
58 | 55 |
59 const SkImageInfo& info() const { | 56 const SkImageInfo& info() const { |
60 return fInfo; | 57 return fInfo; |
61 } | 58 } |
62 | 59 |
63 /** Return the pixel memory returned from lockPixels, or null if the | 60 /** Return the pixel memory returned from lockPixels, or null if the |
64 lockCount is 0. | 61 lockCount is 0. |
65 */ | 62 */ |
66 void* pixels() const { return fRec.fPixels; } | 63 void* pixels() const { return fPixels; } |
67 | 64 |
68 /** Return the current colorTable (if any) if pixels are locked, or null. | 65 /** Return the current colorTable (if any) if pixels are locked, or null. |
69 */ | 66 */ |
70 SkColorTable* colorTable() const { return fRec.fColorTable; } | 67 SkColorTable* colorTable() const { return fColorTable; } |
71 | 68 |
72 /** | 69 /** |
73 * To access the actual pixels of a pixelref, it must be "locked". | 70 * To access the actual pixels of a pixelref, it must be "locked". |
74 * Calling lockPixels returns a LockRec struct (on success). | 71 * Calling lockPixels returns a LockRec struct (on success). |
75 */ | 72 */ |
76 struct LockRec { | 73 struct LockRec { |
77 void* fPixels; | 74 void* fPixels; |
78 SkColorTable* fColorTable; | 75 SkColorTable* fColorTable; |
79 size_t fRowBytes; | 76 size_t fRowBytes; |
80 | 77 |
81 void zero() { sk_bzero(this, sizeof(*this)); } | 78 void zero() { sk_bzero(this, sizeof(*this)); } |
82 | 79 |
83 bool isZero() const { | 80 bool isZero() const { |
84 return NULL == fPixels && NULL == fColorTable && 0 == fRowBytes; | 81 return NULL == fPixels && NULL == fColorTable && 0 == fRowBytes; |
85 } | 82 } |
86 }; | 83 }; |
87 | 84 |
88 /** | 85 /** |
89 * Returns true if the lockcount > 0 | 86 * Returns true if the lockcount > 0 |
90 */ | 87 */ |
91 bool isLocked() const { return fLockCount > 0; } | 88 bool isLocked() const { return fLockCount > 0; } |
92 | 89 |
93 SkDEBUGCODE(int getLockCount() const { return fLockCount; }) | 90 SkDEBUGCODE(int getLockCount() const { return fLockCount; }) |
94 | 91 |
95 /** | 92 /** Call to access the pixel memory, which is returned. Balance with a call |
96 * Call to access the pixel memory. Return true on success. Balance this | 93 to unlockPixels(). |
97 * with a call to unlockPixels(). | 94 */ |
98 */ | 95 void lockPixels(); |
99 bool lockPixels(); | |
100 | |
101 /** | |
102 * Call to access the pixel memory. On success, return true and fill out | |
103 * the specified rec. On failure, return false and ignore the rec parameter
. | |
104 * Balance this with a call to unlockPixels(). | |
105 */ | |
106 bool lockPixels(LockRec* rec); | |
107 | |
108 /** Call to balance a previous call to lockPixels(). Returns the pixels | 96 /** Call to balance a previous call to lockPixels(). Returns the pixels |
109 (or null) after the unlock. NOTE: lock calls can be nested, but the | 97 (or null) after the unlock. NOTE: lock calls can be nested, but the |
110 matching number of unlock calls must be made in order to free the | 98 matching number of unlock calls must be made in order to free the |
111 memory (if the subclass implements caching/deferred-decoding.) | 99 memory (if the subclass implements caching/deferred-decoding.) |
112 */ | 100 */ |
113 void unlockPixels(); | 101 void unlockPixels(); |
114 | 102 |
115 /** | 103 /** |
116 * Some bitmaps can return a copy of their pixels for lockPixels(), but | 104 * Some bitmaps can return a copy of their pixels for lockPixels(), but |
117 * that copy, if modified, will not be pushed back. These bitmaps should | 105 * that copy, if modified, will not be pushed back. These bitmaps should |
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
254 // This can be used to invalidate caches keyed by SkPixelRef generation ID. | 242 // This can be used to invalidate caches keyed by SkPixelRef generation ID. |
255 struct GenIDChangeListener { | 243 struct GenIDChangeListener { |
256 virtual ~GenIDChangeListener() {} | 244 virtual ~GenIDChangeListener() {} |
257 virtual void onChange() = 0; | 245 virtual void onChange() = 0; |
258 }; | 246 }; |
259 | 247 |
260 // Takes ownership of listener. | 248 // Takes ownership of listener. |
261 void addGenIDChangeListener(GenIDChangeListener* listener); | 249 void addGenIDChangeListener(GenIDChangeListener* listener); |
262 | 250 |
263 protected: | 251 protected: |
264 #ifdef SK_SUPPORT_LEGACY_ONLOCKPIXELS | 252 /** Called when the lockCount goes from 0 to 1. The caller will have already |
265 virtual void* onLockPixels(SkColorTable**); | 253 acquired a mutex for thread safety, so this method need not do that. |
266 virtual bool onNewLockPixels(LockRec*); | 254 */ |
267 #else | 255 virtual void* onLockPixels(SkColorTable**) = 0; |
268 /** | |
269 * On success, returns true and fills out the LockRec for the pixels. On | |
270 * failure returns false and ignores the LockRec parameter. | |
271 * | |
272 * The caller will have already acquired a mutex for thread safety, so this | |
273 * method need not do that. | |
274 */ | |
275 virtual bool onNewLockPixels(LockRec*) = 0; | |
276 #endif | |
277 | 256 |
278 /** | 257 /** |
279 * Balancing the previous successful call to onNewLockPixels. The locked | 258 * Called when the lock count goes from 1 to 0. The caller will have |
280 * pixel address will no longer be referenced, so the subclass is free to | 259 * already acquired a mutex for thread safety, so this method need not do |
281 * move or discard that memory. | 260 * that. |
282 * | 261 * |
283 * The caller will have already acquired a mutex for thread safety, so this | 262 * If the previous call to onLockPixels failed (i.e. returned NULL), then |
284 * method need not do that. | 263 * the onUnlockPixels will NOT be called. |
285 */ | 264 */ |
286 virtual void onUnlockPixels() = 0; | 265 virtual void onUnlockPixels() = 0; |
287 | 266 |
288 /** Default impl returns true */ | 267 /** Default impl returns true */ |
289 virtual bool onLockPixelsAreWritable() const; | 268 virtual bool onLockPixelsAreWritable() const; |
290 | 269 |
291 // returns false; | 270 // returns false; |
292 virtual bool onImplementsDecodeInto(); | 271 virtual bool onImplementsDecodeInto(); |
293 // returns false; | 272 // returns false; |
294 virtual bool onDecodeInto(int pow2, SkBitmap* bitmap); | 273 virtual bool onDecodeInto(int pow2, SkBitmap* bitmap); |
(...skipping 24 matching lines...) Expand all Loading... |
319 */ | 298 */ |
320 SkBaseMutex* mutex() const { return fMutex; } | 299 SkBaseMutex* mutex() const { return fMutex; } |
321 | 300 |
322 // serialization | 301 // serialization |
323 SkPixelRef(SkFlattenableReadBuffer&, SkBaseMutex*); | 302 SkPixelRef(SkFlattenableReadBuffer&, SkBaseMutex*); |
324 virtual void flatten(SkFlattenableWriteBuffer&) const SK_OVERRIDE; | 303 virtual void flatten(SkFlattenableWriteBuffer&) const SK_OVERRIDE; |
325 | 304 |
326 // only call from constructor. Flags this to always be locked, removing | 305 // only call from constructor. Flags this to always be locked, removing |
327 // the need to grab the mutex and call onLockPixels/onUnlockPixels. | 306 // the need to grab the mutex and call onLockPixels/onUnlockPixels. |
328 // Performance tweak to avoid those calls (esp. in multi-thread use case). | 307 // Performance tweak to avoid those calls (esp. in multi-thread use case). |
329 void setPreLocked(void*, size_t rowBytes, SkColorTable*); | 308 void setPreLocked(void* pixels, SkColorTable* ctable); |
330 | 309 |
331 private: | 310 private: |
332 SkBaseMutex* fMutex; // must remain in scope for the life of this object | 311 SkBaseMutex* fMutex; // must remain in scope for the life of this object |
333 | 312 |
334 const SkImageInfo fInfo; | 313 const SkImageInfo fInfo; |
335 | 314 |
336 // LockRec is only valid if we're in a locked state (isLocked()) | 315 void* fPixels; |
337 LockRec fRec; | 316 SkColorTable* fColorTable; // we do not track ownership, subclass does |
338 int fLockCount; | 317 int fLockCount; |
339 | 318 |
340 mutable uint32_t fGenerationID; | 319 mutable uint32_t fGenerationID; |
341 mutable bool fUniqueGenerationID; | 320 mutable bool fUniqueGenerationID; |
342 | 321 |
343 SkTDArray<GenIDChangeListener*> fGenIDChangeListeners; // pointers are owne
d | 322 SkTDArray<GenIDChangeListener*> fGenIDChangeListeners; // pointers are owne
d |
344 | 323 |
345 SkString fURI; | 324 SkString fURI; |
346 | 325 |
347 // can go from false to true, but never from true to false | 326 // can go from false to true, but never from true to false |
348 bool fIsImmutable; | 327 bool fIsImmutable; |
349 // only ever set in constructor, const after that | 328 // only ever set in constructor, const after that |
350 bool fPreLocked; | 329 bool fPreLocked; |
351 | 330 |
352 void needsNewGenID(); | 331 void needsNewGenID(); |
353 void callGenIDChangeListeners(); | 332 void callGenIDChangeListeners(); |
354 | 333 |
355 void setMutex(SkBaseMutex* mutex); | 334 void setMutex(SkBaseMutex* mutex); |
356 | 335 |
357 // When copying a bitmap to another with the same shape and config, we can s
afely | 336 // When copying a bitmap to another with the same shape and config, we can s
afely |
358 // clone the pixelref generation ID too, which makes them equivalent under c
aching. | 337 // clone the pixelref generation ID too, which makes them equivalent under c
aching. |
359 friend class SkBitmap; // only for cloneGenID | 338 friend class SkBitmap; // only for cloneGenID |
360 void cloneGenID(const SkPixelRef&); | 339 void cloneGenID(const SkPixelRef&); |
361 | 340 |
362 typedef SkFlattenable INHERITED; | 341 typedef SkFlattenable INHERITED; |
363 }; | 342 }; |
364 | 343 |
365 #endif | 344 #endif |
OLD | NEW |