OLD | NEW |
---|---|
1 | 1 |
2 /* | 2 /* |
3 * Copyright 2006 The Android Open Source Project | 3 * Copyright 2006 The Android Open Source Project |
4 * | 4 * |
5 * Use of this source code is governed by a BSD-style license that can be | 5 * Use of this source code is governed by a BSD-style license that can be |
6 * found in the LICENSE file. | 6 * found in the LICENSE file. |
7 */ | 7 */ |
8 | 8 |
9 | 9 |
10 #ifndef SkRefCnt_DEFINED | 10 #ifndef SkRefCnt_DEFINED |
(...skipping 19 matching lines...) | |
30 SK_DECLARE_INST_COUNT_ROOT(SkRefCntBase) | 30 SK_DECLARE_INST_COUNT_ROOT(SkRefCntBase) |
31 | 31 |
32 /** Default construct, initializing the reference count to 1. | 32 /** Default construct, initializing the reference count to 1. |
33 */ | 33 */ |
34 SkRefCntBase() : fRefCnt(1) {} | 34 SkRefCntBase() : fRefCnt(1) {} |
35 | 35 |
36 /** Destruct, asserting that the reference count is 1. | 36 /** Destruct, asserting that the reference count is 1. |
37 */ | 37 */ |
38 virtual ~SkRefCntBase() { | 38 virtual ~SkRefCntBase() { |
39 #ifdef SK_DEBUG | 39 #ifdef SK_DEBUG |
40 SkASSERT(fRefCnt == 1); | 40 SkASSERT(this->unique()); |
41 fRefCnt = 0; // illegal value, to catch us if we reuse after delete | 41 fRefCnt = 0; // illegal value, to catch us if we reuse after delete |
42 #endif | 42 #endif |
43 } | 43 } |
44 | 44 |
45 /** Return the reference count. Use only for debugging. */ | 45 /** Return the reference count. Use only for debugging. */ |
46 int32_t getRefCnt() const { return fRefCnt; } | 46 int32_t getRefCnt() const { return fRefCnt; } |
47 | 47 |
48 /** May return true if the caller is the only owner. | 48 /** May return true if the caller is the only owner. |
49 * Ensures that all previous owner's actions are complete. | 49 * Ensures that all previous owner's actions are complete. |
50 */ | 50 */ |
51 bool unique() const { | 51 bool unique() const { |
52 // We believe we're reading fRefCnt in a safe way here, so we stifle the TSAN warning about | 52 // We believe we're reading fRefCnt in a safe way here, so we stifle the TSAN warning about |
53 // an unprotected read. Generally, don't read fRefCnt, and don't stifle this warning. | 53 // an unprotected read. Generally, don't read fRefCnt, and don't stifle this warning. |
54 bool const unique = (1 == SK_ANNOTATE_UNPROTECTED_READ(fRefCnt)); | 54 bool const unique = (1 == SK_ANNOTATE_UNPROTECTED_READ(fRefCnt)); |
55 if (unique) { | 55 if (unique) { |
56 SK_ANNOTATE_HAPPENS_AFTER(this); | |
56 // Acquire barrier (L/SL), if not provided by load of fRefCnt. | 57 // Acquire barrier (L/SL), if not provided by load of fRefCnt. |
57 // Prevents user's 'unique' code from happening before decrements. | 58 // Prevents user's 'unique' code from happening before decrements. |
58 //TODO: issue the barrier. | 59 //TODO: issue the barrier. |
bungeman-skia
2014/04/23 22:37:54
We do need to actually issue a barrier here on ARM
mtklein
2014/04/23 23:41:47
Ooh, yes, or as you noted, provide a way to load f
bungeman-skia
2014/04/24 14:06:56
Eh, there's no real reason to provide any barrier
59 } | 60 } |
60 return unique; | 61 return unique; |
61 } | 62 } |
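The unprotected read plus the TODO barrier in the thread above amount to an acquire load of fRefCnt. Below is a minimal sketch of the same check written against C++11 <atomic>, which this tree does not yet use (it relies on sk_atomic_* and the SK_ANNOTATE_* TSAN macros); the name unique_cpp11 and the free-function shape are illustrative only:

    #include <atomic>
    #include <cstdint>

    // Sketch: an acquire load supplies the L/SL barrier the TODO asks for,
    // so when we observe a count of 1, every prior owner's work before its
    // unref() is visible to the caller's "unique" code.
    bool unique_cpp11(const std::atomic<int32_t>& refCnt) {
        return refCnt.load(std::memory_order_acquire) == 1;
    }

With a real atomic load there is nothing for TSAN to flag, so no SK_ANNOTATE_UNPROTECTED_READ equivalent is needed.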
62 | 63 |
63 /** Increment the reference count. Must be balanced by a call to unref(). | 64 /** Increment the reference count. Must be balanced by a call to unref(). |
64 */ | 65 */ |
65 void ref() const { | 66 void ref() const { |
66 SkASSERT(fRefCnt > 0); | 67 SkASSERT(this->unsafeGetRefCnt() > 0); |
67 sk_atomic_inc(&fRefCnt); // No barrier required. | 68 sk_atomic_inc(&fRefCnt); // No barrier required. |
68 } | 69 } |
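The "No barrier required" claim holds because a caller of ref() must already own a reference: the count cannot concurrently reach zero, and taking another reference publishes no data. In C++11 terms (hypothetical helper, same assumptions as the sketch above):

    #include <atomic>
    #include <cstdint>

    // Sketch: atomicity alone suffices for the increment; relaxed ordering
    // mirrors sk_atomic_inc() with no barrier.
    void ref_cpp11(std::atomic<int32_t>& refCnt) {
        refCnt.fetch_add(1, std::memory_order_relaxed);
    }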
69 | 70 |
70 /** Decrement the reference count. If the reference count is 1 before the | 71 /** Decrement the reference count. If the reference count is 1 before the |
71 decrement, then delete the object. Note that if this is the case, then | 72 decrement, then delete the object. Note that if this is the case, then |
72 the object needs to have been allocated via new, and not on the stack. | 73 the object needs to have been allocated via new, and not on the stack. |
73 */ | 74 */ |
74 void unref() const { | 75 void unref() const { |
75 SkASSERT(fRefCnt > 0); | 76 SkASSERT(this->unsafeGetRefCnt() > 0); |
77 SK_ANNOTATE_HAPPENS_BEFORE(this); | |
76 // Release barrier (SL/S), if not provided below. | 78 // Release barrier (SL/S), if not provided below. |
77 if (sk_atomic_dec(&fRefCnt) == 1) { | 79 if (sk_atomic_dec(&fRefCnt) == 1) { |
80 SK_ANNOTATE_HAPPENS_AFTER(this); | |
78 // Acquire barrier (L/SL), if not provided above. | 81 // Acquire barrier (L/SL), if not provided above. |
79 // Prevents code in dispose from happening before the decrement. | 82 // Prevents code in dispose from happening before the decrement. |
80 sk_membar_acquire__after_atomic_dec(); | 83 sk_membar_acquire__after_atomic_dec(); |
81 internal_dispose(); | 84 internal_dispose(); |
82 } | 85 } |
83 } | 86 } |
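The two barrier comments in unref() describe the classic release/acquire pairing for reference counting: each owner's decrement is a release, so its writes happen-before destruction, and the final owner issues an acquire (sk_membar_acquire__after_atomic_dec here) before dispose runs. A sketch of the same shape in C++11 terms; Thing and unref_cpp11 are stand-ins, not Skia API:

    #include <atomic>
    #include <cstdint>

    struct Thing {};  // stands in for an SkRefCntBase subclass

    // Sketch: release on the decrement publishes this owner's writes; the
    // acquire fence in the zero branch orders disposal after all of them.
    void unref_cpp11(std::atomic<int32_t>& refCnt, const Thing* obj) {
        if (refCnt.fetch_sub(1, std::memory_order_release) == 1) {
            std::atomic_thread_fence(std::memory_order_acquire);
            delete obj;  // plays the role of internal_dispose()
        }
    }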
84 | 87 |
85 #ifdef SK_DEBUG | 88 #ifdef SK_DEBUG |
86 void validate() const { | 89 void validate() const { |
87 SkASSERT(fRefCnt > 0); | 90 SkASSERT(this->unsafeGetRefCnt() > 0); |
88 } | 91 } |
89 #endif | 92 #endif |
90 | 93 |
91 protected: | 94 protected: |
92 /** | 95 /** |
93 * Allow subclasses to call this if they've overridden internal_dispose | 96 * Allow subclasses to call this if they've overridden internal_dispose |
94 * so they can reset fRefCnt before the destructor is called. Should only | 97 * so they can reset fRefCnt before the destructor is called. Should only |
95 * be called right before calling through to inherited internal_dispose() | 98 * be called right before calling through to inherited internal_dispose() |
96 * or before calling the destructor. | 99 * or before calling the destructor. |
97 */ | 100 */ |
98 void internal_dispose_restore_refcnt_to_1() const { | 101 void internal_dispose_restore_refcnt_to_1() const { |
99 #ifdef SK_DEBUG | 102 #ifdef SK_DEBUG |
100 SkASSERT(0 == fRefCnt); | 103 SkASSERT(0 == fRefCnt); |
101 fRefCnt = 1; | 104 fRefCnt = 1; |
102 #endif | 105 #endif |
103 } | 106 } |
104 | 107 |
105 private: | 108 private: |
109 // OK for use in asserts, but not much else. | |
110 int32_t unsafeGetRefCnt() const { return SK_ANNOTATE_UNPROTECTED_READ(fRefCnt); } | |
111 | |
106 /** | 112 /** |
107 * Called when the ref count goes to 0. | 113 * Called when the ref count goes to 0. |
108 */ | 114 */ |
109 virtual void internal_dispose() const { | 115 virtual void internal_dispose() const { |
110 this->internal_dispose_restore_refcnt_to_1(); | 116 this->internal_dispose_restore_refcnt_to_1(); |
111 SkDELETE(this); | 117 SkDELETE(this); |
112 } | 118 } |
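internal_dispose_restore_refcnt_to_1() exists for subclasses that override internal_dispose() to do something other than delete immediately (the friend classes noted below); restoring the count to 1 keeps the debug-only assert in ~SkRefCntBase satisfied. A hypothetical subclass (SkRecycledThing is not a real Skia class) showing the documented pattern:

    // Sketch of the override contract: restore the count to 1 right before
    // destruction so the base destructor's unique() check holds.
    class SkRecycledThing : public SkRefCntBase {
    private:
        virtual void internal_dispose() const {
            // A real override might first try to recycle *this into a free
            // list and return without deleting.
            this->internal_dispose_restore_refcnt_to_1();
            SkDELETE(this);
        }
    };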
113 | 119 |
114 // The following friends are those which override internal_dispose() | 120 // The following friends are those which override internal_dispose() |
115 // and conditionally call SkRefCnt::internal_dispose(). | 121 // and conditionally call SkRefCnt::internal_dispose(). |
(...skipping 167 matching lines...) | |
283 typedef T* SkRefPtr::*unspecified_bool_type; | 289 typedef T* SkRefPtr::*unspecified_bool_type; |
284 operator unspecified_bool_type() const { | 290 operator unspecified_bool_type() const { |
285 return fObj ? &SkRefPtr::fObj : NULL; | 291 return fObj ? &SkRefPtr::fObj : NULL; |
286 } | 292 } |
287 | 293 |
288 private: | 294 private: |
289 T* fObj; | 295 T* fObj; |
290 }; | 296 }; |
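operator unspecified_bool_type() is the pre-C++11 safe-bool idiom: a pointer-to-member converts to true or false in boolean contexts but permits no arithmetic and no accidental integer conversions. A small usage sketch; the function name example is hypothetical:

    // Sketch: SkRefPtr tests cleanly in conditions, yet refuses to act
    // like an integer.
    void example(const SkRefPtr<SkRefCntBase>& ptr) {
        if (ptr) {
            // non-NULL: the conversion above returned &SkRefPtr::fObj
        }
        // int n = ptr;  // does not compile: member pointers don't convert to int
    }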
291 | 297 |
292 #endif | 298 #endif |