OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2012 Google Inc. | 2 * Copyright 2012 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 #ifndef SkWeakRefCnt_DEFINED | 8 #ifndef SkWeakRefCnt_DEFINED |
9 #define SkWeakRefCnt_DEFINED | 9 #define SkWeakRefCnt_DEFINED |
10 | 10 |
(...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
82 | 82 |
83 /** Creates a strong reference from a weak reference, if possible. The | 83 /** Creates a strong reference from a weak reference, if possible. The |
84 caller must already be an owner. If try_ref() returns true the owner | 84 caller must already be an owner. If try_ref() returns true the owner |
85 is in posession of an additional strong reference. Both the original | 85 is in possession of an additional strong reference. Both the original |
86 reference and new reference must be properly unreferenced. If try_ref() | 86 reference and new reference must be properly unreferenced. If try_ref() |
87 returns false, no strong reference could be created and the owner's | 87 returns false, no strong reference could be created and the owner's |
88 reference is in the same state as before the call. | 88 reference is in the same state as before the call. |
89 */ | 89 */ |
90 bool SK_WARN_UNUSED_RESULT try_ref() const { | 90 bool SK_WARN_UNUSED_RESULT try_ref() const { |
91 if (sk_atomic_conditional_inc(&fRefCnt) != 0) { | 91 if (sk_atomic_conditional_inc(&fRefCnt) != 0) { |
92 // Aquire barrier (L/SL), if not provided above. | 92 // Acquire barrier (L/SL), if not provided above. |
93 // Prevents subsequent code from happening before the increment. | 93 // Prevents subsequent code from happening before the increment. |
94 sk_membar_aquire__after_atomic_conditional_inc(); | 94 sk_membar_acquire__after_atomic_conditional_inc(); |
95 return true; | 95 return true; |
96 } | 96 } |
97 return false; | 97 return false; |
98 } | 98 } |
99 | 99 |
100 /** Increment the weak reference count. Must be balanced by a call to | 100 /** Increment the weak reference count. Must be balanced by a call to |
101 weak_unref(). | 101 weak_unref(). |
102 */ | 102 */ |
103 void weak_ref() const { | 103 void weak_ref() const { |
104 SkASSERT(fRefCnt > 0); | 104 SkASSERT(fRefCnt > 0); |
105 SkASSERT(fWeakCnt > 0); | 105 SkASSERT(fWeakCnt > 0); |
106 sk_atomic_inc(&fWeakCnt); // No barrier required. | 106 sk_atomic_inc(&fWeakCnt); // No barrier required. |
107 } | 107 } |
108 | 108 |
109 /** Decrement the weak reference count. If the weak reference count is 1 | 109 /** Decrement the weak reference count. If the weak reference count is 1 |
110 before the decrement, then call delete on the object. Note that if this | 110 before the decrement, then call delete on the object. Note that if this |
111 is the case, then the object needs to have been allocated via new, and | 111 is the case, then the object needs to have been allocated via new, and |
112 not on the stack. | 112 not on the stack. |
113 */ | 113 */ |
114 void weak_unref() const { | 114 void weak_unref() const { |
115 SkASSERT(fWeakCnt > 0); | 115 SkASSERT(fWeakCnt > 0); |
116 // Release barrier (SL/S), if not provided below. | 116 // Release barrier (SL/S), if not provided below. |
117 if (sk_atomic_dec(&fWeakCnt) == 1) { | 117 if (sk_atomic_dec(&fWeakCnt) == 1) { |
118 // Aquire barrier (L/SL), if not provided above. | 118 // Acquire barrier (L/SL), if not provided above. |
119 // Prevents code in destructor from happening before the decrement. | 119 // Prevents code in destructor from happening before the decrement. |
120 sk_membar_aquire__after_atomic_dec(); | 120 sk_membar_acquire__after_atomic_dec(); |
121 #ifdef SK_DEBUG | 121 #ifdef SK_DEBUG |
122 // so our destructor won't complain | 122 // so our destructor won't complain |
123 fWeakCnt = 1; | 123 fWeakCnt = 1; |
124 #endif | 124 #endif |
125 SkRefCnt::internal_dispose(); | 125 SkRefCnt::internal_dispose(); |
126 } | 126 } |
127 } | 127 } |
128 | 128 |
129 /** Returns true if there are no strong references to the object. When this | 129 /** Returns true if there are no strong references to the object. When this |
130 is the case all future calls to try_ref() will return false. | 130 is the case all future calls to try_ref() will return false. |
(...skipping 21 matching lines...) Expand all Loading... |
152 weak_unref(); | 152 weak_unref(); |
153 } | 153 } |
154 | 154 |
155 /* Invariant: fWeakCnt = #weak + (fRefCnt > 0 ? 1 : 0) */ | 155 /* Invariant: fWeakCnt = #weak + (fRefCnt > 0 ? 1 : 0) */ |
156 mutable int32_t fWeakCnt; | 156 mutable int32_t fWeakCnt; |
157 | 157 |
158 typedef SkRefCnt INHERITED; | 158 typedef SkRefCnt INHERITED; |
159 }; | 159 }; |
160 | 160 |
161 #endif | 161 #endif |
OLD | NEW |