| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2014 Google Inc. | 2 * Copyright 2014 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 #ifndef SkLazyPtr_DEFINED | 8 #ifndef SkLazyPtr_DEFINED |
| 9 #define SkLazyPtr_DEFINED | 9 #define SkLazyPtr_DEFINED |
| 10 | 10 |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 96 // _through_ our atomically set pointer, there is a data dependency between our | 96 // _through_ our atomically set pointer, there is a data dependency between our |
| 97 // atomic and the guarded data, and so we only need writer-releases / | 97 // atomic and the guarded data, and so we only need writer-releases / |
| 98 // reader-consumes memory pairing rather than the more general writer-releases / | 98 // reader-consumes memory pairing rather than the more general writer-releases / |
| 99 // reader-acquires convention. | 99 // reader-acquires convention. |
| 100 // | 100 // |
| 101 // This is nice, because a consume load is free on all our platforms: x86, | 101 // This is nice, because a consume load is free on all our platforms: x86, |
| 102 // ARM, MIPS. In contrast, an acquire load issues a memory barrier on non-x86. | 102 // ARM, MIPS. In contrast, an acquire load issues a memory barrier on non-x86. |
| 103 | 103 |
// Load *ptr with consume memory ordering. Readers only touch the guarded data
// _through_ the loaded pointer, so a data dependency (consume) is all the
// ordering we need; no acquire barrier is required on our platforms.
template <typename T>
T consume_load(T* ptr) {
#if defined(THREAD_SANITIZER)
    // TSAN gets anxious if we don't tell it what we're actually doing, a consume load.
    return sk_atomic_load(ptr, sk_memory_order_consume);
#else
    // All current compilers blindly upgrade consume memory order to acquire memory order.
    // For our purposes, though, no memory barrier is required, so we lie and use relaxed.
    return sk_atomic_load(ptr, sk_memory_order_relaxed);
#endif
}
| 115 | 115 |
| 116 // This has no constructor and must be zero-initialized (the macro above does this). | 116 // This has no constructor and must be zero-initialized (the macro above does this). |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 170 T* ptr = Private::consume_load(&fPtr); | 170 T* ptr = Private::consume_load(&fPtr); |
| 171 return ptr ? ptr : Private::try_cas<T*, Destroy>(&fPtr, create()); | 171 return ptr ? ptr : Private::try_cas<T*, Destroy>(&fPtr, create()); |
| 172 } | 172 } |
| 173 | 173 |
| 174 private: | 174 private: |
| 175 mutable T* fPtr; | 175 mutable T* fPtr; |
| 176 }; | 176 }; |
| 177 | 177 |
| 178 | 178 |
| 179 #endif//SkLazyPtr_DEFINED | 179 #endif//SkLazyPtr_DEFINED |
| OLD | NEW |