OLD | NEW |
1 // Protocol Buffers - Google's data interchange format | 1 // Protocol Buffers - Google's data interchange format |
2 // Copyright 2012 Google Inc. All rights reserved. | 2 // Copyright 2012 Google Inc. All rights reserved. |
3 // https://developers.google.com/protocol-buffers/ | 3 // https://developers.google.com/protocol-buffers/ |
4 // | 4 // |
5 // Redistribution and use in source and binary forms, with or without | 5 // Redistribution and use in source and binary forms, with or without |
6 // modification, are permitted provided that the following conditions are | 6 // modification, are permitted provided that the following conditions are |
7 // met: | 7 // met: |
8 // | 8 // |
9 // * Redistributions of source code must retain the above copyright | 9 // * Redistributions of source code must retain the above copyright |
10 // notice, this list of conditions and the following disclaimer. | 10 // notice, this list of conditions and the following disclaimer. |
(...skipping 55 matching lines...)
66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, | 66 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, |
67 Atomic32 increment) { | 67 Atomic32 increment) { |
68 return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr)); | 68 return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr)); |
69 } | 69 } |
70 | 70 |
71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, | 71 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, |
72 Atomic32 increment) { | 72 Atomic32 increment) { |
73 return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr)); | 73 return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr)); |
74 } | 74 } |
75 | 75 |
76 inline void MemoryBarrierInternal() { | 76 inline void MemoryBarrier() { |
77 OSMemoryBarrier(); | 77 OSMemoryBarrier(); |
78 } | 78 } |
79 | 79 |
80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, | 80 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, |
81 Atomic32 old_value, | 81 Atomic32 old_value, |
82 Atomic32 new_value) { | 82 Atomic32 new_value) { |
83 Atomic32 prev_value; | 83 Atomic32 prev_value; |
84 do { | 84 do { |
85 if (OSAtomicCompareAndSwap32Barrier(old_value, new_value, | 85 if (OSAtomicCompareAndSwap32Barrier(old_value, new_value, |
86 const_cast<Atomic32*>(ptr))) { | 86 const_cast<Atomic32*>(ptr))) { |
87 return old_value; | 87 return old_value; |
88 } | 88 } |
89 prev_value = *ptr; | 89 prev_value = *ptr; |
90 } while (prev_value == old_value); | 90 } while (prev_value == old_value); |
91 return prev_value; | 91 return prev_value; |
92 } | 92 } |
93 | 93 |
94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, | 94 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, |
95 Atomic32 old_value, | 95 Atomic32 old_value, |
96 Atomic32 new_value) { | 96 Atomic32 new_value) { |
97 return Acquire_CompareAndSwap(ptr, old_value, new_value); | 97 return Acquire_CompareAndSwap(ptr, old_value, new_value); |
98 } | 98 } |
99 | 99 |
100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { | 100 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { |
101 *ptr = value; | 101 *ptr = value; |
102 } | 102 } |
103 | 103 |
104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { | 104 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { |
105 *ptr = value; | 105 *ptr = value; |
106 MemoryBarrierInternal(); | 106 MemoryBarrier(); |
107 } | 107 } |
108 | 108 |
109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { | 109 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { |
110 MemoryBarrierInternal(); | 110 MemoryBarrier(); |
111 *ptr = value; | 111 *ptr = value; |
112 } | 112 } |
113 | 113 |
114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { | 114 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { |
115 return *ptr; | 115 return *ptr; |
116 } | 116 } |
117 | 117 |
118 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { | 118 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { |
119 Atomic32 value = *ptr; | 119 Atomic32 value = *ptr; |
120 MemoryBarrierInternal(); | 120 MemoryBarrier(); |
121 return value; | 121 return value; |
122 } | 122 } |
123 | 123 |
124 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { | 124 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { |
125 MemoryBarrierInternal(); | 125 MemoryBarrier(); |
126 return *ptr; | 126 return *ptr; |
127 } | 127 } |
128 | 128 |
129 #ifdef __LP64__ | 129 #ifdef __LP64__ |
130 | 130 |
131 // 64-bit implementation on 64-bit platform | 131 // 64-bit implementation on 64-bit platform |
132 | 132 |
133 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, | 133 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, |
134 Atomic64 old_value, | 134 Atomic64 old_value, |
135 Atomic64 new_value) { | 135 Atomic64 new_value) { |
(...skipping 50 matching lines...)
186 // Acquire and Release memory barriers; they are equivalent. | 186 // Acquire and Release memory barriers; they are equivalent. |
187 return Acquire_CompareAndSwap(ptr, old_value, new_value); | 187 return Acquire_CompareAndSwap(ptr, old_value, new_value); |
188 } | 188 } |
189 | 189 |
190 inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) { | 190 inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) { |
191 *ptr = value; | 191 *ptr = value; |
192 } | 192 } |
193 | 193 |
194 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) { | 194 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) { |
195 *ptr = value; | 195 *ptr = value; |
196 MemoryBarrierInternal(); | 196 MemoryBarrier(); |
197 } | 197 } |
198 | 198 |
199 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { | 199 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { |
200 MemoryBarrierInternal(); | 200 MemoryBarrier(); |
201 *ptr = value; | 201 *ptr = value; |
202 } | 202 } |
203 | 203 |
204 inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { | 204 inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { |
205 return *ptr; | 205 return *ptr; |
206 } | 206 } |
207 | 207 |
208 inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { | 208 inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { |
209 Atomic64 value = *ptr; | 209 Atomic64 value = *ptr; |
210 MemoryBarrierInternal(); | 210 MemoryBarrier(); |
211 return value; | 211 return value; |
212 } | 212 } |
213 | 213 |
214 inline Atomic64 Release_Load(volatile const Atomic64* ptr) { | 214 inline Atomic64 Release_Load(volatile const Atomic64* ptr) { |
215 MemoryBarrierInternal(); | 215 MemoryBarrier(); |
216 return *ptr; | 216 return *ptr; |
217 } | 217 } |
218 | 218 |
219 #endif // defined(__LP64__) | 219 #endif // defined(__LP64__) |
220 | 220 |
221 } // namespace internal | 221 } // namespace internal |
222 } // namespace protobuf | 222 } // namespace protobuf |
223 } // namespace google | 223 } // namespace google |
224 | 224 |
225 #endif // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MACOSX_H_ | 225 #endif // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_MACOSX_H_ |
OLD | NEW |