| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
| 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
| 7 * are met: | 7 * are met: |
| 8 * | 8 * |
| 9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
| 10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
| (...skipping 125 matching lines...) |
| 136 #if defined(THREAD_SANITIZER) | 136 #if defined(THREAD_SANITIZER) |
| 137 | 137 |
| 138 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) | 138 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) |
| 139 { | 139 { |
| 140 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); | 140 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); |
| 141 } | 141 } |
| 142 ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value) | 142 ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value) |
| 143 { | 143 { |
| 144 __tsan_atomic32_store(reinterpret_cast<volatile int*>(ptr), static_cast<int>(value), __tsan_memory_order_release); | 144 __tsan_atomic32_store(reinterpret_cast<volatile int*>(ptr), static_cast<int>(value), __tsan_memory_order_release); |
| 145 } | 145 } |
| 146 ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value) |
| 147 { |
| 148 #if CPU(64BIT) |
| 149 __tsan_atomic64_store(reinterpret_cast<volatile long*>(ptr), reinterpret_cast<long>(value), __tsan_memory_order_release); |
| 150 #else |
| 151 __tsan_atomic32_store(reinterpret_cast<volatile long*>(ptr), reinterpret_cast<long>(value), __tsan_memory_order_release); |
| 152 #endif |
| 153 } |
| 146 | 154 |
| 147 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | 155 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) |
| 148 { | 156 { |
| 149 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); | 157 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); |
| 150 } | 158 } |
| 151 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) | 159 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) |
| 152 { | 160 { |
| 153 return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire)); | 161 return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire)); |
| 154 } | 162 } |
| 155 ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr) | 163 ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr) |
| 156 { | 164 { |
| 157 #if CPU(64BIT) | 165 #if CPU(64BIT) |
| 158 return static_cast<unsigned long>(__tsan_atomic64_load(reinterpret_cast<volatile const long*>(ptr), __tsan_memory_order_acquire)); | 166 return static_cast<unsigned long>(__tsan_atomic64_load(reinterpret_cast<volatile const long*>(ptr), __tsan_memory_order_acquire)); |
| 159 #else | 167 #else |
| 160 return static_cast<unsigned long>(__tsan_atomic32_load(reinterpret_cast<volatile const long*>(ptr), __tsan_memory_order_acquire)); | 168 return static_cast<unsigned long>(__tsan_atomic32_load(reinterpret_cast<volatile const long*>(ptr), __tsan_memory_order_acquire)); |
| 161 #endif | 169 #endif |
| 162 } | 170 } |
| 171 ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr) |
| 172 { |
| 173 #if CPU(64BIT) |
| 174 return reinterpret_cast<void*>(__tsan_atomic64_load(reinterpret_cast<volatile const long*>(ptr), __tsan_memory_order_acquire)); |
| 175 #else |
| 176 return reinterpret_cast<void*>(__tsan_atomic32_load(reinterpret_cast<volatile const long*>(ptr), __tsan_memory_order_acquire)); |
| 177 #endif |
| 178 } |
| 163 | 179 |
| 164 #else | 180 #else |
| 165 | 181 |
| 166 #if CPU(X86) || CPU(X86_64) | 182 #if CPU(X86) || CPU(X86_64) |
| 167 // Only a compiler barrier is needed. | 183 // Only a compiler barrier is needed. |
| 168 #if COMPILER(MSVC) | 184 #if COMPILER(MSVC) |
| 169 // Starting from Visual Studio 2005, the compiler guarantees acquire and release | 185 // Starting from Visual Studio 2005, the compiler guarantees acquire and release |
| 170 // semantics for operations on volatile variables. See MSDN entry for | 186 // semantics for operations on volatile variables. See MSDN entry for |
| 171 // MemoryBarrier macro. | 187 // MemoryBarrier macro. |
| 172 #define MEMORY_BARRIER() | 188 #define MEMORY_BARRIER() |
| (...skipping 19 matching lines...) |
| 192 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) | 208 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) |
| 193 { | 209 { |
| 194 MEMORY_BARRIER(); | 210 MEMORY_BARRIER(); |
| 195 *ptr = value; | 211 *ptr = value; |
| 196 } | 212 } |
| 197 ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value) | 213 ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value) |
| 198 { | 214 { |
| 199 MEMORY_BARRIER(); | 215 MEMORY_BARRIER(); |
| 200 *ptr = value; | 216 *ptr = value; |
| 201 } | 217 } |
| 218 ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value) |
| 219 { |
| 220 MEMORY_BARRIER(); |
| 221 *ptr = value; |
| 222 } |
| 202 | 223 |
| 203 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | 224 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) |
| 204 { | 225 { |
| 205 int value = *ptr; | 226 int value = *ptr; |
| 206 MEMORY_BARRIER(); | 227 MEMORY_BARRIER(); |
| 207 return value; | 228 return value; |
| 208 } | 229 } |
| 209 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) | 230 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) |
| 210 { | 231 { |
| 211 unsigned value = *ptr; | 232 unsigned value = *ptr; |
| 212 MEMORY_BARRIER(); | 233 MEMORY_BARRIER(); |
| 213 return value; | 234 return value; |
| 214 } | 235 } |
| 215 ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr) | 236 ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr) |
| 216 { | 237 { |
| 217 unsigned long value = *ptr; | 238 unsigned long value = *ptr; |
| 218 MEMORY_BARRIER(); | 239 MEMORY_BARRIER(); |
| 219 return value; | 240 return value; |
| 220 } | 241 } |
| 221 ALWAYS_INLINE unsigned long long acquireLoad(volatile const unsigned long long* ptr) | 242 ALWAYS_INLINE unsigned long long acquireLoad(volatile const unsigned long long* ptr) |
| 222 { | 243 { |
| 223 unsigned long long value = *ptr; | 244 unsigned long long value = *ptr; |
| 224 MEMORY_BARRIER(); | 245 MEMORY_BARRIER(); |
| 225 return value; | 246 return value; |
| 226 } | 247 } |
| 248 ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr) |
| 249 { |
| 250 void* value = *ptr; |
| 251 MEMORY_BARRIER(); |
| 252 return value; |
| 253 } |
| 227 | 254 |
| 228 #if defined(ADDRESS_SANITIZER) | 255 #if defined(ADDRESS_SANITIZER) |
| 229 | 256 |
| 230 // FIXME: See comment on NO_SANITIZE_ADDRESS in platform/heap/AddressSanitizer.h | 257 // FIXME: See comment on NO_SANITIZE_ADDRESS in platform/heap/AddressSanitizer.h |
| 231 #if !OS(WIN) || COMPILER(CLANG) | 258 #if !OS(WIN) || COMPILER(CLANG) |
| 232 #define NO_SANITIZE_ADDRESS_ATOMICS __attribute__((no_sanitize_address)) | 259 #define NO_SANITIZE_ADDRESS_ATOMICS __attribute__((no_sanitize_address)) |
| 233 #else | 260 #else |
| 234 #define NO_SANITIZE_ADDRESS_ATOMICS | 261 #define NO_SANITIZE_ADDRESS_ATOMICS |
| 235 #endif | 262 #endif |
| 236 | 263 |
| (...skipping 43 matching lines...) |
| 280 using WTF::acquireLoad; | 307 using WTF::acquireLoad; |
| 281 using WTF::releaseStore; | 308 using WTF::releaseStore; |
| 282 | 309 |
| 283 // These methods allow loading from and storing to poisoned memory. Only | 310 // These methods allow loading from and storing to poisoned memory. Only |
| 284 // use these methods if you know what you are doing since they will | 311 // use these methods if you know what you are doing since they will |
| 285 // silence use-after-poison errors from ASan. | 312 // silence use-after-poison errors from ASan. |
| 286 using WTF::asanUnsafeAcquireLoad; | 313 using WTF::asanUnsafeAcquireLoad; |
| 287 using WTF::asanUnsafeReleaseStore; | 314 using WTF::asanUnsafeReleaseStore; |
| 288 | 315 |
| 289 #endif // Atomics_h | 316 #endif // Atomics_h |
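
For context, the overloads this patch adds are the pointer-sized counterparts of the existing `int`/`unsigned` ones: `releaseStore(void* volatile*, void*)` and `acquireLoad(void* volatile const*)`. A typical use is one-time publication of a fully initialized object from one thread to another. The sketch below is illustrative only; `SharedTable`, `g_table`, and the include path are assumptions and not part of the patch, while the two atomic helpers are the ones declared in this header.

```cpp
// Minimal usage sketch for the new void* overloads (names other than
// acquireLoad/releaseStore are hypothetical, not from the patch).
#include "wtf/Atomics.h"

struct SharedTable {
    int entries[64];
};

// Pointer shared between threads; published exactly once.
static void* volatile g_table = nullptr;

// Producer: initialize the object completely, then publish the pointer with
// release semantics so the initialization is visible before the pointer is.
void publishTable()
{
    SharedTable* table = new SharedTable();
    for (int i = 0; i < 64; ++i)
        table->entries[i] = i;
    WTF::releaseStore(&g_table, table);
}

// Consumer: the acquire load pairs with the release store above, so a
// non-null result implies the pointed-to data is fully initialized.
SharedTable* tryGetTable()
{
    return static_cast<SharedTable*>(WTF::acquireLoad(&g_table));
}
```

Judging from the branches visible in this diff, on x86 these calls compile to a plain load/store plus at most a compiler barrier (`MEMORY_BARRIER()` is empty under MSVC), whereas under ThreadSanitizer they map to `__tsan_atomic32_store`/`__tsan_atomic64_load` and friends, so the race detector treats the publication as a properly ordered atomic rather than a data race.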