Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
| 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
| 4 * | 4 * |
| 5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
| 7 * are met: | 7 * are met: |
| 8 * | 8 * |
| 9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
| 10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
| (...skipping 144 matching lines...) | |
| 155 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), static_cast<__tsan_atomic64>(value), __tsan_memory_order_release); | 155 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), static_cast<__tsan_atomic64>(value), __tsan_memory_order_release); |
| 156 } | 156 } |
| 157 ALWAYS_INLINE void releaseStore(volatile unsigned long long* ptr, unsigned long long value) | 157 ALWAYS_INLINE void releaseStore(volatile unsigned long long* ptr, unsigned long long value) |
| 158 { | 158 { |
| 159 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), static_cast<__tsan_atomic64>(value), __tsan_memory_order_release); | 159 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), static_cast<__tsan_atomic64>(value), __tsan_memory_order_release); |
| 160 } | 160 } |
| 161 ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value) | 161 ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value) |
| 162 { | 162 { |
| 163 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), reinterpret_cast<__tsan_atomic64>(value), __tsan_memory_order_release); | 163 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), reinterpret_cast<__tsan_atomic64>(value), __tsan_memory_order_release); |
| 164 } | 164 } |
| 165 ALWAYS_INLINE void releaseStore(volatile float* ptr, float value) | |
| 166 { | |
| 167 union { | |

> tkent, 2015/08/07 01:10:04: should have static_assert for sizeof(int) == sizeof(float)
>
> Raymond Toy, 2015/08/07 16:16:37: Done.

| 168 int ivalue; | |
| 169 float fvalue; | |
| 170 } u; | |
| 171 u.fvalue = value; | |
| 172 __tsan_atomic32_store(reinterpret_cast<volatile __tsan_atomic32*>(ptr), u.ivalue, __tsan_memory_order_release); | |
| 173 } | |
| 174 ALWAYS_INLINE void releaseStore(volatile double* ptr, double value) | |
| 175 { | |
| 176 union { | |
| 177 long ivalue; | |
| 178 double dvalue; | |
| 179 } u; | |
| 180 u.dvalue = value; | |
| 181 __tsan_atomic64_store(reinterpret_cast<volatile __tsan_atomic64*>(ptr), u.ivalue, __tsan_memory_order_release); | |
| 182 } | |
| 165 | 183 |
| 166 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | 184 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) |
| 167 { | 185 { |
| 168 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); | 186 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); |
| 169 } | 187 } |
| 170 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) | 188 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) |
| 171 { | 189 { |
| 172 return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire)); | 190 return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire)); |
| 173 } | 191 } |
| 174 ALWAYS_INLINE long acquireLoad(volatile const long* ptr) | 192 ALWAYS_INLINE long acquireLoad(volatile const long* ptr) |
| 175 { | 193 { |
| 176 return static_cast<long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); | 194 return static_cast<long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); |
| 177 } | 195 } |
| 178 ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr) | 196 ALWAYS_INLINE unsigned long acquireLoad(volatile const unsigned long* ptr) |
| 179 { | 197 { |
| 180 return static_cast<unsigned long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); | 198 return static_cast<unsigned long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); |
| 181 } | 199 } |
| 182 ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr) | 200 ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr) |
| 183 { | 201 { |
| 184 return reinterpret_cast<void*>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); | 202 return reinterpret_cast<void*>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); |
| 185 } | 203 } |
| 204 ALWAYS_INLINE float acquireLoad(volatile const float* ptr) | |
| 205 { | |
| 206 union { | |
| 207 int ivalue; | |
| 208 float fvalue; | |
| 209 } u; | |
| 210 u.ivalue = __tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire); | |
| 211 return u.fvalue; | |
| 212 } | |
| 213 ALWAYS_INLINE double acquireLoad(volatile const double* ptr) | |
| 214 { | |
| 215 union { | |
| 216 long ivalue; | |
| 217 double dvalue; | |
| 218 } u; | |
| 219 u.ivalue = static_cast<long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire)); | |
| 220 return u.dvalue; | |
| 221 } | |
| 186 #endif | 222 #endif |
| 187 | 223 |
| 188 #else // defined(THREAD_SANITIZER) | 224 #else // defined(THREAD_SANITIZER) |
| 189 | 225 |
| 190 #if CPU(X86) || CPU(X86_64) | 226 #if CPU(X86) || CPU(X86_64) |
| 191 // Only compiler barrier is needed. | 227 // Only compiler barrier is needed. |
| 192 #if COMPILER(MSVC) | 228 #if COMPILER(MSVC) |
| 193 // Starting from Visual Studio 2005 compiler guarantees acquire and release | 229 // Starting from Visual Studio 2005 compiler guarantees acquire and release |
| 194 // semantics for operations on volatile variables. See MSDN entry for | 230 // semantics for operations on volatile variables. See MSDN entry for |
| 195 // MemoryBarrier macro. | 231 // MemoryBarrier macro. |
| (...skipping 40 matching lines...) | |
| 236 ALWAYS_INLINE void releaseStore(volatile unsigned long long* ptr, unsigned long long value) | 272 ALWAYS_INLINE void releaseStore(volatile unsigned long long* ptr, unsigned long long value) |
| 237 { | 273 { |
| 238 MEMORY_BARRIER(); | 274 MEMORY_BARRIER(); |
| 239 *ptr = value; | 275 *ptr = value; |
| 240 } | 276 } |
| 241 ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value) | 277 ALWAYS_INLINE void releaseStore(void* volatile* ptr, void* value) |
| 242 { | 278 { |
| 243 MEMORY_BARRIER(); | 279 MEMORY_BARRIER(); |
| 244 *ptr = value; | 280 *ptr = value; |
| 245 } | 281 } |
| 282 ALWAYS_INLINE void releaseStore(volatile float* ptr, float value) | |
| 283 { | |
| 284 MEMORY_BARRIER(); | |
| 285 *ptr = value; | |
| 286 } | |
| 287 ALWAYS_INLINE void releaseStore(volatile double* ptr, double value) | |
| 288 { | |
| 289 MEMORY_BARRIER(); | |
| 290 *ptr = value; | |
| 291 } | |
| 246 | 292 |
| 293 | |
| 247 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | 294 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) |
| 248 { | 295 { |
| 249 int value = *ptr; | 296 int value = *ptr; |
| 250 MEMORY_BARRIER(); | 297 MEMORY_BARRIER(); |
| 251 return value; | 298 return value; |
| 252 } | 299 } |
| 253 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) | 300 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) |
| 254 { | 301 { |
| 255 unsigned value = *ptr; | 302 unsigned value = *ptr; |
| 256 MEMORY_BARRIER(); | 303 MEMORY_BARRIER(); |
| (...skipping 16 matching lines...) | |
| 273 unsigned long long value = *ptr; | 320 unsigned long long value = *ptr; |
| 274 MEMORY_BARRIER(); | 321 MEMORY_BARRIER(); |
| 275 return value; | 322 return value; |
| 276 } | 323 } |
| 277 ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr) | 324 ALWAYS_INLINE void* acquireLoad(void* volatile const* ptr) |
| 278 { | 325 { |
| 279 void* value = *ptr; | 326 void* value = *ptr; |
| 280 MEMORY_BARRIER(); | 327 MEMORY_BARRIER(); |
| 281 return value; | 328 return value; |
| 282 } | 329 } |
| 330 ALWAYS_INLINE float acquireLoad(volatile const float* ptr) | |
| 331 { | |
| 332 float value = *ptr; | |
| 333 MEMORY_BARRIER(); | |
| 334 return value; | |
| 335 } | |
| 336 ALWAYS_INLINE double acquireLoad(volatile const double* ptr) | |
| 337 { | |
| 338 double value = *ptr; | |
| 339 MEMORY_BARRIER(); | |
| 340 return value; | |
| 341 } | |
| 283 | 342 |
| 284 #if defined(ADDRESS_SANITIZER) | 343 #if defined(ADDRESS_SANITIZER) |
| 285 | 344 |
| 286 NO_SANITIZE_ADDRESS ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value) | 345 NO_SANITIZE_ADDRESS ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value) |
| 287 { | 346 { |
| 288 MEMORY_BARRIER(); | 347 MEMORY_BARRIER(); |
| 289 *ptr = value; | 348 *ptr = value; |
| 290 } | 349 } |
| 291 | 350 |
| 292 NO_SANITIZE_ADDRESS ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr) | 351 NO_SANITIZE_ADDRESS ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr) |
| (...skipping 34 matching lines...) | |
| 327 using WTF::acquireLoad; | 386 using WTF::acquireLoad; |
| 328 using WTF::releaseStore; | 387 using WTF::releaseStore; |
| 329 | 388 |
| 330 // These methods allow loading from and storing to poisoned memory. Only | 389 // These methods allow loading from and storing to poisoned memory. Only |
| 331 // use these methods if you know what you are doing since they will | 390 // use these methods if you know what you are doing since they will |
| 332 // silence use-after-poison errors from ASan. | 391 // silence use-after-poison errors from ASan. |
| 333 using WTF::asanUnsafeAcquireLoad; | 392 using WTF::asanUnsafeAcquireLoad; |
| 334 using WTF::asanUnsafeReleaseStore; | 393 using WTF::asanUnsafeReleaseStore; |
| 335 | 394 |
| 336 #endif // Atomics_h | 395 #endif // Atomics_h |
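
tkent's inline comment on the new float `releaseStore` asks for a `static_assert` guarding the int/float (and, by extension, the long/double) type-punning unions. The follow-up patchset with that change is not shown in this diff; the snippet below is only a rough sketch of what the requested guard could look like, reusing `ALWAYS_INLINE` and the `__tsan_*` intrinsics already declared earlier in Atomics.h.

```cpp
// Sketch only -- not the actual follow-up patch. Assumes the __tsan_* atomic
// intrinsics and the ALWAYS_INLINE macro from earlier in Atomics.h are in scope.
ALWAYS_INLINE void releaseStore(volatile float* ptr, float value)
{
    static_assert(sizeof(int) == sizeof(float), "int and float must be the same size");
    union {
        int ivalue;
        float fvalue;
    } u;
    u.fvalue = value;
    __tsan_atomic32_store(reinterpret_cast<volatile __tsan_atomic32*>(ptr), u.ivalue, __tsan_memory_order_release);
}

ALWAYS_INLINE double acquireLoad(volatile const double* ptr)
{
    static_assert(sizeof(long) == sizeof(double), "long and double must be the same size");
    union {
        long ivalue;
        double dvalue;
    } u;
    u.ivalue = static_cast<long>(__tsan_atomic64_load(reinterpret_cast<volatile const __tsan_atomic64*>(ptr), __tsan_memory_order_acquire));
    return u.dvalue;
}
```

Note that the second assertion only holds on LP64 targets; on 32-bit or LLP64 platforms `long` is four bytes, so it would fire and expose the hidden size assumption in the `long`/`double` union, which is presumably the point of adding it.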
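For readers wondering why the patch adds matching `releaseStore`/`acquireLoad` overloads for float and double: the pair is meant for the usual publish/consume pattern, where a writer thread stores a value and then raises a flag with release semantics, and a reader checks the flag with acquire semantics before reading the value. The example below is only an illustration of that pattern, not code from this CL; the names `sharedGain`, `gainReady`, `publishGain`, and `tryReadGain` are invented for the example, the include path is assumed, and the int overloads it relies on are declared in the portion of the header skipped in the diff above.

```cpp
#include "wtf/Atomics.h" // assumed include path; brings releaseStore()/acquireLoad() into scope via the using declarations

// Hypothetical shared state for the example; not part of the patch.
static volatile float sharedGain = 0.0f;
static volatile int gainReady = 0;

// Writer thread: store the value, then raise the flag. The release helpers make
// both stores visible to TSan as ordered atomic writes rather than plain stores.
void publishGain(float gain)
{
    releaseStore(&sharedGain, gain);
    releaseStore(&gainReady, 1);
}

// Reader thread: an acquire load of the flag guarantees that, if the flag is set,
// the earlier store to sharedGain is visible here as well.
bool tryReadGain(float& gain)
{
    if (!acquireLoad(&gainReady))
        return false;
    gain = acquireLoad(&sharedGain);
    return true;
}
```

A design note on the overloads themselves: the union-based type punning keeps the public signatures in terms of float/double while routing the actual memory access through the 32- and 64-bit TSan atomics, so ThreadSanitizer observes a properly ordered atomic access instead of reporting a racy plain store.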