OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. | 2 * Copyright (C) 2007, 2008, 2010, 2012 Apple Inc. All rights reserved. |
3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) | 3 * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com) |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions | 6 * modification, are permitted provided that the following conditions |
7 * are met: | 7 * are met: |
8 * | 8 * |
9 * 1. Redistributions of source code must retain the above copyright | 9 * 1. Redistributions of source code must retain the above copyright |
10 * notice, this list of conditions and the following disclaimer. | 10 * notice, this list of conditions and the following disclaimer. |
(...skipping 25 matching lines...) |
36 #include <stdint.h> | 36 #include <stdint.h> |
37 | 37 |
38 #if COMPILER(MSVC) | 38 #if COMPILER(MSVC) |
39 #include <windows.h> | 39 #include <windows.h> |
40 #endif | 40 #endif |
41 | 41 |
42 #if defined(THREAD_SANITIZER) | 42 #if defined(THREAD_SANITIZER) |
43 #include <sanitizer/tsan_interface_atomic.h> | 43 #include <sanitizer/tsan_interface_atomic.h> |
44 #endif | 44 #endif |
45 | 45 |
| 46 #if defined(ADDRESS_SANITIZER) |
| 47 #include <sanitizer/asan_interface.h> |
| 48 #endif |
| 49 |
46 namespace WTF { | 50 namespace WTF { |
47 | 51 |
48 #if COMPILER(MSVC) | 52 #if COMPILER(MSVC) |
49 | 53 |
50 // atomicAdd returns the result of the addition. | 54 // atomicAdd returns the result of the addition. |
51 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) | 55 ALWAYS_INLINE int atomicAdd(int volatile* addend, int increment) |
52 { | 56 { |
53 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment; | 57 return InterlockedExchangeAdd(reinterpret_cast<long volatile*>(addend), static_cast<long>(increment)) + increment; |
54 } | 58 } |
55 | 59 |
(...skipping 43 matching lines...) |
99 } | 103 } |
100 | 104 |
101 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) | 105 ALWAYS_INLINE void atomicSetOneToZero(int volatile* ptr) |
102 { | 106 { |
103 ASSERT(*ptr == 1); | 107 ASSERT(*ptr == 1); |
104 __sync_lock_release(ptr); | 108 __sync_lock_release(ptr); |
105 } | 109 } |
106 #endif | 110 #endif |
107 | 111 |
108 #if defined(THREAD_SANITIZER) | 112 #if defined(THREAD_SANITIZER) |
| 113 |
109 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) | 114 ALWAYS_INLINE void releaseStore(volatile int* ptr, int value) |
110 { | 115 { |
111 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); | 116 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); |
112 } | 117 } |
113 | 118 |
114 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) | 119 ALWAYS_INLINE int acquireLoad(volatile const int* ptr) |
115 { | 120 { |
116 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); | 121 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); |
117 } | 122 } |
118 | 123 |
119 ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value) | 124 ALWAYS_INLINE void releaseStore(volatile unsigned* ptr, unsigned value) |
120 { | 125 { |
121 __tsan_atomic32_store(reinterpret_cast<volatile int*>(ptr), static_cast<int>(value), __tsan_memory_order_release); | 126 __tsan_atomic32_store(reinterpret_cast<volatile int*>(ptr), static_cast<int>(value), __tsan_memory_order_release); |
122 } | 127 } |
123 | 128 |
124 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) | 129 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) |
125 { | 130 { |
126 return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire)); | 131 return static_cast<unsigned>(__tsan_atomic32_load(reinterpret_cast<volatile const int*>(ptr), __tsan_memory_order_acquire)); |
127 } | 132 } |
| 133 |
128 #else | 134 #else |
129 | 135 |
130 #if CPU(X86) || CPU(X86_64) | 136 #if CPU(X86) || CPU(X86_64) |
131 // Only a compiler barrier is needed. | 136 // Only a compiler barrier is needed. |
132 #if COMPILER(MSVC) | 138 #if COMPILER(MSVC) |
133 // Starting from Visual Studio 2005, the compiler guarantees acquire and | 139 // Starting from Visual Studio 2005, the compiler guarantees acquire and |
134 // release semantics for operations on volatile variables. See the MSDN | 140 // release semantics for operations on volatile variables. See the MSDN |
135 // entry for the MemoryBarrier macro. | 141 // entry for the MemoryBarrier macro. |
136 #define MEMORY_BARRIER() | 142 #define MEMORY_BARRIER() |
137 #else | 143 #else |
(...skipping 34 matching lines...) |
172 *ptr = value; | 178 *ptr = value; |
173 } | 179 } |
174 | 180 |
175 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) | 181 ALWAYS_INLINE unsigned acquireLoad(volatile const unsigned* ptr) |
176 { | 182 { |
177 unsigned value = *ptr; | 183 unsigned value = *ptr; |
178 MEMORY_BARRIER(); | 184 MEMORY_BARRIER(); |
179 return value; | 185 return value; |
180 } | 186 } |
181 | 187 |
| 188 #if defined(ADDRESS_SANITIZER) |
| 189 |
| 190 __attribute__((no_sanitize_address)) ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value) |
| 191 { |
| 192 MEMORY_BARRIER(); |
| 193 *ptr = value; |
| 194 } |
| 195 |
| 196 __attribute__((no_sanitize_address)) ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr) |
| 197 { |
| 198 unsigned value = *ptr; |
| 199 MEMORY_BARRIER(); |
| 200 return value; |
| 201 } |
| 202 |
| 203 #endif // defined(ADDRESS_SANITIZER) |
| 204 |
182 #undef MEMORY_BARRIER | 205 #undef MEMORY_BARRIER |
183 | 206 |
184 #endif | 207 #endif |
185 | 208 |
| 209 #if !defined(ADDRESS_SANITIZER) |
| 210 |
| 211 ALWAYS_INLINE void asanUnsafeReleaseStore(volatile unsigned* ptr, unsigned value) |
| 212 { |
| 213 releaseStore(ptr, value); |
| 214 } |
| 215 |
| 216 ALWAYS_INLINE unsigned asanUnsafeAcquireLoad(volatile const unsigned* ptr) |
| 217 { |
| 218 return acquireLoad(ptr); |
| 219 } |
| 220 |
| 221 #endif |
| 222 |
186 } // namespace WTF | 223 } // namespace WTF |
187 | 224 |
188 using WTF::atomicAdd; | 225 using WTF::atomicAdd; |
189 using WTF::atomicSubtract; | 226 using WTF::atomicSubtract; |
190 using WTF::atomicDecrement; | 227 using WTF::atomicDecrement; |
191 using WTF::atomicIncrement; | 228 using WTF::atomicIncrement; |
192 using WTF::atomicTestAndSetToOne; | 229 using WTF::atomicTestAndSetToOne; |
193 using WTF::atomicSetOneToZero; | 230 using WTF::atomicSetOneToZero; |
194 using WTF::acquireLoad; | 231 using WTF::acquireLoad; |
195 using WTF::releaseStore; | 232 using WTF::releaseStore; |
196 | 233 |
| 234 // These methods allow loading from and storing to poisoned memory. Only |
| 235 // use these methods if you know what you are doing since they will |
| 236 // silence use-after-poison errors from ASan. |
| 237 using WTF::asanUnsafeAcquireLoad; |
| 238 using WTF::asanUnsafeReleaseStore; |
| 239 |
197 #endif // Atomics_h | 240 #endif // Atomics_h |
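
A minimal usage sketch of the asanUnsafe pair this patch introduces, assuming wtf/Atomics.h is included; the names s_initialized, s_data, publish, and tryConsume are hypothetical and only illustrate pairing a release store in the writer with an acquire load in the reader when the flag may sit in ASan-poisoned memory:

    #include "wtf/Atomics.h"

    static volatile unsigned s_initialized = 0; // hypothetical flag; may live in poisoned memory
    static int s_data; // hypothetical payload published under the flag

    void publish()
    {
        s_data = 42; // plain store, made visible to readers by the release below
        asanUnsafeReleaseStore(&s_initialized, 1); // release semantics, exempt from ASan instrumentation
    }

    bool tryConsume(int& out)
    {
        if (!asanUnsafeAcquireLoad(&s_initialized)) // acquire semantics, pairs with the release store
            return false;
        out = s_data; // ordered after the flag load by acquire/release
        return true;
    }

In a non-ASan build these forward to the plain releaseStore/acquireLoad, so call sites that must tolerate poisoned memory pay no extra cost elsewhere.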