OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 44 matching lines...) |
55 | 55 |
56 #ifdef __cplusplus | 56 #ifdef __cplusplus |
57 extern "C" { | 57 extern "C" { |
58 #endif | 58 #endif |
59 | 59 |
60 typedef char __tsan_atomic8; | 60 typedef char __tsan_atomic8; |
61 typedef short __tsan_atomic16; // NOLINT | 61 typedef short __tsan_atomic16; // NOLINT |
62 typedef int __tsan_atomic32; | 62 typedef int __tsan_atomic32; |
63 typedef long __tsan_atomic64; // NOLINT | 63 typedef long __tsan_atomic64; // NOLINT |
64 | 64 |
| 65 #if defined(__SIZEOF_INT128__) \ |
| 66 || (__clang_major__ * 100 + __clang_minor__ >= 302) |
| 67 typedef __int128 __tsan_atomic128; |
| 68 #define __TSAN_HAS_INT128 1 |
| 69 #else |
| 70 typedef char __tsan_atomic128; |
| 71 #define __TSAN_HAS_INT128 0 |
| 72 #endif |
| 73 |
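The block above makes 128-bit atomics conditional: `__tsan_atomic128` is a real `__int128` only when the compiler provides one, and `__TSAN_HAS_INT128` tells callers which case they are in. A minimal consumer sketch (hypothetical, not part of this patch):

    #if __TSAN_HAS_INT128
    static volatile __tsan_atomic128 g_wide;  // a real 128-bit atomic cell
    #endif
    // When __TSAN_HAS_INT128 is 0, __tsan_atomic128 is a one-byte placeholder
    // that merely keeps the 128-bit declarations below well-formed.
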
65 typedef enum { | 74 typedef enum { |
66 __tsan_memory_order_relaxed = (1 << 0) + 100500, | 75 __tsan_memory_order_relaxed, |
67 __tsan_memory_order_consume = (1 << 1) + 100500, | 76 __tsan_memory_order_consume, |
68 __tsan_memory_order_acquire = (1 << 2) + 100500, | 77 __tsan_memory_order_acquire, |
69 __tsan_memory_order_release = (1 << 3) + 100500, | 78 __tsan_memory_order_release, |
70 __tsan_memory_order_acq_rel = (1 << 4) + 100500, | 79 __tsan_memory_order_acq_rel, |
71 __tsan_memory_order_seq_cst = (1 << 5) + 100500, | 80 __tsan_memory_order_seq_cst, |
72 } __tsan_memory_order; | 81 } __tsan_memory_order; |
73 | 82 |
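With the old sentinel offsets dropped, the enumerators take the default values 0 through 5, which line up with the C++11 `std::memory_order` constants. An illustrative check (an assumption about the intended correspondence, not code from the patch):

    #include <atomic>
    static_assert((int)__tsan_memory_order_relaxed == (int)std::memory_order_relaxed &&
                  (int)__tsan_memory_order_seq_cst == (int)std::memory_order_seq_cst,
                  "TSan memory orders mirror std::memory_order");
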
74 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a, | 83 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a, |
75 __tsan_memory_order mo); | 84 __tsan_memory_order mo); |
76 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a, | 85 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a, |
77 __tsan_memory_order mo); | 86 __tsan_memory_order mo); |
78 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a, | 87 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a, |
79 __tsan_memory_order mo); | 88 __tsan_memory_order mo); |
80 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a, | 89 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a, |
| 90 __tsan_memory_order mo); |
| 91 __tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a, |
81 __tsan_memory_order mo); | 92 __tsan_memory_order mo); |
82 | 93 |
83 void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v, | 94 void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v, |
84 __tsan_memory_order mo); | 95 __tsan_memory_order mo); |
85 void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v, | 96 void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v, |
86 __tsan_memory_order mo); | 97 __tsan_memory_order mo); |
87 void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v, | 98 void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v, |
88 __tsan_memory_order mo); | 99 __tsan_memory_order mo); |
89 void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v, | 100 void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v, |
| 101 __tsan_memory_order mo); |
| 102 void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v, |
90 __tsan_memory_order mo); | 103 __tsan_memory_order mo); |
91 | 104 |
92 __tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8* a, | 105 __tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a, |
93 __tsan_atomic8 v, __tsan_memory_order mo); | 106 __tsan_atomic8 v, __tsan_memory_order mo); |
94 __tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16* a, | 107 __tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a, |
95 __tsan_atomic16 v, __tsan_memory_order mo); | 108 __tsan_atomic16 v, __tsan_memory_order mo); |
96 __tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32* a, | 109 __tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a, |
97 __tsan_atomic32 v, __tsan_memory_order mo); | 110 __tsan_atomic32 v, __tsan_memory_order mo); |
98 __tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64* a, | 111 __tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a, |
| 112 __tsan_atomic64 v, __tsan_memory_order mo); |
| 113 __tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a, |
| 114 __tsan_atomic128 v, __tsan_memory_order mo); |
| 115 |
| 116 __tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a, |
| 117 __tsan_atomic8 v, __tsan_memory_order mo); |
| 118 __tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a, |
| 119 __tsan_atomic16 v, __tsan_memory_order mo); |
| 120 __tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a, |
| 121 __tsan_atomic32 v, __tsan_memory_order mo); |
| 122 __tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a, |
| 123 __tsan_atomic64 v, __tsan_memory_order mo); |
| 124 __tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a, |
| 125 __tsan_atomic128 v, __tsan_memory_order mo); |
| 126 |
| 127 __tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a, |
| 128 __tsan_atomic8 v, __tsan_memory_order mo); |
| 129 __tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a, |
| 130 __tsan_atomic16 v, __tsan_memory_order mo); |
| 131 __tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a, |
| 132 __tsan_atomic32 v, __tsan_memory_order mo); |
| 133 __tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a, |
| 134 __tsan_atomic64 v, __tsan_memory_order mo); |
| 135 __tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a, |
| 136 __tsan_atomic128 v, __tsan_memory_order mo); |
| 137 |
| 138 __tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a, |
| 139 __tsan_atomic8 v, __tsan_memory_order mo); |
| 140 __tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a, |
| 141 __tsan_atomic16 v, __tsan_memory_order mo); |
| 142 __tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a, |
| 143 __tsan_atomic32 v, __tsan_memory_order mo); |
| 144 __tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a, |
| 145 __tsan_atomic64 v, __tsan_memory_order mo); |
| 146 __tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a, |
| 147 __tsan_atomic128 v, __tsan_memory_order mo); |
| 148 |
| 149 __tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a, |
| 150 __tsan_atomic8 v, __tsan_memory_order mo); |
| 151 __tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a, |
| 152 __tsan_atomic16 v, __tsan_memory_order mo); |
| 153 __tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a, |
| 154 __tsan_atomic32 v, __tsan_memory_order mo); |
| 155 __tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a, |
| 156 __tsan_atomic64 v, __tsan_memory_order mo); |
| 157 __tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a, |
| 158 __tsan_atomic128 v, __tsan_memory_order mo); |
| 159 |
| 160 __tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a, |
| 161 __tsan_atomic8 v, __tsan_memory_order mo); |
| 162 __tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a, |
| 163 __tsan_atomic16 v, __tsan_memory_order mo); |
| 164 __tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a, |
| 165 __tsan_atomic32 v, __tsan_memory_order mo); |
| 166 __tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a, |
| 167 __tsan_atomic64 v, __tsan_memory_order mo); |
| 168 __tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a, |
99 __tsan_atomic64 v, __tsan_memory_order mo); | 169 __tsan_atomic128 v, __tsan_memory_order mo); |
100 | 170 |
101 __tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8* a, | 171 int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a, |
102 __tsan_atomic8 v, __tsan_memory_order mo); | 172 __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo, |
103 __tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16* a, | 173 __tsan_memory_order fail_mo); |
104 __tsan_atomic16 v, __tsan_memory_order mo); | 174 int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a, |
105 __tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32* a, | 175 __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo, |
106 __tsan_atomic32 v, __tsan_memory_order mo); | 176 __tsan_memory_order fail_mo); |
107 __tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64* a, | 177 int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a, |
108 __tsan_atomic64 v, __tsan_memory_order mo); | 178 __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo, |
| 179 __tsan_memory_order fail_mo); |
| 180 int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a, |
| 181 __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo, |
| 182 __tsan_memory_order fail_mo); |
| 183 int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a, |
| 184 __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo, |
| 185 __tsan_memory_order fail_mo); |
109 | 186 |
110 __tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8* a, | 187 int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a, |
111 __tsan_atomic8 v, __tsan_memory_order mo); | 188 __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo, |
112 __tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16* a, | 189 __tsan_memory_order fail_mo); |
113 __tsan_atomic16 v, __tsan_memory_order mo); | 190 int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a, |
114 __tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32* a, | 191 __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo, |
115 __tsan_atomic32 v, __tsan_memory_order mo); | 192 __tsan_memory_order fail_mo); |
116 __tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64* a, | 193 int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a, |
117 __tsan_atomic64 v, __tsan_memory_order mo); | 194 __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo, |
| 195 __tsan_memory_order fail_mo); |
| 196 int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a, |
| 197 __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo, |
| 198 __tsan_memory_order fail_mo); |
| 199 int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a, |
| 200 __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo, |
| 201 __tsan_memory_order fail_mo); |
118 | 202 |
119 __tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8* a, | 203 __tsan_atomic8 __tsan_atomic8_compare_exchange_val( |
120 __tsan_atomic8 v, __tsan_memory_order mo); | 204 volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v, |
121 __tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16* a, | 205 __tsan_memory_order mo, __tsan_memory_order fail_mo); |
122 __tsan_atomic16 v, __tsan_memory_order mo); | 206 __tsan_atomic16 __tsan_atomic16_compare_exchange_val( |
123 __tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32* a, | 207 volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v, |
124 __tsan_atomic32 v, __tsan_memory_order mo); | 208 __tsan_memory_order mo, __tsan_memory_order fail_mo); |
125 __tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64* a, | 209 __tsan_atomic32 __tsan_atomic32_compare_exchange_val( |
126 __tsan_atomic64 v, __tsan_memory_order mo); | 210 volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v, |
127 | 211 __tsan_memory_order mo, __tsan_memory_order fail_mo); |
128 __tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8* a, | 212 __tsan_atomic64 __tsan_atomic64_compare_exchange_val( |
129 __tsan_atomic8 v, __tsan_memory_order mo); | 213 volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v, |
130 __tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16* a, | 214 __tsan_memory_order mo, __tsan_memory_order fail_mo); |
131 __tsan_atomic16 v, __tsan_memory_order mo); | 215 __tsan_atomic128 __tsan_atomic128_compare_exchange_val( |
132 __tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32* a, | 216 volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v, |
133 __tsan_atomic32 v, __tsan_memory_order mo); | 217 __tsan_memory_order mo, __tsan_memory_order fail_mo); |
134 __tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64* a, | |
135 __tsan_atomic64 v, __tsan_memory_order mo); | |
136 | |
137 int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8* a, | |
138 __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo); | |
139 int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16* a, | |
140 __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo); | |
141 int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32* a, | |
142 __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo); | |
143 int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64* a, | |
144 __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo); | |
145 | |
146 int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8* a, | |
147 __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo); | |
148 int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16* a, | |
149 __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo); | |
150 int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32* a, | |
151 __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo); | |
152 int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64* a, | |
153 __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo); | |
154 | 218 |
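Both compare-exchange families now take a separate `fail_mo`, the ordering used when the comparison fails, mirroring the two-order form of C++11 `compare_exchange_{weak,strong}`; on failure the observed value is written back through `c`. A hypothetical caller sketch (names are illustrative):

    static volatile __tsan_atomic32 flag;
    void acquire_flag() {
      __tsan_atomic32 expected = 0;
      // The weak form may fail spuriously, so loop; on failure the CAS has
      // already written the observed value into `expected`, so reset it.
      while (!__tsan_atomic32_compare_exchange_weak(
                 &flag, &expected, 1,
                 __tsan_memory_order_acq_rel, __tsan_memory_order_relaxed)) {
        expected = 0;
      }
    }
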
155 void __tsan_atomic_thread_fence(__tsan_memory_order mo); | 219 void __tsan_atomic_thread_fence(__tsan_memory_order mo); |
| 220 void __tsan_atomic_signal_fence(__tsan_memory_order mo); |
156 | 221 |
157 #ifdef __cplusplus | 222 #ifdef __cplusplus |
158 } // extern "C" | 223 } // extern "C" |
159 #endif | 224 #endif |
160 | 225 |
161 #endif // #ifndef TSAN_INTERFACE_ATOMIC_H | 226 #endif // #ifndef TSAN_INTERFACE_ATOMIC_H |
162 | 227 |
163 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, | 228 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr, |
164 Atomic32 old_value, | 229 Atomic32 old_value, |
165 Atomic32 new_value) { | 230 Atomic32 new_value) { |
166 Atomic32 cmp = old_value; | 231 Atomic32 cmp = old_value; |
167 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, | 232 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, |
168 __tsan_memory_order_relaxed); | 233 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed); |
169 return cmp; | 234 return cmp; |
170 } | 235 } |
171 | 236 |
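Because the strong CAS stores the observed value back into `cmp`, returning `cmp` gives the usual CompareAndSwap contract: the caller gets `old_value` back exactly when the swap happened. A worked example (illustrative, not from the patch):

    void cas_example() {
      v8::internal::Atomic32 x = 5;
      // Succeeds: returns 5 and x becomes 7.
      v8::internal::Atomic32 prev = v8::internal::NoBarrier_CompareAndSwap(&x, 5, 7);
      // Fails: returns the current value, 7, and leaves x unchanged.
      prev = v8::internal::NoBarrier_CompareAndSwap(&x, 5, 9);
      (void)prev;
    }
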
172 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, | 237 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr, |
173 Atomic32 new_value) { | 238 Atomic32 new_value) { |
174 return __tsan_atomic32_exchange(ptr, new_value, | 239 return __tsan_atomic32_exchange(ptr, new_value, |
175 __tsan_memory_order_relaxed); | 240 __tsan_memory_order_relaxed); |
176 } | 241 } |
177 | 242 |
178 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr, | 243 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr, |
179 Atomic32 new_value) { | 244 Atomic32 new_value) { |
180 return __tsan_atomic32_exchange(ptr, new_value, | 245 return __tsan_atomic32_exchange(ptr, new_value, |
181 __tsan_memory_order_acquire); | 246 __tsan_memory_order_acquire); |
182 } | 247 } |
183 | 248 |
184 inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr, | 249 inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr, |
185 Atomic32 new_value) { | 250 Atomic32 new_value) { |
186 return __tsan_atomic32_exchange(ptr, new_value, | 251 return __tsan_atomic32_exchange(ptr, new_value, |
187 __tsan_memory_order_release); | 252 __tsan_memory_order_release); |
188 } | 253 } |
189 | 254 |
190 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr, | 255 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr, |
191 Atomic32 increment) { | 256 Atomic32 increment) { |
192 return increment + __tsan_atomic32_fetch_add(ptr, increment, | 257 return increment + __tsan_atomic32_fetch_add(ptr, increment, |
193 __tsan_memory_order_relaxed); | 258 __tsan_memory_order_relaxed); |
194 } | 259 } |
195 | 260 |
196 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr, | 261 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr, |
197 Atomic32 increment) { | 262 Atomic32 increment) { |
198 return increment + __tsan_atomic32_fetch_add(ptr, increment, | 263 return increment + __tsan_atomic32_fetch_add(ptr, increment, |
199 __tsan_memory_order_acq_rel); | 264 __tsan_memory_order_acq_rel); |
200 } | 265 } |
201 | 266 |
202 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr, | 267 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr, |
203 Atomic32 old_value, | 268 Atomic32 old_value, |
204 Atomic32 new_value) { | 269 Atomic32 new_value) { |
205 Atomic32 cmp = old_value; | 270 Atomic32 cmp = old_value; |
206 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, | 271 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, |
207 __tsan_memory_order_acquire); | 272 __tsan_memory_order_acquire, __tsan_memory_order_acquire); |
208 return cmp; | 273 return cmp; |
209 } | 274 } |
210 | 275 |
211 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, | 276 inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr, |
212 Atomic32 old_value, | 277 Atomic32 old_value, |
213 Atomic32 new_value) { | 278 Atomic32 new_value) { |
214 Atomic32 cmp = old_value; | 279 Atomic32 cmp = old_value; |
215 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, | 280 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, |
216 __tsan_memory_order_release); | 281 __tsan_memory_order_release, __tsan_memory_order_relaxed); |
217 return cmp; | 282 return cmp; |
218 } | 283 } |
219 | 284 |
220 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) { | 285 inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) { |
221 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); | 286 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); |
222 } | 287 } |
223 | 288 |
224 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { | 289 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) { |
225 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); | 290 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); |
226 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); | 291 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); |
227 } | 292 } |
228 | 293 |
229 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) { | 294 inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) { |
230 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); | 295 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); |
231 } | 296 } |
232 | 297 |
233 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { | 298 inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) { |
234 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); | 299 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); |
235 } | 300 } |
236 | 301 |
237 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) { | 302 inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) { |
238 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); | 303 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); |
239 } | 304 } |
240 | 305 |
241 inline Atomic32 Release_Load(volatile const Atomic32* ptr) { | 306 inline Atomic32 Release_Load(volatile const Atomic32 *ptr) { |
242 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); | 307 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); |
243 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); | 308 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); |
244 } | 309 } |
245 | 310 |
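Note the asymmetry above: `Release_Store` and `Acquire_Load` map directly onto release and acquire accesses, while the legacy `Acquire_Store` and `Release_Load` are emulated with a relaxed access plus a full `seq_cst` thread fence, which is at least as strong as their names promise. A rough C++11 equivalence sketch (an analogy, not the patch's code):

    #include <atomic>
    void fence_sketch(std::atomic<int>& cell) {
      cell.store(1, std::memory_order_relaxed);             // ~ Acquire_Store
      std::atomic_thread_fence(std::memory_order_seq_cst);
      std::atomic_thread_fence(std::memory_order_seq_cst);  // ~ Release_Load
      int v = cell.load(std::memory_order_relaxed);
      (void)v;
    }
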
246 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, | 311 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr, |
247 Atomic64 old_value, | 312 Atomic64 old_value, |
248 Atomic64 new_value) { | 313 Atomic64 new_value) { |
249 Atomic64 cmp = old_value; | 314 Atomic64 cmp = old_value; |
250 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, | 315 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, |
251 __tsan_memory_order_relaxed); | 316 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed); |
252 return cmp; | 317 return cmp; |
253 } | 318 } |
254 | 319 |
255 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr, | 320 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr, |
256 Atomic64 new_value) { | 321 Atomic64 new_value) { |
257 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed); | 322 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed); |
258 } | 323 } |
259 | 324 |
260 inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr, | 325 inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr, |
261 Atomic64 new_value) { | 326 Atomic64 new_value) { |
262 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire); | 327 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire); |
263 } | 328 } |
264 | 329 |
265 inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr, | 330 inline Atomic64 Release_AtomicExchange(volatile Atomic64 *ptr, |
266 Atomic64 new_value) { | 331 Atomic64 new_value) { |
267 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release); | 332 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release); |
268 } | 333 } |
269 | 334 |
270 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr, | 335 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr, |
271 Atomic64 increment) { | 336 Atomic64 increment) { |
272 return increment + __tsan_atomic64_fetch_add(ptr, increment, | 337 return increment + __tsan_atomic64_fetch_add(ptr, increment, |
273 __tsan_memory_order_relaxed); | 338 __tsan_memory_order_relaxed); |
274 } | 339 } |
275 | 340 |
276 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr, | 341 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr, |
277 Atomic64 increment) { | 342 Atomic64 increment) { |
278 return increment + __tsan_atomic64_fetch_add(ptr, increment, | 343 return increment + __tsan_atomic64_fetch_add(ptr, increment, |
279 __tsan_memory_order_acq_rel); | 344 __tsan_memory_order_acq_rel); |
280 } | 345 } |
281 | 346 |
282 inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) { | 347 inline void NoBarrier_Store(volatile Atomic64 *ptr, Atomic64 value) { |
283 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed); | 348 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed); |
284 } | 349 } |
285 | 350 |
286 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) { | 351 inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) { |
287 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed); | 352 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed); |
288 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); | 353 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); |
289 } | 354 } |
290 | 355 |
291 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { | 356 inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) { |
292 __tsan_atomic64_store(ptr, value, __tsan_memory_order_release); | 357 __tsan_atomic64_store(ptr, value, __tsan_memory_order_release); |
293 } | 358 } |
294 | 359 |
295 inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) { | 360 inline Atomic64 NoBarrier_Load(volatile const Atomic64 *ptr) { |
296 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); | 361 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); |
297 } | 362 } |
298 | 363 |
299 inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { | 364 inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) { |
300 return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire); | 365 return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire); |
301 } | 366 } |
302 | 367 |
303 inline Atomic64 Release_Load(volatile const Atomic64* ptr) { | 368 inline Atomic64 Release_Load(volatile const Atomic64 *ptr) { |
304 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); | 369 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); |
305 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); | 370 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); |
306 } | 371 } |
307 | 372 |
308 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr, | 373 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr, |
309 Atomic64 old_value, | 374 Atomic64 old_value, |
310 Atomic64 new_value) { | 375 Atomic64 new_value) { |
311 Atomic64 cmp = old_value; | 376 Atomic64 cmp = old_value; |
312 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, | 377 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, |
313 __tsan_memory_order_acquire); | 378 __tsan_memory_order_acquire, __tsan_memory_order_acquire); |
314 return cmp; | 379 return cmp; |
315 } | 380 } |
316 | 381 |
317 inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr, | 382 inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr, |
318 Atomic64 old_value, | 383 Atomic64 old_value, |
319 Atomic64 new_value) { | 384 Atomic64 new_value) { |
320 Atomic64 cmp = old_value; | 385 Atomic64 cmp = old_value; |
321 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, | 386 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, |
322 __tsan_memory_order_release); | 387 __tsan_memory_order_release, __tsan_memory_order_relaxed); |
323 return cmp; | 388 return cmp; |
324 } | 389 } |
325 | 390 |
326 inline void MemoryBarrier() { | 391 inline void MemoryBarrier() { |
327 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); | 392 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); |
328 } | 393 } |
329 | 394 |
330 } // namespace internal | 395 } // namespace internal |
331 } // namespace v8 | 396 } // namespace v8 |
332 | 397 |
333 #undef ATOMICOPS_COMPILER_BARRIER | 398 #undef ATOMICOPS_COMPILER_BARRIER |
334 | 399 |
335 #endif // V8_ATOMICOPS_INTERNALS_TSAN_H_ | 400 #endif // V8_ATOMICOPS_INTERNALS_TSAN_H_ |