Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(250)

Side by Side Diff: base/atomicops_internals_tsan.h

Issue 11440006: Update the TSan v2 atomic declarations. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: Created 8 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 // This file is an internal atomic implementation for compiler-based 5 // This file is an internal atomic implementation for compiler-based
6 // ThreadSanitizer. Use base/atomicops.h instead. 6 // ThreadSanitizer. Use base/atomicops.h instead.
7 7
8 #ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_ 8 #ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_
9 #define BASE_ATOMICOPS_INTERNALS_TSAN_H_ 9 #define BASE_ATOMICOPS_INTERNALS_TSAN_H_
10 10
(...skipping 22 matching lines...) Expand all
33 33
34 #ifdef __cplusplus 34 #ifdef __cplusplus
35 extern "C" { 35 extern "C" {
36 #endif 36 #endif
37 37
38 typedef char __tsan_atomic8; 38 typedef char __tsan_atomic8;
39 typedef short __tsan_atomic16; // NOLINT 39 typedef short __tsan_atomic16; // NOLINT
40 typedef int __tsan_atomic32; 40 typedef int __tsan_atomic32;
41 typedef long __tsan_atomic64; // NOLINT 41 typedef long __tsan_atomic64; // NOLINT
42 42
43 #if defined(__SIZEOF_INT128__) \
44 || (__clang_major__ * 100 + __clang_minor__ >= 302)
45 typedef __int128 __tsan_atomic128;
46 #define __TSAN_HAS_INT128 1
47 #else
48 typedef char __tsan_atomic128;
49 #define __TSAN_HAS_INT128 0
50 #endif
51
43 typedef enum { 52 typedef enum {
44 __tsan_memory_order_relaxed = 1 << 0, 53 __tsan_memory_order_relaxed,
45 __tsan_memory_order_consume = 1 << 1, 54 __tsan_memory_order_consume,
46 __tsan_memory_order_acquire = 1 << 2, 55 __tsan_memory_order_acquire,
47 __tsan_memory_order_release = 1 << 3, 56 __tsan_memory_order_release,
48 __tsan_memory_order_acq_rel = 1 << 4, 57 __tsan_memory_order_acq_rel,
49 __tsan_memory_order_seq_cst = 1 << 5, 58 __tsan_memory_order_seq_cst,
50 } __tsan_memory_order; 59 } __tsan_memory_order;
51 60
52 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a, 61 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
53 __tsan_memory_order mo); 62 __tsan_memory_order mo);
54 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a, 63 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
55 __tsan_memory_order mo); 64 __tsan_memory_order mo);
56 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a, 65 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
57 __tsan_memory_order mo); 66 __tsan_memory_order mo);
58 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a, 67 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
59 __tsan_memory_order mo); 68 __tsan_memory_order mo);
69 __tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
70 __tsan_memory_order mo);
60 71
61 void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v, 72 void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
62 __tsan_memory_order mo); 73 __tsan_memory_order mo);
63 void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v, 74 void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
64 __tsan_memory_order mo); 75 __tsan_memory_order mo);
65 void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v, 76 void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
66 __tsan_memory_order mo); 77 __tsan_memory_order mo);
67 void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v, 78 void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
68 __tsan_memory_order mo); 79 __tsan_memory_order mo);
80 void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
81 __tsan_memory_order mo);
69 82
70 __tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a, 83 __tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
71 __tsan_atomic8 v, __tsan_memory_order mo); 84 __tsan_atomic8 v, __tsan_memory_order mo);
72 __tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a, 85 __tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
73 __tsan_atomic16 v, __tsan_memory_order mo); 86 __tsan_atomic16 v, __tsan_memory_order mo);
74 __tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a, 87 __tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
75 __tsan_atomic32 v, __tsan_memory_order mo); 88 __tsan_atomic32 v, __tsan_memory_order mo);
76 __tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a, 89 __tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
77 __tsan_atomic64 v, __tsan_memory_order mo); 90 __tsan_atomic64 v, __tsan_memory_order mo);
91 __tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
92 __tsan_atomic128 v, __tsan_memory_order mo);
78 93
79 __tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a, 94 __tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
80 __tsan_atomic8 v, __tsan_memory_order mo); 95 __tsan_atomic8 v, __tsan_memory_order mo);
81 __tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a, 96 __tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
82 __tsan_atomic16 v, __tsan_memory_order mo); 97 __tsan_atomic16 v, __tsan_memory_order mo);
83 __tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a, 98 __tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
84 __tsan_atomic32 v, __tsan_memory_order mo); 99 __tsan_atomic32 v, __tsan_memory_order mo);
85 __tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a, 100 __tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
86 __tsan_atomic64 v, __tsan_memory_order mo); 101 __tsan_atomic64 v, __tsan_memory_order mo);
102 __tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
103 __tsan_atomic128 v, __tsan_memory_order mo);
87 104
88 __tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a, 105 __tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
89 __tsan_atomic8 v, __tsan_memory_order mo); 106 __tsan_atomic8 v, __tsan_memory_order mo);
90 __tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a, 107 __tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
91 __tsan_atomic16 v, __tsan_memory_order mo); 108 __tsan_atomic16 v, __tsan_memory_order mo);
92 __tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a, 109 __tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
93 __tsan_atomic32 v, __tsan_memory_order mo); 110 __tsan_atomic32 v, __tsan_memory_order mo);
94 __tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a, 111 __tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
95 __tsan_atomic64 v, __tsan_memory_order mo); 112 __tsan_atomic64 v, __tsan_memory_order mo);
113 __tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
114 __tsan_atomic128 v, __tsan_memory_order mo);
96 115
97 __tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a, 116 __tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
98 __tsan_atomic8 v, __tsan_memory_order mo); 117 __tsan_atomic8 v, __tsan_memory_order mo);
99 __tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a, 118 __tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
100 __tsan_atomic16 v, __tsan_memory_order mo); 119 __tsan_atomic16 v, __tsan_memory_order mo);
101 __tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a, 120 __tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
102 __tsan_atomic32 v, __tsan_memory_order mo); 121 __tsan_atomic32 v, __tsan_memory_order mo);
103 __tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a, 122 __tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
104 __tsan_atomic64 v, __tsan_memory_order mo); 123 __tsan_atomic64 v, __tsan_memory_order mo);
124 __tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
125 __tsan_atomic128 v, __tsan_memory_order mo);
105 126
106 __tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a, 127 __tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
107 __tsan_atomic8 v, __tsan_memory_order mo); 128 __tsan_atomic8 v, __tsan_memory_order mo);
108 __tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a, 129 __tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
109 __tsan_atomic16 v, __tsan_memory_order mo); 130 __tsan_atomic16 v, __tsan_memory_order mo);
110 __tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a, 131 __tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
111 __tsan_atomic32 v, __tsan_memory_order mo); 132 __tsan_atomic32 v, __tsan_memory_order mo);
112 __tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a, 133 __tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
113 __tsan_atomic64 v, __tsan_memory_order mo); 134 __tsan_atomic64 v, __tsan_memory_order mo);
135 __tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
136 __tsan_atomic128 v, __tsan_memory_order mo);
137
138 __tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
139 __tsan_atomic8 v, __tsan_memory_order mo);
140 __tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
141 __tsan_atomic16 v, __tsan_memory_order mo);
142 __tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
143 __tsan_atomic32 v, __tsan_memory_order mo);
144 __tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
145 __tsan_atomic64 v, __tsan_memory_order mo);
146 __tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
147 __tsan_atomic128 v, __tsan_memory_order mo);
114 148
115 int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a, 149 int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
116 __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo); 150 __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
151 __tsan_memory_order fail_mo);
117 int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a, 152 int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
118 __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo); 153 __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
154 __tsan_memory_order fail_mo);
119 int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a, 155 int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
120 __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo); 156 __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
157 __tsan_memory_order fail_mo);
121 int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a, 158 int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
122 __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo); 159 __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
160 __tsan_memory_order fail_mo);
161 int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
162 __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
163 __tsan_memory_order fail_mo);
123 164
124 int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a, 165 int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
125 __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo); 166 __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
167 __tsan_memory_order fail_mo);
126 int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a, 168 int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
127 __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo); 169 __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
170 __tsan_memory_order fail_mo);
128 int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a, 171 int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
129 __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo); 172 __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
173 __tsan_memory_order fail_mo);
130 int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a, 174 int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
131 __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo); 175 __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
176 __tsan_memory_order fail_mo);
177 int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
178 __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
179 __tsan_memory_order fail_mo);
180
181 int __tsan_atomic8_compare_exchange_val(
182 volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
Alexander Potapenko 2012/12/05 05:25:34 Is this really "__tsan_atomic8 c", not "__tsan_atomic8 *c"?
Dmitry Vyukov 2012/12/05 06:09:53 Yes, it is value. The _val functions return the old value.
Dmitry Vyukov 2012/12/06 19:31:25 This needs to be addressed.
183 __tsan_memory_order mo, __tsan_memory_order fail_mo);
184 int __tsan_atomic16_compare_exchange_val(
185 volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
186 __tsan_memory_order mo, __tsan_memory_order fail_mo);
187 int __tsan_atomic32_compare_exchange_val(
188 volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
189 __tsan_memory_order mo, __tsan_memory_order fail_mo);
190 int __tsan_atomic64_compare_exchange_val(
191 volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
192 __tsan_memory_order mo, __tsan_memory_order fail_mo);
193 int __tsan_atomic128_compare_exchange_val(
194 volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
195 __tsan_memory_order mo, __tsan_memory_order fail_mo);
132 196
133 void __tsan_atomic_thread_fence(__tsan_memory_order mo); 197 void __tsan_atomic_thread_fence(__tsan_memory_order mo);
198 void __tsan_atomic_signal_fence(__tsan_memory_order mo);
134 199
135 #ifdef __cplusplus 200 #ifdef __cplusplus
136 } // extern "C" 201 } // extern "C"
137 #endif 202 #endif
138 203
139 #endif // #ifndef TSAN_INTERFACE_ATOMIC_H 204 #endif // #ifndef TSAN_INTERFACE_ATOMIC_H
140 205
141 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr, 206 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
142 Atomic32 old_value, 207 Atomic32 old_value,
143 Atomic32 new_value) { 208 Atomic32 new_value) {
144 Atomic32 cmp = old_value; 209 Atomic32 cmp = old_value;
145 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, 210 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
146 __tsan_memory_order_relaxed); 211 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
147 return cmp; 212 return cmp;
148 } 213 }
149 214
150 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr, 215 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
151 Atomic32 new_value) { 216 Atomic32 new_value) {
152 return __tsan_atomic32_exchange(ptr, new_value, 217 return __tsan_atomic32_exchange(ptr, new_value,
153 __tsan_memory_order_relaxed); 218 __tsan_memory_order_relaxed);
154 } 219 }
155 220
156 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr, 221 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr,
(...skipping 18 matching lines...) Expand all
175 Atomic32 increment) { 240 Atomic32 increment) {
176 return increment + __tsan_atomic32_fetch_add(ptr, increment, 241 return increment + __tsan_atomic32_fetch_add(ptr, increment,
177 __tsan_memory_order_acq_rel); 242 __tsan_memory_order_acq_rel);
178 } 243 }
179 244
180 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr, 245 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
181 Atomic32 old_value, 246 Atomic32 old_value,
182 Atomic32 new_value) { 247 Atomic32 new_value) {
183 Atomic32 cmp = old_value; 248 Atomic32 cmp = old_value;
184 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, 249 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
185 __tsan_memory_order_acquire); 250 __tsan_memory_order_acquire, __tsan_memory_order_acquire);
186 return cmp; 251 return cmp;
187 } 252 }
188 253
189 inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr, 254 inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
190 Atomic32 old_value, 255 Atomic32 old_value,
191 Atomic32 new_value) { 256 Atomic32 new_value) {
192 Atomic32 cmp = old_value; 257 Atomic32 cmp = old_value;
193 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, 258 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
194 __tsan_memory_order_release); 259 __tsan_memory_order_release, __tsan_memory_order_relaxed);
195 return cmp; 260 return cmp;
196 } 261 }
197 262
198 inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) { 263 inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) {
199 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); 264 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
200 } 265 }
201 266
202 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) { 267 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
203 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); 268 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
204 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 269 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
(...skipping 14 matching lines...) Expand all
219 inline Atomic32 Release_Load(volatile const Atomic32 *ptr) { 284 inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
220 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 285 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
221 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); 286 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
222 } 287 }
223 288
224 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr, 289 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
225 Atomic64 old_value, 290 Atomic64 old_value,
226 Atomic64 new_value) { 291 Atomic64 new_value) {
227 Atomic64 cmp = old_value; 292 Atomic64 cmp = old_value;
228 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, 293 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
229 __tsan_memory_order_relaxed); 294 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
230 return cmp; 295 return cmp;
231 } 296 }
232 297
233 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr, 298 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
234 Atomic64 new_value) { 299 Atomic64 new_value) {
235 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed); 300 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
236 } 301 }
237 302
238 inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr, 303 inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr,
239 Atomic64 new_value) { 304 Atomic64 new_value) {
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
281 inline Atomic64 Release_Load(volatile const Atomic64 *ptr) { 346 inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
282 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 347 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
283 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); 348 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
284 } 349 }
285 350
286 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr, 351 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
287 Atomic64 old_value, 352 Atomic64 old_value,
288 Atomic64 new_value) { 353 Atomic64 new_value) {
289 Atomic64 cmp = old_value; 354 Atomic64 cmp = old_value;
290 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, 355 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
291 __tsan_memory_order_acquire); 356 __tsan_memory_order_acquire, __tsan_memory_order_acquire);
292 return cmp; 357 return cmp;
293 } 358 }
294 359
295 inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr, 360 inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
296 Atomic64 old_value, 361 Atomic64 old_value,
297 Atomic64 new_value) { 362 Atomic64 new_value) {
298 Atomic64 cmp = old_value; 363 Atomic64 cmp = old_value;
299 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, 364 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
300 __tsan_memory_order_release); 365 __tsan_memory_order_release, __tsan_memory_order_relaxed);
301 return cmp; 366 return cmp;
302 } 367 }
303 368
304 inline void MemoryBarrier() { 369 inline void MemoryBarrier() {
305 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 370 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
306 } 371 }
307 372
308 } // namespace base::subtle 373 } // namespace base::subtle
309 } // namespace base 374 } // namespace base
310 375
311 #undef ATOMICOPS_COMPILER_BARRIER 376 #undef ATOMICOPS_COMPILER_BARRIER
312 377
313 #endif // BASE_ATOMICOPS_INTERNALS_TSAN_H_ 378 #endif // BASE_ATOMICOPS_INTERNALS_TSAN_H_
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698