Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(3)

Side by Side Diff: base/atomicops_internals_tsan.h

Issue 143273005: Atomic ops cleanup. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: More C++-style declarations Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « base/atomicops_internals_mac.h ('k') | base/atomicops_internals_x86_gcc.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 // This file is an internal atomic implementation for compiler-based 5 // This file is an internal atomic implementation for compiler-based
6 // ThreadSanitizer. Use base/atomicops.h instead. 6 // ThreadSanitizer. Use base/atomicops.h instead.
7 7
8 #ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_ 8 #ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_
9 #define BASE_ATOMICOPS_INTERNALS_TSAN_H_ 9 #define BASE_ATOMICOPS_INTERNALS_TSAN_H_
10 10
(...skipping 13 matching lines...) Expand all
24 AtomicOps_Internalx86CPUFeatures; 24 AtomicOps_Internalx86CPUFeatures;
25 25
26 #define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory") 26 #define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")
27 27
28 namespace base { 28 namespace base {
29 namespace subtle { 29 namespace subtle {
30 30
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

// Declarations of the atomic entry points exported by the ThreadSanitizer
// runtime (compiler-rt's tsan_interface_atomic.h). Definitions are supplied
// by the TSan runtime library at link time.
extern "C" {

// Fixed-width integer types used by the TSan atomic interface.
typedef char __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int __tsan_atomic32;
typedef long __tsan_atomic64;  // NOLINT

// 128-bit atomics exist only when the compiler provides __int128.
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
typedef __int128 __tsan_atomic128;
#define __TSAN_HAS_INT128 1
#else
typedef char __tsan_atomic128;  // Placeholder; 128-bit ops are unavailable.
#define __TSAN_HAS_INT128 0
#endif

// Memory-order constants; the enumerator order mirrors std::memory_order.
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst,
} __tsan_memory_order;

// Atomic loads.
__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a,
    __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128* a,
    __tsan_memory_order mo);

// Atomic stores.
void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v,
    __tsan_memory_order mo);
void __tsan_atomic128_store(volatile __tsan_atomic128* a, __tsan_atomic128 v,
    __tsan_memory_order mo);

// Atomic exchange: store v, return the previous value.
__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

// Read-modify-write: each returns the value held before the operation.
__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

// Compare-and-exchange. On failure *c is updated with the observed value;
// the int return is nonzero on success (C-style bool).
int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8* a,
    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16* a,
    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32* a,
    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64* a,
    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128* a,
    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8* a,
    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16* a,
    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32* a,
    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64* a,
    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128* a,
    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

// Value-returning compare-and-exchange: returns the value observed in *a.
__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8* a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16* a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32* a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64* a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128* a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);

// Fences.
void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);

}  // extern "C"

#endif  // #ifndef TSAN_INTERFACE_ATOMIC_H
205 201
206 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr, 202 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
207 Atomic32 old_value, 203 Atomic32 old_value,
208 Atomic32 new_value) { 204 Atomic32 new_value) {
209 Atomic32 cmp = old_value; 205 Atomic32 cmp = old_value;
210 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, 206 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
211 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed); 207 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
212 return cmp; 208 return cmp;
213 } 209 }
214 210
215 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr, 211 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
216 Atomic32 new_value) { 212 Atomic32 new_value) {
217 return __tsan_atomic32_exchange(ptr, new_value, 213 return __tsan_atomic32_exchange(ptr, new_value,
218 __tsan_memory_order_relaxed); 214 __tsan_memory_order_relaxed);
219 } 215 }
220 216
221 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr, 217 inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
222 Atomic32 new_value) { 218 Atomic32 new_value) {
223 return __tsan_atomic32_exchange(ptr, new_value, 219 return __tsan_atomic32_exchange(ptr, new_value,
224 __tsan_memory_order_acquire); 220 __tsan_memory_order_acquire);
225 } 221 }
226 222
227 inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr, 223 inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
228 Atomic32 new_value) { 224 Atomic32 new_value) {
229 return __tsan_atomic32_exchange(ptr, new_value, 225 return __tsan_atomic32_exchange(ptr, new_value,
230 __tsan_memory_order_release); 226 __tsan_memory_order_release);
231 } 227 }
232 228
233 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr, 229 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
234 Atomic32 increment) { 230 Atomic32 increment) {
235 return increment + __tsan_atomic32_fetch_add(ptr, increment, 231 return increment + __tsan_atomic32_fetch_add(ptr, increment,
236 __tsan_memory_order_relaxed); 232 __tsan_memory_order_relaxed);
237 } 233 }
238 234
239 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr, 235 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
240 Atomic32 increment) { 236 Atomic32 increment) {
241 return increment + __tsan_atomic32_fetch_add(ptr, increment, 237 return increment + __tsan_atomic32_fetch_add(ptr, increment,
242 __tsan_memory_order_acq_rel); 238 __tsan_memory_order_acq_rel);
243 } 239 }
244 240
245 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr, 241 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
246 Atomic32 old_value, 242 Atomic32 old_value,
247 Atomic32 new_value) { 243 Atomic32 new_value) {
248 Atomic32 cmp = old_value; 244 Atomic32 cmp = old_value;
249 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, 245 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
250 __tsan_memory_order_acquire, __tsan_memory_order_acquire); 246 __tsan_memory_order_acquire, __tsan_memory_order_acquire);
251 return cmp; 247 return cmp;
252 } 248 }
253 249
254 inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr, 250 inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
255 Atomic32 old_value, 251 Atomic32 old_value,
256 Atomic32 new_value) { 252 Atomic32 new_value) {
257 Atomic32 cmp = old_value; 253 Atomic32 cmp = old_value;
258 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, 254 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
259 __tsan_memory_order_release, __tsan_memory_order_relaxed); 255 __tsan_memory_order_release, __tsan_memory_order_relaxed);
260 return cmp; 256 return cmp;
261 } 257 }
262 258
263 inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) { 259 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
264 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); 260 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
265 } 261 }
266 262
267 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) { 263 inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
268 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed); 264 __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
269 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 265 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
270 } 266 }
271 267
272 inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) { 268 inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
273 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release); 269 __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
274 } 270 }
275 271
276 inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) { 272 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
277 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); 273 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
278 } 274 }
279 275
280 inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) { 276 inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
281 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire); 277 return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
282 } 278 }
283 279
284 inline Atomic32 Release_Load(volatile const Atomic32 *ptr) { 280 inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
285 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 281 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
286 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed); 282 return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
287 } 283 }
288 284
289 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr, 285 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
290 Atomic64 old_value, 286 Atomic64 old_value,
291 Atomic64 new_value) { 287 Atomic64 new_value) {
292 Atomic64 cmp = old_value; 288 Atomic64 cmp = old_value;
293 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, 289 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
294 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed); 290 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
295 return cmp; 291 return cmp;
296 } 292 }
297 293
298 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr, 294 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
299 Atomic64 new_value) { 295 Atomic64 new_value) {
300 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed); 296 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
301 } 297 }
302 298
303 inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr, 299 inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
304 Atomic64 new_value) { 300 Atomic64 new_value) {
305 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire); 301 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
306 } 302 }
307 303
308 inline Atomic64 Release_AtomicExchange(volatile Atomic64 *ptr, 304 inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
309 Atomic64 new_value) { 305 Atomic64 new_value) {
310 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release); 306 return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
311 } 307 }
312 308
313 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr, 309 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
314 Atomic64 increment) { 310 Atomic64 increment) {
315 return increment + __tsan_atomic64_fetch_add(ptr, increment, 311 return increment + __tsan_atomic64_fetch_add(ptr, increment,
316 __tsan_memory_order_relaxed); 312 __tsan_memory_order_relaxed);
317 } 313 }
318 314
319 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr, 315 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
320 Atomic64 increment) { 316 Atomic64 increment) {
321 return increment + __tsan_atomic64_fetch_add(ptr, increment, 317 return increment + __tsan_atomic64_fetch_add(ptr, increment,
322 __tsan_memory_order_acq_rel); 318 __tsan_memory_order_acq_rel);
323 } 319 }
324 320
325 inline void NoBarrier_Store(volatile Atomic64 *ptr, Atomic64 value) { 321 inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
326 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed); 322 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
327 } 323 }
328 324
329 inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) { 325 inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
330 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed); 326 __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
331 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 327 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
332 } 328 }
333 329
334 inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) { 330 inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
335 __tsan_atomic64_store(ptr, value, __tsan_memory_order_release); 331 __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
336 } 332 }
337 333
338 inline Atomic64 NoBarrier_Load(volatile const Atomic64 *ptr) { 334 inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
339 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); 335 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
340 } 336 }
341 337
342 inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) { 338 inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
343 return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire); 339 return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
344 } 340 }
345 341
346 inline Atomic64 Release_Load(volatile const Atomic64 *ptr) { 342 inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
347 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 343 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
348 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed); 344 return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
349 } 345 }
350 346
351 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr, 347 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
352 Atomic64 old_value, 348 Atomic64 old_value,
353 Atomic64 new_value) { 349 Atomic64 new_value) {
354 Atomic64 cmp = old_value; 350 Atomic64 cmp = old_value;
355 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, 351 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
356 __tsan_memory_order_acquire, __tsan_memory_order_acquire); 352 __tsan_memory_order_acquire, __tsan_memory_order_acquire);
357 return cmp; 353 return cmp;
358 } 354 }
359 355
360 inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr, 356 inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
361 Atomic64 old_value, 357 Atomic64 old_value,
362 Atomic64 new_value) { 358 Atomic64 new_value) {
363 Atomic64 cmp = old_value; 359 Atomic64 cmp = old_value;
364 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value, 360 __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
365 __tsan_memory_order_release, __tsan_memory_order_relaxed); 361 __tsan_memory_order_release, __tsan_memory_order_relaxed);
366 return cmp; 362 return cmp;
367 } 363 }
368 364
369 inline void MemoryBarrier() { 365 inline void MemoryBarrier() {
370 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); 366 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
371 } 367 }
372 368
373 } // namespace base::subtle 369 } // namespace base::subtle
374 } // namespace base 370 } // namespace base
375 371
376 #undef ATOMICOPS_COMPILER_BARRIER 372 #undef ATOMICOPS_COMPILER_BARRIER
377 373
378 #endif // BASE_ATOMICOPS_INTERNALS_TSAN_H_ 374 #endif // BASE_ATOMICOPS_INTERNALS_TSAN_H_
OLDNEW
« no previous file with comments | « base/atomicops_internals_mac.h ('k') | base/atomicops_internals_x86_gcc.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698