OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 // This file is an internal atomic implementation for compiler-based | 5 // This file is an internal atomic implementation for compiler-based |
6 // ThreadSanitizer. Use base/atomicops.h instead. | 6 // ThreadSanitizer. Use base/atomicops.h instead. |
7 | 7 |
8 #ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_ | 8 #ifndef BASE_ATOMICOPS_INTERNALS_TSAN_H_ |
9 #define BASE_ATOMICOPS_INTERNALS_TSAN_H_ | 9 #define BASE_ATOMICOPS_INTERNALS_TSAN_H_ |
10 | 10 |
11 #include "base/base_export.h" | 11 #include "base/base_export.h" |
12 | 12 |
13 // This struct is not part of the public API of this module; clients may not | 13 // This struct is not part of the public API of this module; clients may not |
14 // use it. (However, it's exported via BASE_EXPORT because clients implicitly | 14 // use it. (However, it's exported via BASE_EXPORT because clients implicitly |
15 // do use it at link time by inlining these functions.) | 15 // do use it at link time by inlining these functions.) |
16 // Features of this x86 CPU. Values may not be correct before main() is run, | 16 // Features of this x86 CPU. Values may not be correct before main() is run, |
17 // but are set conservatively. | 17 // but are set conservatively. |
18 struct AtomicOps_x86CPUFeatureStruct { | 18 struct AtomicOps_x86CPUFeatureStruct { |
19 bool has_amd_lock_mb_bug; // Processor has AMD memory-barrier bug; do lfence | 19 bool has_amd_lock_mb_bug; // Processor has AMD memory-barrier bug; do lfence |
20 // after acquire compare-and-swap. | 20 // after acquire compare-and-swap. |
21 bool has_sse2; // Processor has SSE2. | 21 bool has_sse2; // Processor has SSE2. |
22 }; | 22 }; |
23 BASE_EXPORT extern struct AtomicOps_x86CPUFeatureStruct | 23 BASE_EXPORT extern struct AtomicOps_x86CPUFeatureStruct |
24 AtomicOps_Internalx86CPUFeatures; | 24 AtomicOps_Internalx86CPUFeatures; |
25 | 25 |
26 #define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory") | 26 #define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory") |
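Note: the empty asm statement with a "memory" clobber is a pure compiler barrier; it emits no instruction and does not order CPU memory accesses, but the compiler may not move memory operations across it. A minimal sketch of the effect (hypothetical globals, for illustration only):

    int g_data = 0;
    int g_flag = 0;

    void Example() {
      g_data = 1;
      ATOMICOPS_COMPILER_BARRIER();  // The compiler must not sink the store to
                                     // g_data below, or hoist the store to
                                     // g_flag above, this point.
      g_flag = 1;
    }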
27 | 27 |
| 28 #include <sanitizer/tsan_interface_atomic.h> |
| 29 |
28 namespace base { | 30 namespace base { |
29 namespace subtle { | 31 namespace subtle { |
30 | 32 |
31 #ifndef TSAN_INTERFACE_ATOMIC_H | |
32 #define TSAN_INTERFACE_ATOMIC_H | |
33 | |
34 extern "C" { | |
35 | |
36 typedef char __tsan_atomic8; | |
37 typedef short __tsan_atomic16; // NOLINT | |
38 typedef int __tsan_atomic32; | |
39 typedef long __tsan_atomic64; // NOLINT | |
40 | |
41 #if defined(__SIZEOF_INT128__) \ | |
42 || (__clang_major__ * 100 + __clang_minor__ >= 302) | |
43 typedef __int128 __tsan_atomic128; | |
44 #define __TSAN_HAS_INT128 1 | |
45 #else | |
46 typedef char __tsan_atomic128; | |
47 #define __TSAN_HAS_INT128 0 | |
48 #endif | |
49 | |
50 typedef enum { | |
51 __tsan_memory_order_relaxed, | |
52 __tsan_memory_order_consume, | |
53 __tsan_memory_order_acquire, | |
54 __tsan_memory_order_release, | |
55 __tsan_memory_order_acq_rel, | |
56 __tsan_memory_order_seq_cst, | |
57 } __tsan_memory_order; | |
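These constants mirror C++11's std::memory_order values one-for-one; the mapping below is an assumption drawn from the names, shown as a hypothetical helper that is not part of this file:

    #include <atomic>

    // Hypothetical conversion, for illustration only.
    inline __tsan_memory_order ToTsanOrder(std::memory_order mo) {
      switch (mo) {
        case std::memory_order_relaxed: return __tsan_memory_order_relaxed;
        case std::memory_order_consume: return __tsan_memory_order_consume;
        case std::memory_order_acquire: return __tsan_memory_order_acquire;
        case std::memory_order_release: return __tsan_memory_order_release;
        case std::memory_order_acq_rel: return __tsan_memory_order_acq_rel;
        default:                        return __tsan_memory_order_seq_cst;
      }
    }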
58 | |
59 __tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a, | |
60 __tsan_memory_order mo); | |
61 __tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a, | |
62 __tsan_memory_order mo); | |
63 __tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a, | |
64 __tsan_memory_order mo); | |
65 __tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a, | |
66 __tsan_memory_order mo); | |
67 __tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128* a, | |
68 __tsan_memory_order mo); | |
69 | |
70 void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v, | |
71 __tsan_memory_order mo); | |
72 void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v, | |
73 __tsan_memory_order mo); | |
74 void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v, | |
75 __tsan_memory_order mo); | |
76 void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v, | |
77 __tsan_memory_order mo); | |
78 void __tsan_atomic128_store(volatile __tsan_atomic128* a, __tsan_atomic128 v, | |
79 __tsan_memory_order mo); | |
80 | |
81 __tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8* a, | |
82 __tsan_atomic8 v, __tsan_memory_order mo); | |
83 __tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16* a, | |
84 __tsan_atomic16 v, __tsan_memory_order mo); | |
85 __tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32* a, | |
86 __tsan_atomic32 v, __tsan_memory_order mo); | |
87 __tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64* a, | |
88 __tsan_atomic64 v, __tsan_memory_order mo); | |
89 __tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128* a, | |
90 __tsan_atomic128 v, __tsan_memory_order mo); | |
91 | |
92 __tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8* a, | |
93 __tsan_atomic8 v, __tsan_memory_order mo); | |
94 __tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16* a, | |
95 __tsan_atomic16 v, __tsan_memory_order mo); | |
96 __tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32* a, | |
97 __tsan_atomic32 v, __tsan_memory_order mo); | |
98 __tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64* a, | |
99 __tsan_atomic64 v, __tsan_memory_order mo); | |
100 __tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128* a, | |
101 __tsan_atomic128 v, __tsan_memory_order mo); | |
102 | |
103 __tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8* a, | |
104 __tsan_atomic8 v, __tsan_memory_order mo); | |
105 __tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16* a, | |
106 __tsan_atomic16 v, __tsan_memory_order mo); | |
107 __tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32* a, | |
108 __tsan_atomic32 v, __tsan_memory_order mo); | |
109 __tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64* a, | |
110 __tsan_atomic64 v, __tsan_memory_order mo); | |
111 __tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128* a, | |
112 __tsan_atomic128 v, __tsan_memory_order mo); | |
113 | |
114 __tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8* a, | |
115 __tsan_atomic8 v, __tsan_memory_order mo); | |
116 __tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16* a, | |
117 __tsan_atomic16 v, __tsan_memory_order mo); | |
118 __tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32* a, | |
119 __tsan_atomic32 v, __tsan_memory_order mo); | |
120 __tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64* a, | |
121 __tsan_atomic64 v, __tsan_memory_order mo); | |
122 __tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128* a, | |
123 __tsan_atomic128 v, __tsan_memory_order mo); | |
124 | |
125 __tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8* a, | |
126 __tsan_atomic8 v, __tsan_memory_order mo); | |
127 __tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16* a, | |
128 __tsan_atomic16 v, __tsan_memory_order mo); | |
129 __tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32* a, | |
130 __tsan_atomic32 v, __tsan_memory_order mo); | |
131 __tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64* a, | |
132 __tsan_atomic64 v, __tsan_memory_order mo); | |
133 __tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128* a, | |
134 __tsan_atomic128 v, __tsan_memory_order mo); | |
135 | |
136 __tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8* a, | |
137 __tsan_atomic8 v, __tsan_memory_order mo); | |
138 __tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16* a, | |
139 __tsan_atomic16 v, __tsan_memory_order mo); | |
140 __tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32* a, | |
141 __tsan_atomic32 v, __tsan_memory_order mo); | |
142 __tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64* a, | |
143 __tsan_atomic64 v, __tsan_memory_order mo); | |
144 __tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128* a, | |
145 __tsan_atomic128 v, __tsan_memory_order mo); | |
146 | |
147 int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8* a, | |
148 __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo, | |
149 __tsan_memory_order fail_mo); | |
150 int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16* a, | |
151 __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo, | |
152 __tsan_memory_order fail_mo); | |
153 int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32* a, | |
154 __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo, | |
155 __tsan_memory_order fail_mo); | |
156 int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64* a, | |
157 __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo, | |
158 __tsan_memory_order fail_mo); | |
159 int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128* a, | |
160 __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo, | |
161 __tsan_memory_order fail_mo); | |
162 | |
163 int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8* a, | |
164 __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo, | |
165 __tsan_memory_order fail_mo); | |
166 int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16* a, | |
167 __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo, | |
168 __tsan_memory_order fail_mo); | |
169 int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32* a, | |
170 __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo, | |
171 __tsan_memory_order fail_mo); | |
172 int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64* a, | |
173 __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo, | |
174 __tsan_memory_order fail_mo); | |
175 int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128* a, | |
176 __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo, | |
177 __tsan_memory_order fail_mo); | |
178 | |
179 __tsan_atomic8 __tsan_atomic8_compare_exchange_val( | |
180 volatile __tsan_atomic8* a, __tsan_atomic8 c, __tsan_atomic8 v, | |
181 __tsan_memory_order mo, __tsan_memory_order fail_mo); | |
182 __tsan_atomic16 __tsan_atomic16_compare_exchange_val( | |
183 volatile __tsan_atomic16* a, __tsan_atomic16 c, __tsan_atomic16 v, | |
184 __tsan_memory_order mo, __tsan_memory_order fail_mo); | |
185 __tsan_atomic32 __tsan_atomic32_compare_exchange_val( | |
186 volatile __tsan_atomic32* a, __tsan_atomic32 c, __tsan_atomic32 v, | |
187 __tsan_memory_order mo, __tsan_memory_order fail_mo); | |
188 __tsan_atomic64 __tsan_atomic64_compare_exchange_val( | |
189 volatile __tsan_atomic64* a, __tsan_atomic64 c, __tsan_atomic64 v, | |
190 __tsan_memory_order mo, __tsan_memory_order fail_mo); | |
191 __tsan_atomic128 __tsan_atomic128_compare_exchange_val( | |
192 volatile __tsan_atomic128* a, __tsan_atomic128 c, __tsan_atomic128 v, | |
193 __tsan_memory_order mo, __tsan_memory_order fail_mo); | |
194 | |
195 void __tsan_atomic_thread_fence(__tsan_memory_order mo); | |
196 void __tsan_atomic_signal_fence(__tsan_memory_order mo); | |
197 | |
198 } // extern "C" | |
199 | |
200 #endif // #ifndef TSAN_INTERFACE_ATOMIC_H | |
201 | |
202 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, | 33 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, |
203 Atomic32 old_value, | 34 Atomic32 old_value, |
204 Atomic32 new_value) { | 35 Atomic32 new_value) { |
205 Atomic32 cmp = old_value; | 36 Atomic32 cmp = old_value; |
206 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, | 37 __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value, |
207 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed); | 38 __tsan_memory_order_relaxed, __tsan_memory_order_relaxed); |
208 return cmp; | 39 return cmp; |
209 } | 40 } |
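A usage note: because __tsan_atomic32_compare_exchange_strong writes the value it actually observed back into cmp, the wrapper returns the previous contents of *ptr, and the swap succeeded exactly when that return value equals old_value. A minimal caller sketch (hypothetical function, assuming the base/atomicops.h API):

    // Claim a zero-initialized slot; returns true iff this thread won the race.
    bool TryClaim(volatile base::subtle::Atomic32* slot) {
      return base::subtle::NoBarrier_CompareAndSwap(slot, 0, 1) == 0;
    }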
210 | 41 |
211 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, | 42 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, |
(...skipping 153 matching lines...)
365 inline void MemoryBarrier() { | 196 inline void MemoryBarrier() { |
366 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); | 197 __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst); |
367 } | 198 } |
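This is the TSAN-instrumented equivalent of a full memory fence, corresponding to std::atomic_thread_fence(std::memory_order_seq_cst) in C++11. A minimal sketch of the publish pattern it supports (hypothetical names, assuming the NoBarrier_Store wrapper declared in base/atomicops.h); a reader would pair this with its own MemoryBarrier() between loading the flag and loading the data:

    // Writer side: the fence keeps the data store ordered before the flag store.
    void Publish(volatile base::subtle::Atomic32* data,
                 volatile base::subtle::Atomic32* ready) {
      base::subtle::NoBarrier_Store(data, 42);
      base::subtle::MemoryBarrier();
      base::subtle::NoBarrier_Store(ready, 1);
    }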
368 | 199 |
369 } // namespace base::subtle | 200 } // namespace base::subtle |
370 } // namespace base | 201 } // namespace base |
371 | 202 |
372 #undef ATOMICOPS_COMPILER_BARRIER | 203 #undef ATOMICOPS_COMPILER_BARRIER |
373 | 204 |
374 #endif // BASE_ATOMICOPS_INTERNALS_TSAN_H_ | 205 #endif // BASE_ATOMICOPS_INTERNALS_TSAN_H_ |