Chromium Code Reviews

Unified Diff: src/atomicops_internals_tsan.h

Issue 129813008: Atomic ops: sync with Chromium. (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: Resolve a conflict with the new ARM64 code. Created 6 years, 10 months ago.
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 35 matching lines...)
 struct AtomicOps_x86CPUFeatureStruct {
   bool has_amd_lock_mb_bug;  // Processor has AMD memory-barrier bug; do lfence
                              // after acquire compare-and-swap.
   bool has_sse2;             // Processor has SSE2.
 };
 extern struct AtomicOps_x86CPUFeatureStruct
     AtomicOps_Internalx86CPUFeatures;

 #define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")

-#ifdef __cplusplus
 extern "C" {
-#endif
-
 typedef char __tsan_atomic8;
 typedef short __tsan_atomic16;  // NOLINT
 typedef int __tsan_atomic32;
 typedef long __tsan_atomic64;  // NOLINT

 #if defined(__SIZEOF_INT128__) \
     || (__clang_major__ * 100 + __clang_minor__ >= 302)
 typedef __int128 __tsan_atomic128;
 #define __TSAN_HAS_INT128 1
 #else
 typedef char __tsan_atomic128;
 #define __TSAN_HAS_INT128 0
 #endif

 typedef enum {
   __tsan_memory_order_relaxed,
   __tsan_memory_order_consume,
   __tsan_memory_order_acquire,
   __tsan_memory_order_release,
   __tsan_memory_order_acq_rel,
   __tsan_memory_order_seq_cst,
 } __tsan_memory_order;

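The six orders above mirror C++11's std::memory_order constants one for one. As a point of reference only, the correspondence can be written out as below; ToStdOrder is a hypothetical helper name, not part of this patch or of the TSAN interface.

#include <atomic>

// Illustration only: maps the TSAN enum onto the equivalent std::memory_order.
inline std::memory_order ToStdOrder(__tsan_memory_order mo) {
  switch (mo) {
    case __tsan_memory_order_relaxed: return std::memory_order_relaxed;
    case __tsan_memory_order_consume: return std::memory_order_consume;
    case __tsan_memory_order_acquire: return std::memory_order_acquire;
    case __tsan_memory_order_release: return std::memory_order_release;
    case __tsan_memory_order_acq_rel: return std::memory_order_acq_rel;
    case __tsan_memory_order_seq_cst: return std::memory_order_seq_cst;
  }
  return std::memory_order_seq_cst;  // Not reached for valid input.
}
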
-__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a,
     __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a,
     __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a,
     __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a,
     __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
+__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128* a,
     __tsan_memory_order mo);

-void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
+void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v,
     __tsan_memory_order mo);
-void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
+void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v,
     __tsan_memory_order mo);
-void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
+void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v,
     __tsan_memory_order mo);
-void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
+void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v,
     __tsan_memory_order mo);
-void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
+void __tsan_atomic128_store(volatile __tsan_atomic128* a, __tsan_atomic128 v,
     __tsan_memory_order mo);

-__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8* a,
     __tsan_atomic8 v, __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16* a,
     __tsan_atomic16 v, __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32* a,
     __tsan_atomic32 v, __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64* a,
     __tsan_atomic64 v, __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
+__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128* a,
     __tsan_atomic128 v, __tsan_memory_order mo);

-__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8* a,
     __tsan_atomic8 v, __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16* a,
     __tsan_atomic16 v, __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32* a,
     __tsan_atomic32 v, __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64* a,
     __tsan_atomic64 v, __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
+__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128* a,
     __tsan_atomic128 v, __tsan_memory_order mo);

-__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8* a,
     __tsan_atomic8 v, __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16* a,
     __tsan_atomic16 v, __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32* a,
     __tsan_atomic32 v, __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64* a,
     __tsan_atomic64 v, __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
+__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128* a,
     __tsan_atomic128 v, __tsan_memory_order mo);

-__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8* a,
     __tsan_atomic8 v, __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16* a,
     __tsan_atomic16 v, __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32* a,
     __tsan_atomic32 v, __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64* a,
     __tsan_atomic64 v, __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
+__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128* a,
     __tsan_atomic128 v, __tsan_memory_order mo);

-__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8* a,
     __tsan_atomic8 v, __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16* a,
     __tsan_atomic16 v, __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32* a,
     __tsan_atomic32 v, __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64* a,
     __tsan_atomic64 v, __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
+__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128* a,
     __tsan_atomic128 v, __tsan_memory_order mo);

-__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
+__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8* a,
     __tsan_atomic8 v, __tsan_memory_order mo);
-__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
+__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16* a,
     __tsan_atomic16 v, __tsan_memory_order mo);
-__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
+__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32* a,
     __tsan_atomic32 v, __tsan_memory_order mo);
-__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
+__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64* a,
     __tsan_atomic64 v, __tsan_memory_order mo);
-__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
-    __tsan_atomic64 v, __tsan_memory_order mo);
+__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128* a,
+    __tsan_atomic128 v, __tsan_memory_order mo);

-int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
-    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
+int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8* a,
+    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
-    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
+int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16* a,
+    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
-    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
+int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32* a,
+    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
-    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
+int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64* a,
+    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
-    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
+int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128* a,
+    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);

-int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
-    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
+int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8* a,
+    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
-    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
+int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16* a,
+    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
-    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
+int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32* a,
+    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
-    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
+int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64* a,
+    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);
-int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
-    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
+int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128* a,
+    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
     __tsan_memory_order fail_mo);

 __tsan_atomic8 __tsan_atomic8_compare_exchange_val(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
+    volatile __tsan_atomic8* a, __tsan_atomic8 c, __tsan_atomic8 v,
     __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic16 __tsan_atomic16_compare_exchange_val(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
+    volatile __tsan_atomic16* a, __tsan_atomic16 c, __tsan_atomic16 v,
     __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic32 __tsan_atomic32_compare_exchange_val(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
+    volatile __tsan_atomic32* a, __tsan_atomic32 c, __tsan_atomic32 v,
     __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic64 __tsan_atomic64_compare_exchange_val(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
+    volatile __tsan_atomic64* a, __tsan_atomic64 c, __tsan_atomic64 v,
     __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic128 __tsan_atomic128_compare_exchange_val(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
+    volatile __tsan_atomic128* a, __tsan_atomic128 c, __tsan_atomic128 v,
     __tsan_memory_order mo, __tsan_memory_order fail_mo);

 void __tsan_atomic_thread_fence(__tsan_memory_order mo);
 void __tsan_atomic_signal_fence(__tsan_memory_order mo);
-
-#ifdef __cplusplus
 }  // extern "C"
-#endif

 #endif  // #ifndef TSAN_INTERFACE_ATOMIC_H

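Nothing in this header defines the __tsan_* functions; when V8 is compiled with ThreadSanitizer they resolve at link time to interceptors supplied by the TSAN runtime (compiler-rt), which is what lets the race detector model each atomic access precisely. As a rough sketch of how this header gets pulled in, assuming the THREAD_SANITIZER macro convention of V8's atomicops.h (the real dispatch lives there, not in this file):

// Sketch only, not part of this patch.
#if defined(THREAD_SANITIZER)
#include "atomicops_internals_tsan.h"
// #elif ... platform-specific implementations are selected otherwise.
#endif
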
-inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
+inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                          Atomic32 old_value,
                                          Atomic32 new_value) {
   Atomic32 cmp = old_value;
   __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
       __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
   return cmp;
 }

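The idiom above is worth spelling out: on failure, __tsan_atomic32_compare_exchange_strong writes the value it actually observed back through &cmp, and on success it leaves cmp equal to old_value, so returning cmp always yields the previous contents of *ptr, which is exactly the CompareAndSwap contract. The same behavior, sketched with std::atomic purely for comparison:

#include <atomic>
#include <cstdint>

// Equivalence sketch, not part of the patch.
int32_t CasReturningOldValue(std::atomic<int32_t>* ptr,
                             int32_t old_value, int32_t new_value) {
  int32_t cmp = old_value;
  // On failure, compare_exchange_strong stores the observed value into cmp;
  // on success, cmp keeps old_value. Either way, cmp is the prior value.
  ptr->compare_exchange_strong(cmp, new_value, std::memory_order_relaxed,
                               std::memory_order_relaxed);
  return cmp;
}
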
-inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
+inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                          Atomic32 new_value) {
   return __tsan_atomic32_exchange(ptr, new_value,
       __tsan_memory_order_relaxed);
 }

-inline Atomic32 Acquire_AtomicExchange(volatile Atomic32 *ptr,
+inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
                                        Atomic32 new_value) {
   return __tsan_atomic32_exchange(ptr, new_value,
       __tsan_memory_order_acquire);
 }

-inline Atomic32 Release_AtomicExchange(volatile Atomic32 *ptr,
+inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
                                        Atomic32 new_value) {
   return __tsan_atomic32_exchange(ptr, new_value,
       __tsan_memory_order_release);
 }

-inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
+inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                           Atomic32 increment) {
   return increment + __tsan_atomic32_fetch_add(ptr, increment,
       __tsan_memory_order_relaxed);
 }

-inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
+inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                         Atomic32 increment) {
   return increment + __tsan_atomic32_fetch_add(ptr, increment,
       __tsan_memory_order_acq_rel);
 }

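Since __tsan_atomic32_fetch_add returns the value held before the addition, adding increment back means the AtomicIncrement wrappers return the updated value. A minimal usage sketch, assuming the Atomic32 typedef from atomicops.h:

// Usage sketch, not part of the patch.
v8::internal::Atomic32 counter = 5;
v8::internal::Atomic32 now =
    v8::internal::NoBarrier_AtomicIncrement(&counter, 3);
// now == 8 and counter == 8: the post-increment value is returned.
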
-inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
+inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                        Atomic32 old_value,
                                        Atomic32 new_value) {
   Atomic32 cmp = old_value;
   __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
       __tsan_memory_order_acquire, __tsan_memory_order_acquire);
   return cmp;
 }

-inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
+inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                        Atomic32 old_value,
                                        Atomic32 new_value) {
   Atomic32 cmp = old_value;
   __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
       __tsan_memory_order_release, __tsan_memory_order_relaxed);
   return cmp;
 }

-inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) {
+inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
   __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
 }

-inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
+inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
   __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
   __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
 }

-inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
+inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
   __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
 }

-inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) {
+inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
   return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
 }

-inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
+inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
   return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
 }

-inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
+inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
   __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
   return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
 }

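Note the asymmetry: Release_Store and Acquire_Load map directly onto release and acquire accesses, while the historical Acquire_Store and Release_Load are expressed as a relaxed access paired with a seq_cst fence. Together the wrappers support the usual publication pattern; a minimal usage sketch follows, assuming atomicops.h supplies the Atomic32 typedef and that these functions live in v8::internal as the namespace closers below indicate (g_payload and g_ready are hypothetical names):

// Usage sketch, not part of the patch.
v8::internal::Atomic32 g_payload = 0;
v8::internal::Atomic32 g_ready = 0;

void Publish() {
  v8::internal::NoBarrier_Store(&g_payload, 42);
  v8::internal::Release_Store(&g_ready, 1);  // Publishes g_payload.
}

bool TryConsume(v8::internal::Atomic32* out) {
  if (v8::internal::Acquire_Load(&g_ready) != 1) return false;  // Not ready.
  *out = v8::internal::NoBarrier_Load(&g_payload);  // Safe after acquire.
  return true;
}
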
-inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
+inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                          Atomic64 old_value,
                                          Atomic64 new_value) {
   Atomic64 cmp = old_value;
   __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
       __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
   return cmp;
 }

-inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
+inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                          Atomic64 new_value) {
   return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
 }

-inline Atomic64 Acquire_AtomicExchange(volatile Atomic64 *ptr,
+inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
                                        Atomic64 new_value) {
   return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
 }

-inline Atomic64 Release_AtomicExchange(volatile Atomic64 *ptr,
+inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
                                        Atomic64 new_value) {
   return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
 }

-inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
+inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                           Atomic64 increment) {
   return increment + __tsan_atomic64_fetch_add(ptr, increment,
       __tsan_memory_order_relaxed);
 }

-inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
+inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                         Atomic64 increment) {
   return increment + __tsan_atomic64_fetch_add(ptr, increment,
       __tsan_memory_order_acq_rel);
 }

-inline void NoBarrier_Store(volatile Atomic64 *ptr, Atomic64 value) {
+inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
   __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
 }

-inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
+inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
   __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
   __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
 }

-inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
+inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
   __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
 }

-inline Atomic64 NoBarrier_Load(volatile const Atomic64 *ptr) {
+inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
   return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
 }

-inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
+inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
   return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
 }

-inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
+inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
   __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
   return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
 }

-inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
+inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                        Atomic64 old_value,
                                        Atomic64 new_value) {
   Atomic64 cmp = old_value;
   __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
       __tsan_memory_order_acquire, __tsan_memory_order_acquire);
   return cmp;
 }

-inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
+inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                        Atomic64 old_value,
                                        Atomic64 new_value) {
   Atomic64 cmp = old_value;
   __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
       __tsan_memory_order_release, __tsan_memory_order_relaxed);
   return cmp;
 }

 inline void MemoryBarrier() {
   __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
 }

 }  // namespace internal
 }  // namespace v8

 #undef ATOMICOPS_COMPILER_BARRIER

 #endif  // V8_ATOMICOPS_INTERNALS_TSAN_H_