//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Public interface header for TSan atomics.
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

// Fixed-size value types for the __tsan_atomicN_* interface below.
// NOTE(review): __tsan_atomic64 is `long`, which is 64 bits only on LP64
// platforms (not LLP64/Windows) — presumably TSan targets LP64 only here;
// confirm before porting.
typedef char __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int __tsan_atomic32;
typedef long __tsan_atomic64;  // NOLINT
// 128-bit atomics are declared only when the compiler provides __int128:
// either __SIZEOF_INT128__ is predefined, or this is clang >= 3.2.
// (On non-clang compilers __clang_major__/__clang_minor__ are undefined and
// expand to 0 inside #if, so the second operand is simply false.)
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
__extension__ typedef __int128 __tsan_atomic128;
# define __TSAN_HAS_INT128 1
#else
# define __TSAN_HAS_INT128 0
#endif

// Part of ABI, do not change.
// http://llvm.org/viewvc/llvm-project/libcxx/trunk/include/atomic?view=markup
// The enumerators mirror C++11 std::memory_order; their values (and therefore
// their declaration order) must stay in sync with libc++'s encoding.
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst
} __tsan_memory_order;

// Atomic load of *a with memory order mo; returns the value read.
__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);
#endif

// Atomic store of v into *a with memory order mo.
void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);
#endif

// Atomically replaces *a with v; returns the previous value of *a.
__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Atomic read-modify-write: *a += v; returns the value of *a BEFORE the add.
__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Atomic read-modify-write: *a -= v; returns the value of *a before the sub.
__tsan_atomic8 __tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_sub(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_sub(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_sub(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_sub(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Atomic read-modify-write: *a &= v; returns the value of *a before the op.
__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Atomic read-modify-write: *a |= v; returns the value of *a before the op.
__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Atomic read-modify-write: *a ^= v; returns the value of *a before the op.
__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Atomic read-modify-write: *a = ~(*a & v); returns the value of *a before
// the op (GNU __atomic_fetch_nand-style; no C11 equivalent).
__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

// Weak compare-and-swap (may fail spuriously): if *a == *c, store v into *a
// with order mo and return non-zero; otherwise use fail_mo for the load.
// NOTE(review): by analogy with C11 atomic_compare_exchange_weak the current
// value is presumably written back through c on failure — confirm against the
// implementation.
int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

// Strong compare-and-swap (no spurious failure): if *a == *c, store v into *a
// with order mo and return non-zero; otherwise use fail_mo for the load.
// NOTE(review): as with the weak variant, the current value is presumably
// written back through c on failure (C11 semantics) — confirm against the
// implementation.
int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

// Value-returning compare-and-swap (c is taken by value, not pointer):
// returns the previous value of *a, in the style of GNU
// __sync_val_compare_and_swap — the caller checks success by comparing the
// return value with c.
__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#endif

// Memory fences, analogous to C11 atomic_thread_fence / atomic_signal_fence.
void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif  // TSAN_INTERFACE_ATOMIC_H